Merge JSScope into JSObject and JSScopeProperty (now js::Shape; bug 558451, r=jorendorff).

Brendan Eich 2010-08-29 11:57:08 -07:00
Parent 642851f9ba
Commit a20f6ceaad
52 changed files: 4407 additions and 4986 deletions
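Editor's note: throughout this patch one mechanical rewrite recurs — JSScope and JSScopeProperty callers become direct JSObject and js::Shape calls, and scope locks collapse into object locks. A minimal sketch of the pattern, mirroring the AlreadyHasOwnProperty hunk in jsapi.cpp below (illustration only, not part of the commit; the helper name is hypothetical):

    // Before: ask the object's JSScope, then unlock the scope.
    //     JS_LOCK_OBJ(cx, obj);
    //     JSScope *scope = obj->scope();
    //     bool found = scope->hasProperty(id);
    //     JS_UNLOCK_SCOPE(cx, scope);
    // After: the object answers directly and is unlocked directly.
    static bool
    HasOwnNativeProperty(JSContext *cx, JSObject *obj, jsid id)
    {
        JS_LOCK_OBJ(cx, obj);
        bool found = obj->nativeContains(id);   // js::Shape lookup on the object itself
        JS_UNLOCK_OBJ(cx, obj);
        return found;
    }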

View file

@ -115,8 +115,9 @@ using namespace js;
#endif
#ifdef JS_USE_JSVAL_JSID_STRUCT_TYPES
JS_PUBLIC_DATA(jsid) JS_DEFAULT_XML_NAMESPACE_ID = { (size_t)JSID_TYPE_DEFAULT_XML_NAMESPACE };
JS_PUBLIC_DATA(jsid) JSID_VOID = { (size_t)JSID_TYPE_VOID };
JS_PUBLIC_DATA(jsid) JS_DEFAULT_XML_NAMESPACE_ID = { size_t(JSID_TYPE_DEFAULT_XML_NAMESPACE) };
JS_PUBLIC_DATA(jsid) JSID_VOID = { size_t(JSID_TYPE_VOID) };
JS_PUBLIC_DATA(jsid) JSID_EMPTY = { size_t(JSID_TYPE_OBJECT) };
#endif
#ifdef JS_USE_JSVAL_JSID_STRUCT_TYPES
@ -576,6 +577,12 @@ JSRuntime::init(uint32 maxbytes)
if (!unjoinedFunctionCountMap.init())
return false;
}
propTreeStatFilename = getenv("JS_PROPTREE_STATFILE");
propTreeDumpFilename = getenv("JS_PROPTREE_DUMPFILE");
if (meterEmptyShapes()) {
if (!emptyShapes.init())
return false;
}
#endif
if (!(defaultCompartment = new JSCompartment(this)) ||
@ -1572,9 +1579,8 @@ static JSBool
AlreadyHasOwnProperty(JSContext *cx, JSObject *obj, JSAtom *atom)
{
JS_LOCK_OBJ(cx, obj);
JSScope *scope = obj->scope();
bool found = scope->hasProperty(ATOM_TO_JSID(atom));
JS_UNLOCK_SCOPE(cx, scope);
bool found = obj->nativeContains(ATOM_TO_JSID(atom));
JS_UNLOCK_OBJ(cx, obj);
return found;
}
@ -2098,8 +2104,7 @@ JS_PrintTraceThingInfo(char *buf, size_t bufsize, JSTracer *trc, void *thing, ui
JS_snprintf(buf, bufsize, "%p", fun);
} else {
if (fun->atom)
js_PutEscapedString(buf, bufsize,
ATOM_TO_STRING(fun->atom), 0);
js_PutEscapedString(buf, bufsize, ATOM_TO_STRING(fun->atom), 0);
}
} else if (clasp->flags & JSCLASS_HAS_PRIVATE) {
JS_snprintf(buf, bufsize, "%p", obj->getPrivate());
@ -2930,67 +2935,47 @@ JS_SealObject(JSContext *cx, JSObject *obj, JSBool deep)
CHECK_REQUEST(cx);
assertSameCompartment(cx, obj);
JSScope *scope;
JSIdArray *ida;
uint32 nslots, i;
/* Nothing to do if obj is already sealed. */
if (obj->sealed())
return true;
if (obj->isDenseArray() && !obj->makeDenseArraySlow(cx))
return JS_FALSE;
return false;
if (!obj->isNative()) {
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL,
JSMSG_CANT_SEAL_OBJECT,
obj->getClass()->name);
return JS_FALSE;
return false;
}
scope = obj->scope();
#if defined JS_THREADSAFE && defined DEBUG
#ifdef JS_THREADSAFE
/* Insist on scope being used exclusively by cx's thread. */
if (scope->title.ownercx != cx) {
JS_LOCK_OBJ(cx, obj);
JS_ASSERT(obj->scope() == scope);
JS_ASSERT(scope->title.ownercx == cx);
JS_UNLOCK_SCOPE(cx, scope);
}
JS_ASSERT(obj->title.ownercx == cx);
#endif
/* Nothing to do if obj's scope is already sealed. */
if (scope->sealed())
return JS_TRUE;
/* XXX Enumerate lazy properties now, as they can't be added later. */
ida = JS_Enumerate(cx, obj);
JSIdArray *ida = JS_Enumerate(cx, obj);
if (!ida)
return JS_FALSE;
return false;
JS_DestroyIdArray(cx, ida);
/* Ensure that obj has its own, mutable scope, and seal that scope. */
JS_LOCK_OBJ(cx, obj);
scope = js_GetMutableScope(cx, obj);
if (scope)
scope->seal(cx);
JS_UNLOCK_OBJ(cx, obj);
if (!scope)
return JS_FALSE;
/* If we are not sealing an entire object graph, we're done. */
/* If not sealing an entire object graph, we're done after sealing obj. */
obj->seal(cx);
if (!deep)
return JS_TRUE;
return true;
/* Walk slots in obj and if any value is a non-null object, seal it. */
nslots = scope->freeslot;
for (i = 0; i != nslots; ++i) {
for (uint32 i = 0, n = obj->freeslot; i != n; ++i) {
const Value &v = obj->getSlot(i);
if (i == JSSLOT_PRIVATE && (obj->getClass()->flags & JSCLASS_HAS_PRIVATE))
continue;
if (v.isPrimitive())
continue;
if (!JS_SealObject(cx, &v.toObject(), deep))
return JS_FALSE;
return false;
}
return JS_TRUE;
return true;
}
JS_PUBLIC_API(JSObject *)
@ -3041,18 +3026,18 @@ LookupResult(JSContext *cx, JSObject *obj, JSObject *obj2, jsid id,
}
if (obj2->isNative()) {
JSScopeProperty *sprop = (JSScopeProperty *) prop;
Shape *shape = (Shape *) prop;
if (sprop->isMethod()) {
AutoScopePropertyRooter root(cx, sprop);
if (shape->isMethod()) {
AutoShapeRooter root(cx, shape);
JS_UNLOCK_OBJ(cx, obj2);
vp->setObject(sprop->methodObject());
return obj2->scope()->methodReadBarrier(cx, sprop, vp);
vp->setObject(shape->methodObject());
return obj2->methodReadBarrier(cx, *shape, vp);
}
/* Peek at the native property's slot value, without doing a Get. */
if (SPROP_HAS_VALID_SLOT(sprop, obj2->scope()))
*vp = obj2->lockedGetSlot(sprop->slot);
if (obj2->containsSlot(shape->slot))
*vp = obj2->lockedGetSlot(shape->slot);
else
vp->setBoolean(true);
JS_UNLOCK_OBJ(cx, obj2);
@ -3177,9 +3162,8 @@ JS_AlreadyHasOwnPropertyById(JSContext *cx, JSObject *obj, jsid id, JSBool *foun
}
JS_LOCK_OBJ(cx, obj);
JSScope *scope = obj->scope();
*foundp = scope->hasProperty(id);
JS_UNLOCK_SCOPE(cx, scope);
*foundp = obj->nativeContains(id);
JS_UNLOCK_OBJ(cx, obj);
return JS_TRUE;
}
@ -3276,7 +3260,7 @@ JS_DefinePropertyWithTinyId(JSContext *cx, JSObject *obj, const char *name, int8
jsval value, JSPropertyOp getter, JSPropertyOp setter, uintN attrs)
{
return DefineProperty(cx, obj, name, Valueify(value), Valueify(getter),
Valueify(setter), attrs, JSScopeProperty::HAS_SHORTID, tinyid);
Valueify(setter), attrs, Shape::HAS_SHORTID, tinyid);
}
static JSBool
@ -3303,7 +3287,7 @@ JS_DefineUCPropertyWithTinyId(JSContext *cx, JSObject *obj, const jschar *name,
uintN attrs)
{
return DefineUCProperty(cx, obj, name, namelen, Valueify(value), Valueify(getter),
Valueify(setter), attrs, JSScopeProperty::HAS_SHORTID, tinyid);
Valueify(setter), attrs, Shape::HAS_SHORTID, tinyid);
}
JS_PUBLIC_API(JSBool)
@ -3362,7 +3346,7 @@ JS_DefineProperties(JSContext *cx, JSObject *obj, JSPropertySpec *ps)
for (ok = true; ps->name; ps++) {
ok = DefineProperty(cx, obj, ps->name, UndefinedValue(),
Valueify(ps->getter), Valueify(ps->setter),
ps->flags, JSScopeProperty::HAS_SHORTID, ps->tinyid);
ps->flags, Shape::HAS_SHORTID, ps->tinyid);
if (!ok)
break;
}
@ -3375,7 +3359,7 @@ JS_AliasProperty(JSContext *cx, JSObject *obj, const char *name, const char *ali
JSObject *obj2;
JSProperty *prop;
JSBool ok;
JSScopeProperty *sprop;
Shape *shape;
CHECK_REQUEST(cx);
assertSameCompartment(cx, obj);
@ -3399,11 +3383,11 @@ JS_AliasProperty(JSContext *cx, JSObject *obj, const char *name, const char *ali
if (!atom) {
ok = JS_FALSE;
} else {
sprop = (JSScopeProperty *)prop;
shape = (Shape *)prop;
ok = (js_AddNativeProperty(cx, obj, ATOM_TO_JSID(atom),
sprop->getter(), sprop->setter(), sprop->slot,
sprop->attributes(), sprop->getFlags() | JSScopeProperty::ALIAS,
sprop->shortid)
shape->getter(), shape->setter(), shape->slot,
shape->attributes(), shape->getFlags() | Shape::ALIAS,
shape->shortid)
!= NULL);
}
JS_UNLOCK_OBJ(cx, obj);
@ -3415,7 +3399,7 @@ JS_AliasElement(JSContext *cx, JSObject *obj, const char *name, jsint alias)
{
JSObject *obj2;
JSProperty *prop;
JSScopeProperty *sprop;
Shape *shape;
JSBool ok;
CHECK_REQUEST(cx);
@ -3438,11 +3422,11 @@ JS_AliasElement(JSContext *cx, JSObject *obj, const char *name, jsint alias)
numBuf, name, obj2->getClass()->name);
return JS_FALSE;
}
sprop = (JSScopeProperty *)prop;
shape = (Shape *)prop;
ok = (js_AddNativeProperty(cx, obj, INT_TO_JSID(alias),
sprop->getter(), sprop->setter(), sprop->slot,
sprop->attributes(), sprop->getFlags() | JSScopeProperty::ALIAS,
sprop->shortid)
shape->getter(), shape->setter(), shape->slot,
shape->attributes(), shape->getFlags() | Shape::ALIAS,
shape->shortid)
!= NULL);
JS_UNLOCK_OBJ(cx, obj);
return ok;
@ -3471,27 +3455,28 @@ GetPropertyDescriptorById(JSContext *cx, JSObject *obj, jsid id, uintN flags,
desc->obj = obj2;
if (obj2->isNative()) {
JSScopeProperty *sprop = (JSScopeProperty *) prop;
desc->attrs = sprop->attributes();
Shape *shape = (Shape *) prop;
desc->attrs = shape->attributes();
if (sprop->isMethod()) {
if (shape->isMethod()) {
desc->getter = desc->setter = PropertyStub;
desc->value.setObject(sprop->methodObject());
desc->value.setObject(shape->methodObject());
} else {
desc->getter = sprop->getter();
desc->setter = sprop->setter();
if (SPROP_HAS_VALID_SLOT(sprop, obj2->scope()))
desc->value = obj2->lockedGetSlot(sprop->slot);
desc->getter = shape->getter();
desc->setter = shape->setter();
if (obj2->containsSlot(shape->slot))
desc->value = obj2->lockedGetSlot(shape->slot);
else
desc->value.setUndefined();
}
JS_UNLOCK_OBJ(cx, obj2);
} else if (obj2->isProxy()) {
JSAutoResolveFlags rf(cx, flags);
return own
? JSProxy::getOwnPropertyDescriptor(cx, obj2, id, desc)
: JSProxy::getPropertyDescriptor(cx, obj2, id, desc);
} else {
if (obj2->isProxy()) {
JSAutoResolveFlags rf(cx, flags);
return own
? JSProxy::getOwnPropertyDescriptor(cx, obj2, id, desc)
: JSProxy::getPropertyDescriptor(cx, obj2, id, desc);
}
if (!obj2->getAttributes(cx, id, &desc->attrs))
return false;
desc->getter = NULL;
@ -3587,7 +3572,7 @@ SetPropertyAttributesById(JSContext *cx, JSObject *obj, jsid id, uintN attrs, JS
return true;
}
JSBool ok = obj->isNative()
? js_SetNativeAttributes(cx, obj, (JSScopeProperty *) prop, attrs)
? js_SetNativeAttributes(cx, obj, (Shape *) prop, attrs)
: obj->setAttributes(cx, id, &attrs);
if (ok)
*foundp = true;
@ -3780,7 +3765,7 @@ JS_Enumerate(JSContext *cx, JSObject *obj)
* XXX reverse iterator for properties, unreverse and meld with jsinterp.c's
* prop_iterator_class somehow...
* + preserve the obj->enumerate API while optimizing the native object case
* + native case here uses a JSScopeProperty *, but that iterates in reverse!
* + native case here uses a Shape *, but that iterates in reverse!
* + so we make non-native match, by reverse-iterating after JS_Enumerating
*/
const uint32 JSSLOT_ITER_INDEX = JSSLOT_PRIVATE + 1;
@ -3809,7 +3794,7 @@ prop_iter_trace(JSTracer *trc, JSObject *obj)
if (obj->fslots[JSSLOT_ITER_INDEX].toInt32() < 0) {
/* Native case: just mark the next property to visit. */
((JSScopeProperty *) pdata)->trace(trc);
((Shape *) pdata)->trace(trc);
} else {
/* Non-native case: mark each id in the JSIdArray private. */
JSIdArray *ida = (JSIdArray *) pdata;
@ -3842,8 +3827,7 @@ JS_PUBLIC_API(JSObject *)
JS_NewPropertyIterator(JSContext *cx, JSObject *obj)
{
JSObject *iterobj;
JSScope *scope;
void *pdata;
const void *pdata;
jsint index;
JSIdArray *ida;
@ -3854,9 +3838,8 @@ JS_NewPropertyIterator(JSContext *cx, JSObject *obj)
return NULL;
if (obj->isNative()) {
/* Native case: start with the last property in obj's own scope. */
scope = obj->scope();
pdata = scope->lastProperty();
/* Native case: start with the last property in obj. */
pdata = obj->lastProperty();
index = -1;
} else {
/*
@ -3874,7 +3857,7 @@ JS_NewPropertyIterator(JSContext *cx, JSObject *obj)
}
/* iterobj cannot escape to other threads here. */
iterobj->setPrivate(pdata);
iterobj->setPrivate(const_cast<void *>(pdata));
iterobj->fslots[JSSLOT_ITER_INDEX].setInt32(index);
return iterobj;
}
@ -3884,7 +3867,7 @@ JS_NextProperty(JSContext *cx, JSObject *iterobj, jsid *idp)
{
jsint i;
JSObject *obj;
JSScopeProperty *sprop;
const Shape *shape;
JSIdArray *ida;
CHECK_REQUEST(cx);
@ -3894,21 +3877,22 @@ JS_NextProperty(JSContext *cx, JSObject *iterobj, jsid *idp)
/* Native case: private data is a property tree node pointer. */
obj = iterobj->getParent();
JS_ASSERT(obj->isNative());
sprop = (JSScopeProperty *) iterobj->getPrivate();
shape = (Shape *) iterobj->getPrivate();
/*
* If the next property in the property tree ancestor line is
* not enumerable, or it's an alias, skip it and keep on trying
* to find an enumerable property that is still in scope.
* If the next property mapped by obj in the property tree ancestor
* line is not enumerable, or it's an alias, skip it and keep on trying
* to find an enumerable property that is still in obj.
*/
while (sprop && (!sprop->enumerable() || sprop->isAlias()))
sprop = sprop->parent;
while (shape->previous() && (!shape->enumerable() || shape->isAlias()))
shape = shape->previous();
if (!sprop) {
if (!shape->previous()) {
JS_ASSERT(JSID_IS_EMPTY(shape->id));
*idp = JSID_VOID;
} else {
iterobj->setPrivate(sprop->parent);
*idp = sprop->id;
iterobj->setPrivate(const_cast<Shape *>(shape->previous()));
*idp = shape->id;
}
} else {
/* Non-native case: use the ida enumerated when iterobj was created. */
@ -4083,9 +4067,8 @@ JS_CloneFunctionObject(JSContext *cx, JSObject *funobj, JSObject *parent)
}
JSFunction *fun = GET_FUNCTION_PRIVATE(cx, funobj);
JSObject *clone = CloneFunctionObject(cx, fun, parent);
if (!clone)
return NULL;
if (!FUN_FLAT_CLOSURE(fun))
return CloneFunctionObject(cx, fun, parent);
/*
* A flat closure carries its own environment, so why clone it? In case
@ -4099,42 +4082,27 @@ JS_CloneFunctionObject(JSContext *cx, JSObject *funobj, JSObject *parent)
* they were activations, respecting the skip field in each upvar's cookie
* but looking up the property by name instead of frame slot.
*/
if (FUN_FLAT_CLOSURE(fun)) {
JS_ASSERT(funobj->dslots);
if (!js_EnsureReservedSlots(cx, clone,
fun->countInterpretedReservedSlots())) {
return NULL;
}
JSObject *clone = js_AllocFlatClosure(cx, fun, parent);
if (!clone)
return NULL;
JSUpvarArray *uva = fun->u.i.script->upvars();
JS_ASSERT(uva->length <= clone->dslots[-1].toPrivateUint32());
JSUpvarArray *uva = fun->u.i.script->upvars();
uint32 i = uva->length;
JS_ASSERT(i != 0);
void *mark = JS_ARENA_MARK(&cx->tempPool);
jsuword *names = js_GetLocalNameArray(cx, fun, &cx->tempPool);
if (!names)
return NULL;
uint32 i = 0, n = uva->length;
for (; i < n; i++) {
JSObject *obj = parent;
int skip = uva->vector[i].level();
while (--skip > 0) {
if (!obj) {
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL,
JSMSG_BAD_CLONE_FUNOBJ_SCOPE);
goto break2;
}
obj = obj->getParent();
for (Shape::Range r(fun->lastUpvar()); i-- != 0; r.popFront()) {
JSObject *obj = parent;
int skip = uva->vector[i].level();
while (--skip > 0) {
if (!obj) {
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL,
JSMSG_BAD_CLONE_FUNOBJ_SCOPE);
return NULL;
}
JSAtom *atom = JS_LOCAL_NAME_TO_ATOM(names[i]);
if (!obj->getProperty(cx, ATOM_TO_JSID(atom), &clone->dslots[i]))
break;
obj = obj->getParent();
}
break2:
JS_ARENA_RELEASE(&cx->tempPool, mark);
if (i < n)
if (!obj->getProperty(cx, r.front().id, clone->getFlatClosureUpvars() + i))
return NULL;
}
@ -4609,7 +4577,7 @@ JS_CompileUCFunctionForPrincipals(JSContext *cx, JSObject *obj,
fun = NULL;
goto out2;
}
if (!js_AddLocal(cx, fun, argAtom, JSLOCAL_ARG)) {
if (!fun->addLocal(cx, argAtom, JSLOCAL_ARG)) {
fun = NULL;
goto out2;
}
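Editor's note: the JS_NewPropertyIterator and JS_NextProperty hunks above show the new ownership model — an object's own properties hang off obj->lastProperty(), each js::Shape linked to its predecessor via previous() and terminated by a shape whose id is JSID_EMPTY. A hedged sketch of walking that lineage in one pass with Shape::Range, as JS_GetPropertyDescArray does later in this commit (illustrative helper, not from the patch):

    // Editor's sketch: visit own native properties, newest first, skipping
    // non-enumerable and alias entries exactly as JS_NextProperty does.
    static void
    ForEachOwnShape(JSObject *obj)
    {
        for (js::Shape::Range r = obj->lastProperty()->all(); !r.empty(); r.popFront()) {
            const js::Shape &shape = r.front();
            if (!shape.enumerable() || shape.isAlias())
                continue;
            jsid id = shape.id;    // property key; shape.slot, shape.attributes(), etc.
            (void) id;             // ... use the property here ...
        }
    }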

View file

@ -308,21 +308,22 @@ JSVAL_TO_PRIVATE(jsval v)
#define JSID_TYPE_MASK 0x7
/*
* Do not use canonical 'id' for jsid parameters since this is a magic word in
* Avoid using canonical 'id' for jsid parameters since this is a magic word in
* Objective-C++ which, apparently, wants to be able to #include jsapi.h.
*/
#define id iden
static JS_ALWAYS_INLINE JSBool
JSID_IS_STRING(jsid iden)
JSID_IS_STRING(jsid id)
{
return (JSID_BITS(iden) & JSID_TYPE_MASK) == 0;
return (JSID_BITS(id) & JSID_TYPE_MASK) == 0;
}
static JS_ALWAYS_INLINE JSString *
JSID_TO_STRING(jsid iden)
JSID_TO_STRING(jsid id)
{
JS_ASSERT(JSID_IS_STRING(iden));
return (JSString *)(JSID_BITS(iden));
JS_ASSERT(JSID_IS_STRING(id));
return (JSString *)(JSID_BITS(id));
}
JS_PUBLIC_API(JSBool)
@ -332,24 +333,24 @@ JS_StringHasBeenInterned(JSString *str);
static JS_ALWAYS_INLINE jsid
INTERNED_STRING_TO_JSID(JSString *str)
{
jsid iden;
jsid id;
JS_ASSERT(JS_StringHasBeenInterned(str));
JS_ASSERT(((size_t)str & JSID_TYPE_MASK) == 0);
JSID_BITS(iden) = (size_t)str;
return iden;
JSID_BITS(id) = (size_t)str;
return id;
}
static JS_ALWAYS_INLINE JSBool
JSID_IS_INT(jsid iden)
JSID_IS_INT(jsid id)
{
return !!(JSID_BITS(iden) & JSID_TYPE_INT);
return !!(JSID_BITS(id) & JSID_TYPE_INT);
}
static JS_ALWAYS_INLINE int32
JSID_TO_INT(jsid iden)
JSID_TO_INT(jsid id)
{
JS_ASSERT(JSID_IS_INT(iden));
return ((int32)JSID_BITS(iden)) >> 1;
JS_ASSERT(JSID_IS_INT(id));
return ((int32)JSID_BITS(id)) >> 1;
}
#define JSID_INT_MIN (-(1 << 30))
@ -365,45 +366,46 @@ INT_FITS_IN_JSID(int32 i)
static JS_ALWAYS_INLINE jsid
INT_TO_JSID(int32 i)
{
jsid iden;
jsid id;
JS_ASSERT(INT_FITS_IN_JSID(i));
JSID_BITS(iden) = ((i << 1) | JSID_TYPE_INT);
return iden;
JSID_BITS(id) = ((i << 1) | JSID_TYPE_INT);
return id;
}
static JS_ALWAYS_INLINE JSBool
JSID_IS_OBJECT(jsid iden)
JSID_IS_OBJECT(jsid id)
{
return (JSID_BITS(iden) & JSID_TYPE_MASK) == JSID_TYPE_OBJECT;
return (JSID_BITS(id) & JSID_TYPE_MASK) == JSID_TYPE_OBJECT &&
(size_t)JSID_BITS(id) != JSID_TYPE_OBJECT;
}
static JS_ALWAYS_INLINE JSObject *
JSID_TO_OBJECT(jsid iden)
JSID_TO_OBJECT(jsid id)
{
JS_ASSERT(JSID_IS_OBJECT(iden));
return (JSObject *)(JSID_BITS(iden) & ~(size_t)JSID_TYPE_MASK);
JS_ASSERT(JSID_IS_OBJECT(id));
return (JSObject *)(JSID_BITS(id) & ~(size_t)JSID_TYPE_MASK);
}
static JS_ALWAYS_INLINE jsid
OBJECT_TO_JSID(JSObject *obj)
{
jsid iden;
jsid id;
JS_ASSERT(obj != NULL);
JS_ASSERT(((size_t)obj & JSID_TYPE_MASK) == 0);
JSID_BITS(iden) = ((size_t)obj | JSID_TYPE_OBJECT);
return iden;
JSID_BITS(id) = ((size_t)obj | JSID_TYPE_OBJECT);
return id;
}
static JS_ALWAYS_INLINE JSBool
JSID_IS_GCTHING(jsid iden)
JSID_IS_GCTHING(jsid id)
{
return JSID_IS_STRING(iden) || JSID_IS_OBJECT(iden);
return JSID_IS_STRING(id) || JSID_IS_OBJECT(id);
}
static JS_ALWAYS_INLINE void *
JSID_TO_GCTHING(jsid iden)
JSID_TO_GCTHING(jsid id)
{
return (void *)(JSID_BITS(iden) & ~(size_t)JSID_TYPE_MASK);
return (void *)(JSID_BITS(id) & ~(size_t)JSID_TYPE_MASK);
}
/*
@ -412,11 +414,11 @@ JSID_TO_GCTHING(jsid iden)
*/
static JS_ALWAYS_INLINE JSBool
JSID_IS_DEFAULT_XML_NAMESPACE(jsid iden)
JSID_IS_DEFAULT_XML_NAMESPACE(jsid id)
{
JS_ASSERT_IF(((size_t)JSID_BITS(iden) & JSID_TYPE_MASK) == JSID_TYPE_DEFAULT_XML_NAMESPACE,
JSID_BITS(iden) == JSID_TYPE_DEFAULT_XML_NAMESPACE);
return ((size_t)JSID_BITS(iden) == JSID_TYPE_DEFAULT_XML_NAMESPACE);
JS_ASSERT_IF(((size_t)JSID_BITS(id) & JSID_TYPE_MASK) == JSID_TYPE_DEFAULT_XML_NAMESPACE,
JSID_BITS(id) == JSID_TYPE_DEFAULT_XML_NAMESPACE);
return ((size_t)JSID_BITS(id) == JSID_TYPE_DEFAULT_XML_NAMESPACE);
}
#ifdef JS_USE_JSVAL_JSID_STRUCT_TYPES
@ -433,17 +435,27 @@ extern JS_PUBLIC_DATA(jsid) JS_DEFAULT_XML_NAMESPACE_ID;
*/
static JS_ALWAYS_INLINE JSBool
JSID_IS_VOID(jsid iden)
JSID_IS_VOID(jsid id)
{
JS_ASSERT_IF(((size_t)JSID_BITS(iden) & JSID_TYPE_MASK) == JSID_TYPE_VOID,
JSID_BITS(iden) == JSID_TYPE_VOID);
return ((size_t)JSID_BITS(iden) == JSID_TYPE_VOID);
JS_ASSERT_IF(((size_t)JSID_BITS(id) & JSID_TYPE_MASK) == JSID_TYPE_VOID,
JSID_BITS(id) == JSID_TYPE_VOID);
return ((size_t)JSID_BITS(id) == JSID_TYPE_VOID);
}
static JS_ALWAYS_INLINE JSBool
JSID_IS_EMPTY(jsid id)
{
return ((size_t)JSID_BITS(id) == JSID_TYPE_OBJECT);
}
#undef id
#ifdef JS_USE_JSVAL_JSID_STRUCT_TYPES
extern JS_PUBLIC_DATA(jsid) JSID_VOID;
extern JS_PUBLIC_DATA(jsid) JSID_EMPTY;
#else
#define JSID_VOID ((jsid)JSID_TYPE_VOID)
# define JSID_VOID ((jsid)JSID_TYPE_VOID)
# define JSID_EMPTY ((jsid)JSID_TYPE_OBJECT)
#endif
/************************************************************************/
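Editor's note: the jsid changes above add an empty-id sentinel (JSID_EMPTY, tag JSID_TYPE_OBJECT with a null payload) for the merged empty shapes, which is why JSID_IS_OBJECT now also rejects that exact bit pattern. A small hedged sketch of the three-bit tagging these accessors implement (illustration only):

    /* Editor's sketch: an int32 round-trips through the tagged encoding. */
    static void
    CheckIntJsidRoundTrip(void)
    {
        int32 i = 42;
        JS_ASSERT(INT_FITS_IN_JSID(i));
        jsid propid = INT_TO_JSID(i);          /* stores (i << 1) | JSID_TYPE_INT */
        JS_ASSERT(JSID_IS_INT(propid));
        JS_ASSERT(JSID_TO_INT(propid) == 42);
        JS_ASSERT(!JSID_IS_STRING(propid));    /* strings have all three tag bits clear */
        JS_ASSERT(!JSID_IS_EMPTY(propid));     /* JSID_EMPTY is reserved for empty shapes */
    }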

View file

@ -755,7 +755,7 @@ array_getProperty(JSContext *cx, JSObject *obj, jsid id, Value *vp)
obj->getDenseArrayElement(i).isMagic(JS_ARRAY_HOLE)) {
JSObject *obj2;
JSProperty *prop;
JSScopeProperty *sprop;
const Shape *shape;
JSObject *proto = obj->getProto();
if (!proto) {
@ -769,8 +769,8 @@ array_getProperty(JSContext *cx, JSObject *obj, jsid id, Value *vp)
return JS_FALSE;
if (prop && obj2->isNative()) {
sprop = (JSScopeProperty *) prop;
if (!js_NativeGet(cx, obj, obj2, sprop, JSGET_METHOD_BARRIER, vp))
shape = (const Shape *) prop;
if (!js_NativeGet(cx, obj, obj2, shape, JSGET_METHOD_BARRIER, vp))
return JS_FALSE;
JS_UNLOCK_OBJ(cx, obj2);
}
@ -842,7 +842,7 @@ js_PrototypeHasIndexedProperties(JSContext *cx, JSObject *obj)
*/
if (!obj->isNative())
return JS_TRUE;
if (obj->scope()->hadIndexedProperties())
if (obj->isIndexed())
return JS_TRUE;
}
return JS_FALSE;
@ -1006,7 +1006,7 @@ array_trace(JSTracer *trc, JSObject *obj)
Class js_ArrayClass = {
"Array",
Class::NON_NATIVE |
JSCLASS_HAS_RESERVED_SLOTS(JSObject::DENSE_ARRAY_FIXED_RESERVED_SLOTS) |
JSCLASS_HAS_RESERVED_SLOTS(JSObject::DENSE_ARRAY_CLASS_RESERVED_SLOTS) |
JSCLASS_HAS_CACHED_PROTO(JSProto_Array) |
JSCLASS_FAST_CONSTRUCTOR,
PropertyStub, /* addProperty */
@ -1063,30 +1063,25 @@ JSObject::makeDenseArraySlow(JSContext *cx)
{
JS_ASSERT(isDenseArray());
/*
* Save old map now, before calling InitScopeForObject. We'll have to undo
* on error. This is gross, but a better way is not obvious.
*/
JSObjectMap *oldMap = map;
/*
* Create a native scope. All slow arrays other than Array.prototype get
* the same initial shape.
*/
uint32 emptyShape;
JSObject *obj = this;
JSObject *arrayProto = obj->getProto();
if (arrayProto->getClass() == &js_ObjectClass) {
/* obj is Array.prototype. */
emptyShape = js_GenerateShape(cx, false);
} else {
/* arrayProto is Array.prototype. */
JS_ASSERT(arrayProto->getClass() == &js_SlowArrayClass);
emptyShape = arrayProto->scope()->emptyScope->shape;
}
JSScope *scope = JSScope::create(cx, &js_SlowArrayClass, obj, emptyShape);
if (!scope)
return JS_FALSE;
JSObject *arrayProto = getProto();
if (!InitScopeForObject(cx, this, &js_SlowArrayClass, arrayProto))
return false;
uint32 capacity;
if (obj->dslots) {
capacity = obj->getDenseArrayCapacity();
obj->dslots[-1].setPrivateUint32(JS_INITIAL_NSLOTS + capacity);
if (dslots) {
capacity = getDenseArrayCapacity();
dslots[-1].setPrivateUint32(JS_INITIAL_NSLOTS + capacity);
} else {
/*
* Array.prototype is constructed as a dense array, but is immediately slowified before
@ -1095,28 +1090,40 @@ JSObject::makeDenseArraySlow(JSContext *cx)
capacity = 0;
}
scope->freeslot = obj->numSlots();
uint32 nslots = numSlots();
if (nslots >= JS_NSLOTS_LIMIT) {
setMap(oldMap);
JS_ReportOutOfMemory(cx);
return false;
}
freeslot = nslots;
/* Begin with the length property to share more of the property tree. */
if (!scope->addProperty(cx, ATOM_TO_JSID(cx->runtime->atomState.lengthAtom),
array_length_getter, array_length_setter,
JSSLOT_ARRAY_LENGTH, JSPROP_PERMANENT | JSPROP_SHARED, 0, 0)) {
goto out_bad;
if (!addProperty(cx, ATOM_TO_JSID(cx->runtime->atomState.lengthAtom),
array_length_getter, array_length_setter,
JSSLOT_ARRAY_LENGTH, JSPROP_PERMANENT | JSPROP_SHARED, 0, 0)) {
setMap(oldMap);
return false;
}
/* Create new properties pointing to existing elements. */
for (uint32 i = 0; i < capacity; i++) {
jsid id;
if (!ValueToId(cx, Int32Value(i), &id))
goto out_bad;
if (!ValueToId(cx, Int32Value(i), &id)) {
setMap(oldMap);
return false;
}
if (obj->getDenseArrayElement(i).isMagic(JS_ARRAY_HOLE)) {
obj->setDenseArrayElement(i, UndefinedValue());
if (getDenseArrayElement(i).isMagic(JS_ARRAY_HOLE)) {
setDenseArrayElement(i, UndefinedValue());
continue;
}
if (!scope->addDataProperty(cx, id, JS_INITIAL_NSLOTS + i, JSPROP_ENUMERATE))
goto out_bad;
if (!addDataProperty(cx, id, JS_INITIAL_NSLOTS + i, JSPROP_ENUMERATE)) {
setMap(oldMap);
return false;
}
}
/*
@ -1126,14 +1133,11 @@ JSObject::makeDenseArraySlow(JSContext *cx)
* can store an arbitrary value.
*/
JS_ASSERT(js_SlowArrayClass.flags & JSCLASS_HAS_PRIVATE);
obj->voidDenseOnlyArraySlots();
obj->clasp = &js_SlowArrayClass;
obj->map = scope;
return JS_TRUE;
voidDenseOnlyArraySlots();
out_bad:
scope->destroy(cx);
return JS_FALSE;
/* Finally, update class. */
clasp = &js_SlowArrayClass;
return true;
}
/* Transfer ownership of buffer to returned string. */
@ -2998,9 +3002,9 @@ js_NewEmptyArray(JSContext* cx, JSObject* proto, int32 len)
if (!obj)
return NULL;
/* Initialize all fields of JSObject. */
obj->map = const_cast<JSObjectMap *>(&JSObjectMap::sharedNonNative);
obj->init(&js_ArrayClass, proto, proto->getParent(), NullValue());
/* Initialize all fields, calling init before setting obj->map. */
obj->init(&js_ArrayClass, proto, proto->getParent(), NullValue(), cx);
obj->setSharedNonNativeMap();
obj->setArrayLength(len);
obj->setDenseArrayCapacity(0);
return obj;
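Editor's note: JSObject::makeDenseArraySlow above now builds the slow-array shape lineage directly on the object via addProperty/addDataProperty, and on failure restores the saved map instead of taking the old out_bad/scope->destroy path, so a failed conversion leaves the object a dense array. Callers keep the contract shown in JS_SealObject earlier in this diff; a hedged sketch of that calling pattern (hypothetical helper name):

    // Editor's sketch: slowify a dense array before per-property Shapes are
    // needed (sealing, watchpoints, ...), as JS_SealObject does above.
    static bool
    EnsureSlowArray(JSContext *cx, JSObject *obj)
    {
        if (!obj->isDenseArray())
            return true;                      // already slow, or not an array at all
        return obj->makeDenseArraySlow(cx);   // false => obj keeps its old map and class
    }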

View file

@ -110,7 +110,7 @@ JSObject::isArray() const
/*
* Dense arrays are not native -- aobj->isNative() for a dense array aobj
* results in false, meaning aobj->map does not point to a JSScope.
* results in false, meaning aobj->map does not point to a js::Shape.
*
* But Array methods are called via aobj.sort(), e.g., and the interpreter and
* the trace recorder must consult the property cache in order to perform well.
@ -249,8 +249,8 @@ js_Array(JSContext *cx, uintN argc, js::Value *vp);
* parameter. The caller promises to fill in the first |capacity| values
* starting from that pointer immediately after this function returns and
* without triggering GC (so this method is allowed to leave those
* uninitialized) and to set them to non-JSVAL_HOLE values, so that the
* resulting array has length and count both equal to |capacity|.
* uninitialized) and to set them to non-JS_ARRAY_HOLE-magic-why values, so
* that the resulting array has length and count both equal to |capacity|.
*
* FIXME: for some strange reason, when this file is included from
* dom/ipc/TabParent.cpp in MSVC, jsuint resolves to a slightly different

View file

@ -183,73 +183,55 @@ JS_DEFINE_CALLINFO_2(extern, INT32, js_StringToInt32, CONTEXT, STRING, 1, ACCSET
/* Nb: it's always safe to set isDefinitelyAtom to false if you're unsure or don't know. */
static inline JSBool
AddPropertyHelper(JSContext* cx, JSObject* obj, JSScopeProperty* sprop, bool isDefinitelyAtom)
AddPropertyHelper(JSContext* cx, JSObject* obj, Shape* shape, bool isDefinitelyAtom)
{
JS_LOCK_OBJ(cx, obj);
JS_ASSERT(shape->previous() == obj->lastProperty());
uint32 slot = sprop->slot;
JSScope* scope = obj->scope();
if (slot != scope->freeslot)
return false;
JS_ASSERT(sprop->parent == scope->lastProperty());
if (scope->isSharedEmpty()) {
scope = js_GetMutableScope(cx, obj);
if (!scope)
return false;
} else {
JS_ASSERT(!scope->hasProperty(sprop));
}
if (!scope->table) {
if (slot < obj->numSlots()) {
JS_ASSERT(obj->getSlot(scope->freeslot).isUndefined());
++scope->freeslot;
} else {
if (!js_AllocSlot(cx, obj, &slot))
goto exit_trace;
if (slot != sprop->slot) {
js_FreeSlot(cx, obj, slot);
goto exit_trace;
}
}
scope->extend(cx, sprop, isDefinitelyAtom);
} else {
JSScopeProperty *sprop2 =
scope->addProperty(cx, sprop->id, sprop->getter(), sprop->setter(),
SPROP_INVALID_SLOT, sprop->attributes(), sprop->getFlags(),
sprop->shortid);
if (sprop2 != sprop)
if (obj->nativeEmpty()) {
if (!obj->ensureClassReservedSlotsForEmptyObject(cx))
goto exit_trace;
}
uint32 slot;
slot = shape->slot;
JS_ASSERT(slot == obj->freeslot);
if (slot < obj->numSlots()) {
JS_ASSERT(obj->getSlot(slot).isUndefined());
++obj->freeslot;
JS_ASSERT(obj->freeslot != 0);
} else {
if (!obj->allocSlot(cx, &slot))
goto exit_trace;
JS_ASSERT(slot == shape->slot);
}
obj->extend(cx, shape, isDefinitelyAtom);
if (js_IsPropertyCacheDisabled(cx))
goto exit_trace;
JS_UNLOCK_SCOPE(cx, scope);
JS_UNLOCK_OBJ(cx, obj);
return true;
exit_trace:
JS_UNLOCK_SCOPE(cx, scope);
JS_UNLOCK_OBJ(cx, obj);
return false;
}
JSBool FASTCALL
js_AddProperty(JSContext* cx, JSObject* obj, JSScopeProperty* sprop)
js_AddProperty(JSContext* cx, JSObject* obj, Shape* shape)
{
return AddPropertyHelper(cx, obj, sprop, /* isDefinitelyAtom = */false);
return AddPropertyHelper(cx, obj, shape, /* isDefinitelyAtom = */false);
}
JS_DEFINE_CALLINFO_3(extern, BOOL, js_AddProperty, CONTEXT, OBJECT, SCOPEPROP, 0, ACCSET_STORE_ANY)
JS_DEFINE_CALLINFO_3(extern, BOOL, js_AddProperty, CONTEXT, OBJECT, SHAPE, 0, ACCSET_STORE_ANY)
JSBool FASTCALL
js_AddAtomProperty(JSContext* cx, JSObject* obj, JSScopeProperty* sprop)
js_AddAtomProperty(JSContext* cx, JSObject* obj, Shape* shape)
{
return AddPropertyHelper(cx, obj, sprop, /* isDefinitelyAtom = */true);
return AddPropertyHelper(cx, obj, shape, /* isDefinitelyAtom = */true);
}
JS_DEFINE_CALLINFO_3(extern, BOOL, js_AddAtomProperty, CONTEXT, OBJECT, SCOPEPROP,
0, ACCSET_STORE_ANY)
JS_DEFINE_CALLINFO_3(extern, BOOL, js_AddAtomProperty, CONTEXT, OBJECT, SHAPE, 0, ACCSET_STORE_ANY)
static JSBool
HasProperty(JSContext* cx, JSObject* obj, jsid id)
@ -326,7 +308,7 @@ js_NewNullClosure(JSContext* cx, JSObject* funobj, JSObject* proto, JSObject* pa
if (!closure)
return NULL;
closure->initSharingEmptyScope(&js_FunctionClass, proto, parent, PrivateValue(fun));
closure->initSharingEmptyShape(&js_FunctionClass, proto, parent, PrivateValue(fun), cx);
return closure;
}
JS_DEFINE_CALLINFO_4(extern, OBJECT, js_NewNullClosure, CONTEXT, OBJECT, OBJECT, OBJECT,

Просмотреть файл

@ -222,7 +222,7 @@ struct ClosureVarInfo;
#define _JS_CTYPE_CONSTRUCTOR_RETRY _JS_CTYPE(JSObject *, _JS_PTR, --, --, FAIL_NULL | \
JSTN_CONSTRUCTOR)
#define _JS_CTYPE_REGEXP _JS_CTYPE(JSObject *, _JS_PTR, "","r", INFALLIBLE)
#define _JS_CTYPE_SCOPEPROP _JS_CTYPE(JSScopeProperty *, _JS_PTR, --, --, INFALLIBLE)
#define _JS_CTYPE_SHAPE _JS_CTYPE(js::Shape *, _JS_PTR, --, --, INFALLIBLE)
#define _JS_CTYPE_TRACERSTATE _JS_CTYPE(TracerState *, _JS_PTR, --, --, INFALLIBLE)
#define _JS_CTYPE_FRAGMENT _JS_CTYPE(nanojit::Fragment *, _JS_PTR, --, --, INFALLIBLE)
#define _JS_CTYPE_CLASS _JS_CTYPE(js::Class *, _JS_PTR, --, --, INFALLIBLE)

View file

@ -826,13 +826,13 @@ js_NewContext(JSRuntime *rt, size_t stackChunkSize)
if (ok) {
/*
* Ensure that the empty scopes initialized by
* JSScope::initRuntimeState get the desired special shapes.
* Shape::initRuntimeState get the desired special shapes.
* (The rt->state dance above guarantees that this abuse of
* rt->shapeGen is thread-safe.)
*/
uint32 shapeGen = rt->shapeGen;
rt->shapeGen = 0;
ok = JSScope::initRuntimeState(cx);
ok = Shape::initRuntimeState(cx);
if (rt->shapeGen < shapeGen)
rt->shapeGen = shapeGen;
}
@ -1063,7 +1063,7 @@ js_DestroyContext(JSContext *cx, JSDestroyContextMode mode)
JS_BeginRequest(cx);
#endif
JSScope::finishRuntimeState(cx);
Shape::finishRuntimeState(cx);
js_FinishRuntimeNumberState(cx);
/* Unpin all common atoms before final GC. */

View file

@ -59,6 +59,7 @@
#include "jsatom.h"
#include "jsdhash.h"
#include "jsdtoa.h"
#include "jsfun.h"
#include "jsgc.h"
#include "jsgcchunk.h"
#include "jshashtable.h"
@ -1183,7 +1184,7 @@ typedef enum JSRuntimeState {
typedef struct JSPropertyTreeEntry {
JSDHashEntryHdr hdr;
JSScopeProperty *child;
js::Shape *child;
} JSPropertyTreeEntry;
@ -1346,17 +1347,6 @@ struct JSRuntime {
bool gcRunning;
bool gcRegenShapes;
/*
* During gc, if rt->gcRegenShapes &&
* (scope->flags & JSScope::SHAPE_REGEN) == rt->gcRegenShapesScopeFlag,
* then the scope's shape has already been regenerated during this GC.
* To avoid having to sweep JSScopes, the bit's meaning toggles with each
* shape-regenerating GC.
*
* FIXME Once scopes are GC'd (bug 505004), this will be obsolete.
*/
uint8 gcRegenShapesScopeFlag;
#ifdef JS_GC_ZEAL
jsrefcount gcZeal;
#endif
@ -1485,9 +1475,9 @@ struct JSRuntime {
#define JS_PROPERTY_TREE(cx) ((cx)->runtime->propertyTree)
/*
* The propertyRemovals counter is incremented for every JSScope::clear,
* and for each JSScope::remove method call that frees a slot in an object.
* See js_NativeGet and js_NativeSet in jsobj.cpp.
* The propertyRemovals counter is incremented for every JSObject::clear,
* and for each JSObject::remove method call that frees a slot in the given
* object. See js_NativeGet and js_NativeSet in jsobj.cpp.
*/
int32 propertyRemovals;
@ -1539,14 +1529,14 @@ struct JSRuntime {
/*
* Runtime-shared empty scopes for well-known built-in objects that lack
* class prototypes (the usual locus of an emptyScope). Mnemonic: ABCDEW
* class prototypes (the usual locus of an emptyShape). Mnemonic: ABCDEW
*/
JSEmptyScope *emptyArgumentsScope;
JSEmptyScope *emptyBlockScope;
JSEmptyScope *emptyCallScope;
JSEmptyScope *emptyDeclEnvScope;
JSEmptyScope *emptyEnumeratorScope;
JSEmptyScope *emptyWithScope;
js::EmptyShape *emptyArgumentsShape;
js::EmptyShape *emptyBlockShape;
js::EmptyShape *emptyCallShape;
js::EmptyShape *emptyDeclEnvShape;
js::EmptyShape *emptyEnumeratorShape;
js::EmptyShape *emptyWithShape;
/*
* Various metering fields are defined at the end of JSRuntime. In this
@ -1578,16 +1568,34 @@ struct JSRuntime {
jsrefcount claimedTitles;
jsrefcount deadContexts;
jsrefcount deadlocksAvoided;
jsrefcount liveScopes;
jsrefcount liveShapes;
jsrefcount sharedTitles;
jsrefcount totalScopes;
jsrefcount liveScopeProps;
jsrefcount liveScopePropsPreSweep;
jsrefcount totalScopeProps;
jsrefcount totalShapes;
jsrefcount liveObjectProps;
jsrefcount liveObjectPropsPreSweep;
jsrefcount totalObjectProps;
jsrefcount livePropTreeNodes;
jsrefcount duplicatePropTreeNodes;
jsrefcount totalPropTreeNodes;
jsrefcount propTreeKidsChunks;
jsrefcount liveDictModeNodes;
/*
* NB: emptyShapes is init'ed iff at least one of these envars is set:
*
* JS_PROPTREE_STATFILE statistics on the property tree forest
* JS_PROPTREE_DUMPFILE all paths in the property tree forest
*/
const char *propTreeStatFilename;
const char *propTreeDumpFilename;
bool meterEmptyShapes() const { return propTreeStatFilename || propTreeDumpFilename; }
typedef js::HashSet<js::EmptyShape *,
js::DefaultHasher<js::EmptyShape *>,
js::SystemAllocPolicy> EmptyShapeSet;
EmptyShapeSet emptyShapes;
/* String instrumentation. */
jsrefcount liveStrings;
@ -1741,12 +1749,8 @@ typedef struct JSResolvingEntry {
extern const JSDebugHooks js_NullDebugHooks; /* defined in jsdbgapi.cpp */
namespace js {
class AutoGCRooter;
}
namespace js {
class RegExp;
class RegExpStatics
{
@ -1853,7 +1857,7 @@ class RegExpStatics
void getRightContext(JSSubString *out) const;
};
}
} /* namespace js */
struct JSContext
{
@ -2416,7 +2420,7 @@ class AutoGCRooter {
enum {
JSVAL = -1, /* js::AutoValueRooter */
SPROP = -2, /* js::AutoScopePropertyRooter */
SHAPE = -2, /* js::AutoShapeRooter */
PARSER = -3, /* js::Parser */
SCRIPT = -4, /* js::AutoScriptRooter */
ENUMERATOR = -5, /* js::AutoEnumStateRooter */
@ -2600,11 +2604,11 @@ class AutoArrayRooter : private AutoGCRooter {
JS_DECL_USE_GUARD_OBJECT_NOTIFIER
};
class AutoScopePropertyRooter : private AutoGCRooter {
class AutoShapeRooter : private AutoGCRooter {
public:
AutoScopePropertyRooter(JSContext *cx, JSScopeProperty *sprop
JS_GUARD_OBJECT_NOTIFIER_PARAM)
: AutoGCRooter(cx, SPROP), sprop(sprop)
AutoShapeRooter(JSContext *cx, const js::Shape *shape
JS_GUARD_OBJECT_NOTIFIER_PARAM)
: AutoGCRooter(cx, SHAPE), shape(shape)
{
JS_GUARD_OBJECT_NOTIFIER_INIT;
}
@ -2613,7 +2617,7 @@ class AutoScopePropertyRooter : private AutoGCRooter {
friend void MarkRuntime(JSTracer *trc);
private:
JSScopeProperty * const sprop;
const js::Shape * const shape;
JS_DECL_USE_GUARD_OBJECT_NOTIFIER
};
@ -2809,6 +2813,37 @@ class AutoReleasePtr {
~AutoReleasePtr() { cx->free(ptr); }
};
class AutoLocalNameArray {
public:
explicit AutoLocalNameArray(JSContext *cx, JSFunction *fun
JS_GUARD_OBJECT_NOTIFIER_PARAM)
: context(cx),
mark(JS_ARENA_MARK(&cx->tempPool)),
names(fun->getLocalNameArray(cx, &cx->tempPool)),
count(fun->countLocalNames())
{
JS_GUARD_OBJECT_NOTIFIER_INIT;
}
~AutoLocalNameArray() {
JS_ARENA_RELEASE(&context->tempPool, mark);
}
operator bool() const { return !!names; }
uint32 length() const { return count; }
const jsuword &operator [](unsigned i) const { return names[i]; }
private:
JSContext *context;
void *mark;
jsuword *names;
uint32 count;
JS_DECL_USE_GUARD_OBJECT_NOTIFIER
};
} /* namespace js */
class JSAutoResolveFlags
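Editor's note: AutoLocalNameArray above packages the mark/allocate/release sequence that call sites such as the old JS_CloneFunctionObject code spelled out by hand with JS_ARENA_MARK, js_GetLocalNameArray and JS_ARENA_RELEASE. A hedged usage sketch, assuming fun actually has local names (illustration only; helper name hypothetical):

    // Editor's sketch: RAII view of a function's local-name array.
    static bool
    VisitLocalNames(JSContext *cx, JSFunction *fun)
    {
        js::AutoLocalNameArray names(cx, fun);   // marks cx->tempPool and fills the array
        if (!names)                              // allocation failure
            return false;
        for (uint32 i = 0; i < names.length(); i++) {
            JSAtom *atom = JS_LOCAL_NAME_TO_ATOM(names[i]);  // may be NULL for unnamed args
            (void) atom;                         // ... inspect the local here ...
        }
        return true;                             // destructor releases the arena mark
    }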

View file

@ -548,7 +548,7 @@ assertSameCompartment(JSContext *cx, T1 t1, T2 t2, T3 t3, T4 t4, T5 t5)
#undef START_ASSERT_SAME_COMPARTMENT
inline JSBool
callJSNative(JSContext *cx, js::Native native, JSObject *thisobj, uintN argc, js::Value *argv, js::Value *rval)
CallJSNative(JSContext *cx, js::Native native, JSObject *thisobj, uintN argc, js::Value *argv, js::Value *rval)
{
assertSameCompartment(cx, thisobj, ValueArray(argv, argc));
JSBool ok = native(cx, thisobj, argc, argv, rval);
@ -558,7 +558,7 @@ callJSNative(JSContext *cx, js::Native native, JSObject *thisobj, uintN argc, js
}
inline JSBool
callJSFastNative(JSContext *cx, js::FastNative native, uintN argc, js::Value *vp)
CallJSFastNative(JSContext *cx, js::FastNative native, uintN argc, js::Value *vp)
{
assertSameCompartment(cx, ValueArray(vp, argc + 2));
JSBool ok = native(cx, argc, vp);
@ -568,7 +568,7 @@ callJSFastNative(JSContext *cx, js::FastNative native, uintN argc, js::Value *vp
}
inline JSBool
callJSPropertyOp(JSContext *cx, js::PropertyOp op, JSObject *obj, jsid id, js::Value *vp)
CallJSPropertyOp(JSContext *cx, js::PropertyOp op, JSObject *obj, jsid id, js::Value *vp)
{
assertSameCompartment(cx, obj, id, *vp);
JSBool ok = op(cx, obj, id, vp);
@ -578,7 +578,7 @@ callJSPropertyOp(JSContext *cx, js::PropertyOp op, JSObject *obj, jsid id, js::V
}
inline JSBool
callJSPropertyOpSetter(JSContext *cx, js::PropertyOp op, JSObject *obj, jsid id, js::Value *vp)
CallJSPropertyOpSetter(JSContext *cx, js::PropertyOp op, JSObject *obj, jsid id, js::Value *vp)
{
assertSameCompartment(cx, obj, id, *vp);
return op(cx, obj, id, vp);

View file

@ -413,7 +413,7 @@ JS_ClearInterrupt(JSRuntime *rt, JSInterruptHook *hoop, void **closurep)
typedef struct JSWatchPoint {
JSCList links;
JSObject *object; /* weak link, see js_FinalizeObject */
JSScopeProperty *sprop;
const Shape *shape;
PropertyOp setter;
JSWatchPointHandler handler;
JSObject *closure;
@ -424,7 +424,7 @@ typedef struct JSWatchPoint {
#define JSWP_HELD 0x2 /* held while running handler/setter */
static bool
IsWatchedProperty(JSContext *cx, JSScopeProperty *sprop);
IsWatchedProperty(JSContext *cx, const Shape &shape);
/*
* NB: DropWatchPointAndUnlock releases cx->runtime->debuggerLock in all cases.
@ -432,53 +432,53 @@ IsWatchedProperty(JSContext *cx, JSScopeProperty *sprop);
static JSBool
DropWatchPointAndUnlock(JSContext *cx, JSWatchPoint *wp, uintN flag)
{
JSBool ok;
JSScopeProperty *sprop;
JSScope *scope;
PropertyOp setter;
bool ok = true;
JSRuntime *rt = cx->runtime;
ok = JS_TRUE;
wp->flags &= ~flag;
if (wp->flags != 0) {
DBG_UNLOCK(cx->runtime);
DBG_UNLOCK(rt);
return ok;
}
/*
* Remove wp from the list, then if there are no other watchpoints for
* wp->sprop in any scope, restore wp->sprop->setter from wp.
* wp->shape in any scope, restore wp->shape->setter from wp.
*/
++cx->runtime->debuggerMutations;
++rt->debuggerMutations;
JS_REMOVE_LINK(&wp->links);
sprop = wp->sprop;
/*
* Passing null for the scope parameter tells js_GetWatchedSetter to find
* any watch point for sprop, and not to lock or unlock rt->debuggerLock.
* If js_ChangeNativePropertyAttrs fails, propagate failure after removing
* wp->closure's root and freeing wp.
*/
setter = js_GetWatchedSetter(cx->runtime, NULL, sprop);
DBG_UNLOCK(cx->runtime);
const Shape *shape = wp->shape;
PropertyOp setter = NULL;
for (JSWatchPoint *wp2 = (JSWatchPoint *)rt->watchPointList.next;
&wp2->links != &rt->watchPointList;
wp2 = (JSWatchPoint *)wp2->links.next) {
if (wp2->shape == shape) {
setter = wp->setter;
break;
}
}
DBG_UNLOCK(rt);
if (!setter) {
JS_LOCK_OBJ(cx, wp->object);
scope = wp->object->scope();
/*
* If the property wasn't found on wp->object, or it isn't still being
* watched, then someone else must have deleted or unwatched it, and we
* don't need to change the property attributes.
*/
JSScopeProperty *wprop = scope->lookup(sprop->id);
const Shape *wprop = wp->object->nativeLookup(shape->id);
if (wprop &&
wprop->hasSetterValue() == sprop->hasSetterValue() &&
IsWatchedProperty(cx, wprop)) {
sprop = scope->changeProperty(cx, wprop, 0, wprop->attributes(),
wprop->getter(), wp->setter);
if (!sprop)
ok = JS_FALSE;
wprop->hasSetterValue() == shape->hasSetterValue() &&
IsWatchedProperty(cx, *wprop)) {
shape = wp->object->changeProperty(cx, wprop, 0, wprop->attributes(),
wprop->getter(), wp->setter);
if (!shape)
ok = false;
}
JS_UNLOCK_SCOPE(cx, scope);
JS_UNLOCK_OBJ(cx, wp->object);
}
cx->free(wp);
@ -502,8 +502,8 @@ js_TraceWatchPoints(JSTracer *trc, JSObject *obj)
&wp->links != &rt->watchPointList;
wp = (JSWatchPoint *)wp->links.next) {
if (wp->object == obj) {
wp->sprop->trace(trc);
if (wp->sprop->hasSetterValue() && wp->setter)
wp->shape->trace(trc);
if (wp->shape->hasSetterValue() && wp->setter)
JS_CALL_OBJECT_TRACER(trc, CastAsObject(wp->setter), "wp->setter");
JS_CALL_OBJECT_TRACER(trc, wp->closure, "wp->closure");
}
@ -542,57 +542,30 @@ js_SweepWatchPoints(JSContext *cx)
* NB: FindWatchPoint must be called with rt->debuggerLock acquired.
*/
static JSWatchPoint *
FindWatchPoint(JSRuntime *rt, JSScope *scope, jsid id)
FindWatchPoint(JSRuntime *rt, JSObject *obj, jsid id)
{
JSWatchPoint *wp;
for (wp = (JSWatchPoint *)rt->watchPointList.next;
&wp->links != &rt->watchPointList;
wp = (JSWatchPoint *)wp->links.next) {
if (wp->object->scope() == scope && wp->sprop->id == id)
if (wp->object == obj && wp->shape->id == id)
return wp;
}
return NULL;
}
JSScopeProperty *
js_FindWatchPoint(JSRuntime *rt, JSScope *scope, jsid id)
const Shape *
js_FindWatchPoint(JSRuntime *rt, JSObject *obj, jsid id)
{
JSWatchPoint *wp;
JSScopeProperty *sprop;
const Shape *shape;
DBG_LOCK(rt);
wp = FindWatchPoint(rt, scope, id);
sprop = wp ? wp->sprop : NULL;
wp = FindWatchPoint(rt, obj, id);
shape = wp ? wp->shape : NULL;
DBG_UNLOCK(rt);
return sprop;
}
/*
* Secret handshake with DropWatchPointAndUnlock: if (!scope), we know our
* caller has acquired rt->debuggerLock, so we don't have to.
*/
PropertyOp
js_GetWatchedSetter(JSRuntime *rt, JSScope *scope,
const JSScopeProperty *sprop)
{
PropertyOp setter;
JSWatchPoint *wp;
setter = NULL;
if (scope)
DBG_LOCK(rt);
for (wp = (JSWatchPoint *)rt->watchPointList.next;
&wp->links != &rt->watchPointList;
wp = (JSWatchPoint *)wp->links.next) {
if ((!scope || wp->object->scope() == scope) && wp->sprop == sprop) {
setter = wp->setter;
break;
}
}
if (scope)
DBG_UNLOCK(rt);
return setter;
return shape;
}
JSBool
@ -603,22 +576,21 @@ js_watch_set(JSContext *cx, JSObject *obj, jsid id, Value *vp)
for (JSWatchPoint *wp = (JSWatchPoint *)rt->watchPointList.next;
&wp->links != &rt->watchPointList;
wp = (JSWatchPoint *)wp->links.next) {
JSScopeProperty *sprop = wp->sprop;
if (wp->object == obj && SPROP_USERID(sprop) == id &&
const Shape *shape = wp->shape;
if (wp->object == obj && SHAPE_USERID(shape) == id &&
!(wp->flags & JSWP_HELD)) {
wp->flags |= JSWP_HELD;
DBG_UNLOCK(rt);
JS_LOCK_OBJ(cx, obj);
jsid propid = sprop->id;
jsid userid = SPROP_USERID(sprop);
JSScope *scope = obj->scope();
jsid propid = shape->id;
jsid userid = SHAPE_USERID(shape);
JS_UNLOCK_OBJ(cx, obj);
/* NB: wp is held, so we can safely dereference it still. */
if (!wp->handler(cx, obj, propid,
SPROP_HAS_VALID_SLOT(sprop, scope)
? Jsvalify(obj->getSlotMT(cx, sprop->slot))
obj->containsSlot(shape->slot)
? Jsvalify(obj->getSlotMT(cx, shape->slot))
: JSVAL_VOID,
Jsvalify(vp), wp->closure)) {
DBG_LOCK(rt);
@ -631,11 +603,11 @@ js_watch_set(JSContext *cx, JSObject *obj, jsid id, Value *vp)
* prevent any funny business between watchpoints and setters.
*/
JSBool ok = !wp->setter ||
(sprop->hasSetterValue()
(shape->hasSetterValue()
? InternalCall(cx, obj,
ObjectValue(*CastAsObject(wp->setter)),
1, vp, vp)
: callJSPropertyOpSetter(cx, wp->setter, obj, userid, vp));
: CallJSPropertyOpSetter(cx, wp->setter, obj, userid, vp));
DBG_LOCK(rt);
return DropWatchPointAndUnlock(cx, wp, JSWP_HELD) && ok;
@ -661,17 +633,17 @@ js_watch_set_wrapper(JSContext *cx, JSObject *obj, uintN argc, Value *argv,
}
static bool
IsWatchedProperty(JSContext *cx, JSScopeProperty *sprop)
IsWatchedProperty(JSContext *cx, const Shape &shape)
{
if (sprop->hasSetterValue()) {
JSObject *funobj = sprop->setterObject();
if (shape.hasSetterValue()) {
JSObject *funobj = shape.setterObject();
if (!funobj || !funobj->isFunction())
return false;
JSFunction *fun = GET_FUNCTION_PRIVATE(cx, funobj);
return FUN_NATIVE(fun) == js_watch_set_wrapper;
}
return sprop->setterOp() == js_watch_set;
return shape.setterOp() == js_watch_set;
}
PropertyOp
@ -710,7 +682,7 @@ JS_SetWatchPoint(JSContext *cx, JSObject *obj, jsid id,
jsid propid;
JSObject *pobj;
JSProperty *prop;
JSScopeProperty *sprop;
const Shape *shape;
JSRuntime *rt;
JSBool ok;
JSWatchPoint *wp;
@ -747,18 +719,18 @@ JS_SetWatchPoint(JSContext *cx, JSObject *obj, jsid id,
if (!js_LookupProperty(cx, obj, propid, &pobj, &prop))
return JS_FALSE;
sprop = (JSScopeProperty *) prop;
shape = (Shape *) prop;
rt = cx->runtime;
if (!sprop) {
if (!shape) {
/* Check for a deleted symbol watchpoint, which holds its property. */
sprop = js_FindWatchPoint(rt, obj->scope(), propid);
if (!sprop) {
shape = js_FindWatchPoint(rt, obj, propid);
if (!shape) {
/* Make a new property in obj so we can watch for the first set. */
if (!js_DefineNativeProperty(cx, obj, propid, UndefinedValue(), NULL, NULL,
JSPROP_ENUMERATE, 0, 0, &prop)) {
return JS_FALSE;
}
sprop = (JSScopeProperty *) prop;
shape = (Shape *) prop;
}
} else if (pobj != obj) {
/* Clone the prototype property so we can watch the right object. */
@ -768,14 +740,14 @@ JS_SetWatchPoint(JSContext *cx, JSObject *obj, jsid id,
intN shortid;
if (pobj->isNative()) {
valroot.set(SPROP_HAS_VALID_SLOT(sprop, pobj->scope())
? pobj->lockedGetSlot(sprop->slot)
valroot.set(pobj->containsSlot(shape->slot)
? pobj->lockedGetSlot(shape->slot)
: UndefinedValue());
getter = sprop->getter();
setter = sprop->setter();
attrs = sprop->attributes();
flags = sprop->getFlags();
shortid = sprop->shortid;
getter = shape->getter();
setter = shape->setter();
attrs = shape->attributes();
flags = shape->getFlags();
shortid = shape->shortid;
JS_UNLOCK_OBJ(cx, pobj);
} else {
if (!pobj->getProperty(cx, propid, valroot.addr()) ||
@ -793,19 +765,19 @@ JS_SetWatchPoint(JSContext *cx, JSObject *obj, jsid id,
shortid, &prop)) {
return JS_FALSE;
}
sprop = (JSScopeProperty *) prop;
shape = (Shape *) prop;
}
/*
* At this point, prop/sprop exists in obj, obj is locked, and we must
* At this point, prop/shape exists in obj, obj is locked, and we must
* unlock the object before returning.
*/
ok = JS_TRUE;
DBG_LOCK(rt);
wp = FindWatchPoint(rt, obj->scope(), propid);
wp = FindWatchPoint(rt, obj, propid);
if (!wp) {
DBG_UNLOCK(rt);
watcher = js_WrapWatchedSetter(cx, propid, sprop->attributes(), sprop->setter());
watcher = js_WrapWatchedSetter(cx, propid, shape->attributes(), shape->setter());
if (!watcher) {
ok = JS_FALSE;
goto out;
@ -819,13 +791,13 @@ JS_SetWatchPoint(JSContext *cx, JSObject *obj, jsid id,
wp->handler = NULL;
wp->closure = NULL;
wp->object = obj;
wp->setter = sprop->setter();
wp->setter = shape->setter();
wp->flags = JSWP_LIVE;
/* XXXbe nest in obj lock here */
sprop = js_ChangeNativePropertyAttrs(cx, obj, sprop, 0, sprop->attributes(),
sprop->getter(), watcher);
if (!sprop) {
shape = js_ChangeNativePropertyAttrs(cx, obj, shape, 0, shape->attributes(),
shape->getter(), watcher);
if (!shape) {
/* Self-link so DropWatchPointAndUnlock can JS_REMOVE_LINK it. */
JS_INIT_CLIST(&wp->links);
DBG_LOCK(rt);
@ -833,7 +805,7 @@ JS_SetWatchPoint(JSContext *cx, JSObject *obj, jsid id,
ok = JS_FALSE;
goto out;
}
wp->sprop = sprop;
wp->shape = shape;
/*
* Now that wp is fully initialized, append it to rt's wp list.
@ -841,7 +813,7 @@ JS_SetWatchPoint(JSContext *cx, JSObject *obj, jsid id,
* a watchpoint for (obj, propid).
*/
DBG_LOCK(rt);
JS_ASSERT(!FindWatchPoint(rt, obj->scope(), propid));
JS_ASSERT(!FindWatchPoint(rt, obj, propid));
JS_APPEND_LINK(&wp->links, &rt->watchPointList);
++rt->debuggerMutations;
}
@ -866,7 +838,7 @@ JS_ClearWatchPoint(JSContext *cx, JSObject *obj, jsid id,
for (wp = (JSWatchPoint *)rt->watchPointList.next;
&wp->links != &rt->watchPointList;
wp = (JSWatchPoint *)wp->links.next) {
if (wp->object == obj && SPROP_USERID(wp->sprop) == id) {
if (wp->object == obj && SHAPE_USERID(wp->shape) == id) {
if (handlerp)
*handlerp = wp->handler;
if (closurep)
@ -962,7 +934,7 @@ extern JS_PUBLIC_API(jsuword *)
JS_GetFunctionLocalNameArray(JSContext *cx, JSFunction *fun, void **markp)
{
*markp = JS_ARENA_MARK(&cx->tempPool);
return js_GetLocalNameArray(cx, fun, &cx->tempPool);
return fun->getLocalNameArray(cx, &cx->tempPool);
}
extern JS_PUBLIC_API(JSAtom *)
@ -1324,34 +1296,40 @@ JS_EvaluateInStackFrame(JSContext *cx, JSStackFrame *fp,
/************************************************************************/
/* XXXbe this all needs to be reworked to avoid requiring JSScope types. */
/* This all should be reworked to avoid requiring JSScopeProperty types. */
JS_PUBLIC_API(JSScopeProperty *)
JS_PropertyIterator(JSObject *obj, JSScopeProperty **iteratorp)
{
JSScopeProperty *sprop;
JSScope *scope;
const Shape *shape;
sprop = *iteratorp;
scope = obj->scope();
/* The caller passes null in *iteratorp to get things started. */
shape = (Shape *) *iteratorp;
if (!shape) {
shape = obj->lastProperty();
} else {
shape = shape->previous();
if (!shape->previous()) {
JS_ASSERT(JSID_IS_EMPTY(shape->id));
shape = NULL;
}
}
/* XXXbe minor(?) incompatibility: iterate in reverse definition order */
sprop = sprop ? sprop->parent : scope->lastProperty();
*iteratorp = sprop;
return sprop;
return *iteratorp = reinterpret_cast<JSScopeProperty *>(const_cast<Shape *>(shape));
}
JS_PUBLIC_API(JSBool)
JS_GetPropertyDesc(JSContext *cx, JSObject *obj, JSScopeProperty *sprop,
JSPropertyDesc *pd)
{
pd->id = IdToJsval(sprop->id);
Shape *shape = (Shape *) sprop;
pd->id = IdToJsval(shape->id);
JSBool wasThrowing = cx->throwing;
AutoValueRooter lastException(cx, cx->exception);
cx->throwing = JS_FALSE;
if (!js_GetProperty(cx, obj, sprop->id, Valueify(&pd->value))) {
if (!js_GetProperty(cx, obj, shape->id, Valueify(&pd->value))) {
if (!cx->throwing) {
pd->flags = JSPD_ERROR;
pd->value = JSVAL_VOID;
@ -1367,27 +1345,26 @@ JS_GetPropertyDesc(JSContext *cx, JSObject *obj, JSScopeProperty *sprop,
if (wasThrowing)
cx->exception = lastException.value();
pd->flags |= (sprop->enumerable() ? JSPD_ENUMERATE : 0)
| (!sprop->writable() ? JSPD_READONLY : 0)
| (!sprop->configurable() ? JSPD_PERMANENT : 0);
pd->flags |= (shape->enumerable() ? JSPD_ENUMERATE : 0)
| (!shape->writable() ? JSPD_READONLY : 0)
| (!shape->configurable() ? JSPD_PERMANENT : 0);
pd->spare = 0;
if (sprop->getter() == js_GetCallArg) {
pd->slot = sprop->shortid;
if (shape->getter() == js_GetCallArg) {
pd->slot = shape->shortid;
pd->flags |= JSPD_ARGUMENT;
} else if (sprop->getter() == js_GetCallVar) {
pd->slot = sprop->shortid;
} else if (shape->getter() == js_GetCallVar) {
pd->slot = shape->shortid;
pd->flags |= JSPD_VARIABLE;
} else {
pd->slot = 0;
}
pd->alias = JSVAL_VOID;
JSScope *scope = obj->scope();
if (SPROP_HAS_VALID_SLOT(sprop, scope)) {
JSScopeProperty *aprop;
for (aprop = scope->lastProperty(); aprop; aprop = aprop->parent) {
if (aprop != sprop && aprop->slot == sprop->slot) {
pd->alias = IdToJsval(aprop->id);
if (obj->containsSlot(shape->slot)) {
for (Shape::Range r = obj->lastProperty()->all(); !r.empty(); r.popFront()) {
const Shape &aprop = r.front();
if (&aprop != shape && aprop.slot == shape->slot) {
pd->alias = IdToJsval(aprop.id);
break;
}
}
@ -1398,11 +1375,6 @@ JS_GetPropertyDesc(JSContext *cx, JSObject *obj, JSScopeProperty *sprop,
JS_PUBLIC_API(JSBool)
JS_GetPropertyDescArray(JSContext *cx, JSObject *obj, JSPropertyDescArray *pda)
{
JSScope *scope;
uint32 i, n;
JSPropertyDesc *pd;
JSScopeProperty *sprop;
Class *clasp = obj->getClass();
if (!obj->isNative() || (clasp->flags & JSCLASS_NEW_ENUMERATE)) {
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL,
@ -1412,25 +1384,25 @@ JS_GetPropertyDescArray(JSContext *cx, JSObject *obj, JSPropertyDescArray *pda)
if (!clasp->enumerate(cx, obj))
return JS_FALSE;
/* have no props, or object's scope has not mutated from that of proto */
scope = obj->scope();
if (scope->entryCount == 0) {
/* Return an empty pda early if obj has no own properties. */
if (obj->nativeEmpty()) {
pda->length = 0;
pda->array = NULL;
return JS_TRUE;
}
n = scope->entryCount;
pd = (JSPropertyDesc *) cx->malloc((size_t)n * sizeof(JSPropertyDesc));
uint32 n = obj->propertyCount();
JSPropertyDesc *pd = (JSPropertyDesc *) cx->malloc(size_t(n) * sizeof(JSPropertyDesc));
if (!pd)
return JS_FALSE;
i = 0;
for (sprop = scope->lastProperty(); sprop; sprop = sprop->parent) {
uint32 i = 0;
for (Shape::Range r = obj->lastProperty()->all(); !r.empty(); r.popFront()) {
if (!js_AddRoot(cx, Valueify(&pd[i].id), NULL))
goto bad;
if (!js_AddRoot(cx, Valueify(&pd[i].value), NULL))
goto bad;
if (!JS_GetPropertyDesc(cx, obj, sprop, &pd[i]))
Shape *shape = const_cast<Shape *>(&r.front());
if (!JS_GetPropertyDesc(cx, obj, reinterpret_cast<JSScopeProperty *>(shape), &pd[i]))
goto bad;
if ((pd[i].flags & JSPD_ALIAS) && !js_AddRoot(cx, Valueify(&pd[i].alias), NULL))
goto bad;
@ -1580,21 +1552,14 @@ JS_SetDebugErrorHook(JSRuntime *rt, JSDebugErrorHook hook, void *closure)
JS_PUBLIC_API(size_t)
JS_GetObjectTotalSize(JSContext *cx, JSObject *obj)
{
size_t nbytes;
JSScope *scope;
size_t nbytes = (obj->isFunction() && obj->getPrivate() == obj)
? sizeof(JSFunction)
: sizeof *obj;
nbytes = sizeof *obj;
if (obj->dslots) {
nbytes += (obj->dslots[-1].toPrivateUint32() - JS_INITIAL_NSLOTS + 1)
* sizeof obj->dslots[0];
}
if (obj->isNative()) {
scope = obj->scope();
if (!scope->isSharedEmpty()) {
nbytes += sizeof *scope;
nbytes += SCOPE_CAPACITY(scope) * sizeof(JSScopeProperty *);
}
}
return nbytes;
}
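Editor's note: JS_PropertyIterator above keeps its JSScopeProperty * signature for API compatibility but now walks the Shape lineage, starting from obj->lastProperty() when *iteratorp is null and returning NULL at the empty-id sentinel. A hedged usage sketch pairing it with JS_GetPropertyDesc (rooting and error reporting elided; helper name hypothetical):

    // Editor's sketch: debugger-style walk over an object's own properties.
    static void
    DescribeProperties(JSContext *cx, JSObject *obj)
    {
        JSScopeProperty *iter = NULL;            /* null starts the iteration */
        while (JSScopeProperty *sprop = JS_PropertyIterator(obj, &iter)) {
            JSPropertyDesc pd;
            if (!JS_GetPropertyDesc(cx, obj, sprop, &pd))
                break;
            /* pd.id, pd.value and pd.flags describe the property; report them here. */
        }
    }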

View file

@ -111,17 +111,10 @@ js_TraceWatchPoints(JSTracer *trc, JSObject *obj);
extern void
js_SweepWatchPoints(JSContext *cx);
extern JSScopeProperty *
js_FindWatchPoint(JSRuntime *rt, JSScope *scope, jsid id);
#ifdef __cplusplus
/*
* NB: callers outside of jsdbgapi.c must pass non-null scope.
*/
extern js::PropertyOp
js_GetWatchedSetter(JSRuntime *rt, JSScope *scope,
const JSScopeProperty *sprop);
extern const js::Shape *
js_FindWatchPoint(JSRuntime *rt, JSObject *obj, jsid id);
extern JSBool
js_watch_set(JSContext *cx, JSObject *obj, jsid id, js::Value *vp);
@ -380,11 +373,13 @@ typedef struct JSPropertyDescArray {
JSPropertyDesc *array; /* alloc'd by Get, freed by Put */
} JSPropertyDescArray;
typedef struct JSScopeProperty JSScopeProperty;
extern JS_PUBLIC_API(JSScopeProperty *)
JS_PropertyIterator(JSObject *obj, JSScopeProperty **iteratorp);
extern JS_PUBLIC_API(JSBool)
JS_GetPropertyDesc(JSContext *cx, JSObject *obj, JSScopeProperty *sprop,
JS_GetPropertyDesc(JSContext *cx, JSObject *obj, JSScopeProperty *shape,
JSPropertyDesc *pd);
extern JS_PUBLIC_API(JSBool)

View file

@ -1275,9 +1275,9 @@ JSTreeContext::ensureSharpSlots()
return false;
sharpSlotBase = fun->u.i.nvars;
if (!js_AddLocal(cx, fun, sharpArrayAtom, JSLOCAL_VAR))
if (!fun->addLocal(cx, sharpArrayAtom, JSLOCAL_VAR))
return false;
if (!js_AddLocal(cx, fun, sharpDepthAtom, JSLOCAL_VAR))
if (!fun->addLocal(cx, sharpDepthAtom, JSLOCAL_VAR))
return false;
} else {
/*
@ -1561,10 +1561,6 @@ js_DefineCompileTimeConstant(JSContext *cx, JSCodeGenerator *cg, JSAtom *atom,
JSStmtInfo *
js_LexicalLookup(JSTreeContext *tc, JSAtom *atom, jsint *slotp, JSStmtInfo *stmt)
{
JSObject *obj;
JSScope *scope;
JSScopeProperty *sprop;
if (!stmt)
stmt = tc->topScopeStmt;
for (; stmt; stmt = stmt->downScope) {
@ -1575,17 +1571,17 @@ js_LexicalLookup(JSTreeContext *tc, JSAtom *atom, jsint *slotp, JSStmtInfo *stmt
if (!(stmt->flags & SIF_SCOPE))
continue;
obj = stmt->blockObj;
JSObject *obj = stmt->blockObj;
JS_ASSERT(obj->getClass() == &js_BlockClass);
scope = obj->scope();
sprop = scope->lookup(ATOM_TO_JSID(atom));
if (sprop) {
JS_ASSERT(sprop->hasShortID());
const Shape *shape = obj->nativeLookup(ATOM_TO_JSID(atom));
if (shape) {
JS_ASSERT(shape->hasShortID());
if (slotp) {
JS_ASSERT(obj->fslots[JSSLOT_BLOCK_DEPTH].isInt32());
*slotp = obj->fslots[JSSLOT_BLOCK_DEPTH].toInt32() +
sprop->shortid;
shape->shortid;
}
return stmt;
}
@ -1634,30 +1630,29 @@ LookupCompileTimeConstant(JSContext *cx, JSCodeGenerator *cg, JSAtom *atom,
* nor can prop be deleted.
*/
if (cg->inFunction()) {
if (js_LookupLocal(cx, cg->fun, atom, NULL) != JSLOCAL_NONE)
if (cg->fun->lookupLocal(cx, atom, NULL) != JSLOCAL_NONE)
break;
} else {
JS_ASSERT(cg->compileAndGo());
obj = cg->scopeChain;
JS_LOCK_OBJ(cx, obj);
JSScope *scope = obj->scope();
JSScopeProperty *sprop = scope->lookup(ATOM_TO_JSID(atom));
if (sprop) {
const Shape *shape = obj->nativeLookup(ATOM_TO_JSID(atom));
if (shape) {
/*
* We're compiling code that will be executed immediately,
* not re-executed against a different scope chain and/or
* variable object. Therefore we can get constant values
* from our variable object here.
*/
if (!sprop->writable() && !sprop->configurable() &&
sprop->hasDefaultGetter() && SPROP_HAS_VALID_SLOT(sprop, scope)) {
*constp = obj->lockedGetSlot(sprop->slot);
if (!shape->writable() && !shape->configurable() &&
shape->hasDefaultGetter() && obj->containsSlot(shape->slot)) {
*constp = obj->lockedGetSlot(shape->slot);
}
}
JS_UNLOCK_SCOPE(cx, scope);
JS_UNLOCK_OBJ(cx, obj);
if (sprop)
if (shape)
break;
}
}
@ -1852,8 +1847,10 @@ EmitEnterBlock(JSContext *cx, JSParseNode *pn, JSCodeGenerator *cg)
#endif
}
blockObj->scope()->freeslot = base;
return blockObj->growSlots(cx, base);
if (!blockObj->growSlots(cx, base))
return false;
blockObj->freeslot = base;
return true;
}
/*
@ -1904,7 +1901,7 @@ MakeUpvarForEval(JSParseNode *pn, JSCodeGenerator *cg)
JSAtom *atom = pn->pn_atom;
uintN index;
JSLocalKind localKind = js_LookupLocal(cx, fun, atom, &index);
JSLocalKind localKind = fun->lookupLocal(cx, atom, &index);
if (localKind == JSLOCAL_NONE)
return true;
@ -1914,10 +1911,8 @@ MakeUpvarForEval(JSParseNode *pn, JSCodeGenerator *cg)
JSAtomListElement *ale = cg->upvarList.lookup(atom);
if (!ale) {
if (cg->inFunction() &&
!js_AddLocal(cx, cg->fun, atom, JSLOCAL_UPVAR)) {
if (cg->inFunction() && !cg->fun->addLocal(cx, atom, JSLOCAL_UPVAR))
return false;
}
ale = cg->upvarList.add(cg->parser, atom);
if (!ale)
@ -2206,7 +2201,7 @@ BindNameToSlot(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
return JS_TRUE;
if (FUN_FLAT_CLOSURE(cg->fun)) {
op = JSOP_GETDSLOT;
op = JSOP_GETFCSLOT;
} else {
/*
* The function we're compiling may not be heavyweight, but if it
@ -2233,7 +2228,7 @@ BindNameToSlot(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
if (ale) {
index = ALE_INDEX(ale);
} else {
if (!js_AddLocal(cx, cg->fun, atom, JSLOCAL_UPVAR))
if (!cg->fun->addLocal(cx, atom, JSLOCAL_UPVAR))
return JS_FALSE;
ale = cg->upvarList.add(cg->parser, atom);
@ -2601,8 +2596,8 @@ EmitNameOp(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
case JSOP_GETUPVAR:
op = JSOP_CALLUPVAR;
break;
case JSOP_GETDSLOT:
op = JSOP_CALLDSLOT;
case JSOP_GETFCSLOT:
op = JSOP_CALLFCSLOT;
break;
default:
JS_ASSERT(op == JSOP_ARGUMENTS || op == JSOP_CALLEE);
@ -4446,7 +4441,7 @@ js_EmitTree(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
#ifdef DEBUG
JSLocalKind localKind =
#endif
js_LookupLocal(cx, cg->fun, fun->atom, &slot);
cg->fun->lookupLocal(cx, fun->atom, &slot);
JS_ASSERT(localKind == JSLOCAL_VAR || localKind == JSLOCAL_CONST);
JS_ASSERT(index < JS_BIT(20));
pn->pn_index = index;
@ -7307,8 +7302,8 @@ js_FinishTakingTryNotes(JSCodeGenerator *cg, JSTryNoteArray *array)
* cloned function objects and with the compiler-created clone-parent. There
* are nregexps = script->regexps()->length such reserved slots in each
* function object cloned from fun->object. NB: during compilation, a funobj
* slots element must never be allocated, because js_AllocSlot could hand out
* one of the slots that should be given to a regexp clone.
* slots element must never be allocated, because JSObject::allocSlot could
* hand out one of the slots that should be given to a regexp clone.
*
* If the code being compiled is global code, the cloned regexp are stored in
* fp->vars slot after cg->ngvars and to protect regexp slots from GC we set

File diff suppressed because it is too large. Load diff

View file

@ -48,21 +48,6 @@
#include "jsatom.h"
#include "jsstr.h"
typedef struct JSLocalNameMap JSLocalNameMap;
/*
* Depending on the number of arguments and variables in the function their
* names and attributes are stored either as a single atom or as an array of
* tagged atoms (when there are few locals) or as a hash-based map (when there
* are many locals). In the first 2 cases the lowest bit of the atom is used
* as a tag to distinguish const from var. See jsfun.c for details.
*/
typedef union JSLocalNames {
jsuword taggedAtom;
jsuword *array;
JSLocalNameMap *map;
} JSLocalNames;
/*
* The high two bits of JSFunction.flags encode whether the function is native
* or interpreted, and if interpreted, what kind of optimized closure form (if
@ -143,6 +128,29 @@ typedef union JSLocalNames {
JS_ASSERT((fun)->flags & JSFUN_TRCINFO), \
fun->u.n.trcinfo)
/*
* Formal parameters, local variables, and upvars are stored in a shape tree
* path with its latest node at fun->u.i.names. The addLocal, lookupLocal, and
* getLocalNameArray methods abstract away this detail.
*
* The lastArg, lastVar, and lastUpvar JSFunction methods provide more direct
* access to the shape path. These methods may be used to make a Shape::Range
* for iterating over the relevant shapes from youngest to oldest (i.e., last
* or right-most to first or left-most in source order).
*
* Sometimes iteration order must be from oldest to youngest, however. For such
* cases, use getLocalNameArray. The RAII helper class js::AutoLocalNameArray,
* defined in jscntxt.h, should be used where possible instead of direct calls
* to getLocalNameArray.
*/
enum JSLocalKind {
JSLOCAL_NONE,
JSLOCAL_ARG,
JSLOCAL_VAR,
JSLOCAL_CONST,
JSLOCAL_UPVAR
};
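
The comment above sketches how these methods sit on top of the shape path; as a rough illustration (not part of this patch, helper name invented, and assuming an interpreted function whose u.i.names is non-null), the youngest-to-oldest walk it mentions would look like:

    static void
    WalkBindings(JSFunction *fun)
    {
        for (js::Shape::Range r = fun->lastUpvar()->all(); !r.empty(); r.popFront()) {
            const js::Shape &shape = r.front();
            /* shape.id names the binding; use getLocalNameArray for source order. */
        }
    }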
struct JSFunction : public JSObject
{
uint16 nargs; /* maximum number of specified arguments,
@ -171,7 +179,7 @@ struct JSFunction : public JSObject
indirect eval; if true, then this function
object's proto is the wrapped object */
JSScript *script; /* interpreted bytecode descriptor or null */
JSLocalNames names; /* argument and variable names */
js::Shape *names; /* argument and variable names */
} i;
} u;
JSAtom *atom; /* name for diagnostics and decompiling */
@ -186,7 +194,7 @@ struct JSFunction : public JSObject
inline bool inStrictMode() const;
inline bool isBound() const;
bool isBound() const;
uintN countVars() const {
JS_ASSERT(FUN_INTERPRETED(this));
@ -213,13 +221,51 @@ struct JSFunction : public JSObject
int sharpSlotBase(JSContext *cx);
uint32 countUpvarSlots() const;
const js::Shape *lastArg() const;
const js::Shape *lastVar() const;
const js::Shape *lastUpvar() const { return u.i.names; }
bool addLocal(JSContext *cx, JSAtom *atom, JSLocalKind kind);
/*
* Look up an argument or variable name returning its kind when found or
* JSLOCAL_NONE when no such name exists. When indexp is not null and the
* name exists, *indexp will receive the index of the corresponding
* argument or variable.
*/
JSLocalKind lookupLocal(JSContext *cx, JSAtom *atom, uintN *indexp);
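
Illustrative call pattern (mirrors MakeUpvarForEval and the other call sites updated later in this patch; cx, fun, and atom are the caller's):

    uintN index;
    JSLocalKind kind = fun->lookupLocal(cx, atom, &index);
    if (kind == JSLOCAL_NONE) {
        /* atom names no argument, variable, or upvar of fun. */
    }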
/*
* Function and macros to work with local names as an array of words.
* getLocalNameArray returns the array, or null if we are out of memory.
* This function must be called only when fun->hasLocalNames().
*
* The supplied pool is used to allocate the returned array, so the caller
* is obligated to mark and release to free it.
*
* The elements of the array with index less than fun->nargs correspond to
* the names of function formal parameters. An index >= fun->nargs
* addresses a var binding. Use JS_LOCAL_NAME_TO_ATOM to convert array's
* element to an atom pointer. This pointer can be null when the element is
* for a formal parameter corresponding to a destructuring pattern.
*
* If nameWord does not name a formal parameter, use JS_LOCAL_NAME_IS_CONST
* to check if nameWord corresponds to the const declaration.
*/
jsuword *getLocalNameArray(JSContext *cx, struct JSArenaPool *pool);
void freezeLocalNames(JSContext *cx);
/*
* If fun's formal parameters include any duplicate names, return one
* of them (chosen arbitrarily). If they are all unique, return NULL.
*/
JSAtom *findDuplicateFormal() const;
uint32 countInterpretedReservedSlots() const;
#define JS_LOCAL_NAME_TO_ATOM(nameWord) ((JSAtom *) ((nameWord) & ~(jsuword) 1))
#define JS_LOCAL_NAME_IS_CONST(nameWord) ((((nameWord) & (jsuword) 1)) != 0)
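
A minimal sketch of the oldest-to-youngest traversal described above, written against the js::AutoLocalNameArray helper (the same pattern the JSOP_GETUPVAR_DBG/JSOP_CALLUPVAR_DBG case uses later in this patch); illustrative only, and it walks just the formals, which occupy indexes below nargs:

    js::AutoLocalNameArray names(cx, fun);
    if (!names)
        return false;
    for (uintN i = 0; i < fun->nargs; i++) {
        JSAtom *atom = JS_LOCAL_NAME_TO_ATOM(names[i]);
        /* A null atom means this formal is a destructuring pattern. */
    }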
bool mightEscape() const {
return FUN_INTERPRETED(this) && (FUN_FLAT_CLOSURE(this) || u.i.nupvars == 0);
@ -262,6 +308,10 @@ struct JSFunction : public JSObject
JS_ASSERT(joinable());
fslots[METHOD_ATOM_SLOT].setString(ATOM_TO_STRING(atom));
}
/* Number of extra fixed function object slots besides JSSLOT_PRIVATE. */
static const uint32 CLASS_RESERVED_SLOTS = JSObject::FUN_CLASS_RESERVED_SLOTS;
static const uint32 FIRST_FREE_SLOT = JSSLOT_PRIVATE + CLASS_RESERVED_SLOTS + 1;
};
JS_STATIC_ASSERT(sizeof(JSFunction) % JS_GCTHING_ALIGN == 0);
@ -296,8 +346,16 @@ JS_STATIC_ASSERT(sizeof(JSFunction) % JS_GCTHING_ALIGN == 0);
* single-threaded objects and GC heaps.
*/
extern js::Class js_ArgumentsClass;
namespace js {
extern Class StrictArgumentsClass;
struct ArgumentsData {
js::Value callee;
js::Value slots[1];
};
}
inline bool
@ -318,12 +376,18 @@ JSObject::isArguments() const
return isNormalArguments() || isStrictArguments();
}
#define JS_ARGUMENT_OBJECT_ON_TRACE ((void *)0xa126)
#define JS_ARGUMENTS_OBJECT_ON_TRACE ((void *)0xa126)
extern JS_PUBLIC_DATA(js::Class) js_CallClass;
extern JS_PUBLIC_DATA(js::Class) js_FunctionClass;
extern js::Class js_DeclEnvClass;
extern const uint32 CALL_CLASS_FIXED_RESERVED_SLOTS;
extern const uint32 CALL_CLASS_RESERVED_SLOTS;
inline bool
JSObject::isCall() const
{
return getClass() == &js_CallClass;
}
inline bool
JSObject::isFunction() const
@ -331,18 +395,6 @@ JSObject::isFunction() const
return getClass() == &js_FunctionClass;
}
inline bool
JSObject::isCallable()
{
return isFunction() || getClass()->call;
}
static inline bool
js_IsCallable(const js::Value &v)
{
return v.isObject() && v.toObject().isCallable();
}
/*
* NB: jsapi.h and jsobj.h must be included before any call to this macro.
*/
@ -419,6 +471,9 @@ CloneFunctionObject(JSContext *cx, JSFunction *fun, JSObject *parent)
return js_CloneFunctionObject(cx, fun, parent, proto);
}
extern JSObject * JS_FASTCALL
js_AllocFlatClosure(JSContext *cx, JSFunction *fun, JSObject *scopeChain);
extern JS_REQUIRES_STACK JSObject *
js_NewFlatClosure(JSContext *cx, JSFunction *fun);
@ -531,56 +586,6 @@ JS_STATIC_ASSERT(((JS_ARGS_LENGTH_MAX << 1) | 1) <= JSVAL_INT_MAX);
extern JSBool
js_XDRFunctionObject(JSXDRState *xdr, JSObject **objp);
typedef enum JSLocalKind {
JSLOCAL_NONE,
JSLOCAL_ARG,
JSLOCAL_VAR,
JSLOCAL_CONST,
JSLOCAL_UPVAR
} JSLocalKind;
extern JSBool
js_AddLocal(JSContext *cx, JSFunction *fun, JSAtom *atom, JSLocalKind kind);
/*
* Look up an argument or variable name returning its kind when found or
* JSLOCAL_NONE when no such name exists. When indexp is not null and the name
* exists, *indexp will receive the index of the corresponding argument or
* variable.
*/
extern JSLocalKind
js_LookupLocal(JSContext *cx, JSFunction *fun, JSAtom *atom, uintN *indexp);
/*
* Functions to work with local names as an array of words.
*
* js_GetLocalNameArray returns the array, or null if we are out of memory.
* This function must be called only when fun->hasLocalNames().
*
* The supplied pool is used to allocate the returned array, so the caller is
* obligated to mark and release to free it.
*
* The elements of the array with index less than fun->nargs correspond to the
* names of function formal parameters. An index >= fun->nargs addresses a var
* binding. Use JS_LOCAL_NAME_TO_ATOM to convert array's element to an atom
* pointer. This pointer can be null when the element is for a formal parameter
* corresponding to a destructuring pattern.
*
* If nameWord does not name a formal parameter, use JS_LOCAL_NAME_IS_CONST to
* check if nameWord corresponds to the const declaration.
*/
extern jsuword *
js_GetLocalNameArray(JSContext *cx, JSFunction *fun, struct JSArenaPool *pool);
#define JS_LOCAL_NAME_TO_ATOM(nameWord) \
((JSAtom *) ((nameWord) & ~(jsuword) 1))
#define JS_LOCAL_NAME_IS_CONST(nameWord) \
((((nameWord) & (jsuword) 1)) != 0)
extern void
js_FreezeLocalNames(JSContext *cx, JSFunction *fun);
extern JSBool
js_fun_apply(JSContext *cx, uintN argc, js::Value *vp);

View file

@ -2024,8 +2024,8 @@ AutoGCRooter::trace(JSTracer *trc)
MarkValue(trc, static_cast<AutoValueRooter *>(this)->val, "js::AutoValueRooter.val");
return;
case SPROP:
static_cast<AutoScopePropertyRooter *>(this)->sprop->trace(trc);
case SHAPE:
static_cast<AutoShapeRooter *>(this)->shape->trace(trc);
return;
case PARSER:
@ -2244,6 +2244,19 @@ MarkRuntime(JSTracer *trc)
for (ThreadDataIter i(rt); !i.empty(); i.popFront())
i.threadData()->mark(trc);
if (rt->emptyArgumentsShape)
rt->emptyArgumentsShape->trace(trc);
if (rt->emptyBlockShape)
rt->emptyBlockShape->trace(trc);
if (rt->emptyCallShape)
rt->emptyCallShape->trace(trc);
if (rt->emptyDeclEnvShape)
rt->emptyDeclEnvShape->trace(trc);
if (rt->emptyEnumeratorShape)
rt->emptyEnumeratorShape->trace(trc);
if (rt->emptyWithShape)
rt->emptyWithShape->trace(trc);
/*
     * We mark extra roots as the last thing so it can use additional
* colors to implement cycle collection.
@ -2319,15 +2332,7 @@ FinalizeObject(JSContext *cx, JSObject *obj, unsigned thingKind)
DTrace::finalizeObject(obj);
if (JS_LIKELY(obj->isNative())) {
JSScope *scope = obj->scope();
if (scope->isSharedEmpty())
static_cast<JSEmptyScope *>(scope)->dropFromGC(cx);
else
scope->destroy(cx);
}
if (obj->hasSlotsArray())
obj->freeSlotsArray(cx);
obj->finish(cx);
}
inline void
@ -2635,7 +2640,7 @@ SweepCompartments(JSContext *cx)
/*
* Common cache invalidation and so forth that must be done before GC. Even if
* GCUntilDone calls GC several times, this work only needs to be done once.
* GCUntilDone calls GC several times, this work needs to be done only once.
*/
static void
PreGCCleanup(JSContext *cx, JSGCInvocationKind gckind)
@ -2666,8 +2671,7 @@ PreGCCleanup(JSContext *cx, JSGCInvocationKind gckind)
#endif
) {
rt->gcRegenShapes = true;
rt->gcRegenShapesScopeFlag ^= JSScope::SHAPE_REGEN;
rt->shapeGen = JSScope::LAST_RESERVED_SHAPE;
rt->shapeGen = Shape::LAST_RESERVED_SHAPE;
rt->protoHazardShape = 0;
}
@ -2745,7 +2749,7 @@ MarkAndSweep(JSContext *cx GCTIMER_PARAM)
#ifdef DEBUG
/* Save the pre-sweep count of scope-mapped properties. */
rt->liveScopePropsPreSweep = rt->liveScopeProps;
rt->liveObjectPropsPreSweep = rt->liveObjectProps;
#endif
/*
@ -2780,10 +2784,10 @@ MarkAndSweep(JSContext *cx GCTIMER_PARAM)
SweepCompartments(cx);
/*
* Sweep the runtime's property tree after finalizing objects, in case any
* Sweep the runtime's property trees after finalizing objects, in case any
* had watchpoints referencing tree nodes.
*/
js::SweepScopeProperties(cx);
js::PropertyTree::sweepShapes(cx);
/*
* Sweep script filenames after sweeping functions in the generic loop

View file

@ -294,7 +294,7 @@ js_NewGCExternalString(JSContext *cx, uintN type)
return (JSString *) js_NewFinalizableGCThing(cx, type);
}
static inline JSFunction*
static inline JSFunction *
js_NewGCFunction(JSContext *cx)
{
JSFunction* obj = (JSFunction *)js_NewFinalizableGCThing(cx, FINALIZE_FUNCTION);
@ -383,7 +383,7 @@ class BackgroundSweepTask : public JSBackgroundTask {
BackgroundSweepTask()
: freeCursor(NULL), freeCursorEnd(NULL) { }
void freeLater(void* ptr) {
void freeLater(void *ptr) {
if (freeCursor != freeCursorEnd)
*freeCursor++ = ptr;
else

View file

@ -306,7 +306,7 @@ ComputeThisFromArgv(JSContext *cx, Value *argv)
}
thisp = &argv[-1].toObject();
if (thisp->getClass() == &js_CallClass || thisp->getClass() == &js_BlockClass)
if (thisp->isCall() || thisp->isBlock())
return ComputeGlobalThis(cx, argv);
return CallThisObjectHook(cx, thisp, argv);
@ -379,7 +379,7 @@ js_OnUnknownMethod(JSContext *cx, Value *vp)
* NoSuchMethod helper objects own no manually allocated resources.
*/
obj->map = NULL;
obj->init(&js_NoSuchMethodClass, NULL, NULL, tvr.value());
obj->init(&js_NoSuchMethodClass, NULL, NULL, tvr.value(), cx);
obj->fslots[JSSLOT_SAVED_ID] = vp[0];
vp[0].setObject(*obj);
}
@ -432,10 +432,10 @@ class AutoPreserveEnumerators {
};
static JS_REQUIRES_STACK bool
callJSNative(JSContext *cx, CallOp callOp, JSObject *thisp, uintN argc, Value *argv, Value *rval)
CallJSNative(JSContext *cx, CallOp callOp, JSObject *thisp, uintN argc, Value *argv, Value *rval)
{
Value *vp = argv - 2;
if (callJSFastNative(cx, callOp, argc, vp)) {
if (CallJSFastNative(cx, callOp, argc, vp)) {
*rval = JS_RVAL(cx, vp);
return true;
}
@ -453,7 +453,7 @@ InvokeCommon(JSContext *cx, JSFunction *fun, JSScript *script, T native,
#ifdef DEBUG_NOT_THROWING
JSBool alreadyThrowing = cx->throwing;
#endif
JSBool ok = callJSFastNative(cx, (FastNative) native, args.argc(), args.base());
JSBool ok = CallJSFastNative(cx, (FastNative) native, args.argc(), args.base());
JS_RUNTIME_METER(cx->runtime, nativeCalls);
#ifdef DEBUG_NOT_THROWING
if (ok && !alreadyThrowing)
@ -562,7 +562,7 @@ InvokeCommon(JSContext *cx, JSFunction *fun, JSScript *script, T native,
#endif
JSObject *thisp = fp->getThisValue().toObjectOrNull();
ok = callJSNative(cx, native, thisp, fp->numActualArgs(), fp->argv,
ok = CallJSNative(cx, native, thisp, fp->numActualArgs(), fp->argv,
fp->addressReturnValue());
JS_ASSERT(cx->fp() == fp);
@ -920,7 +920,7 @@ CheckRedeclaration(JSContext *cx, JSObject *obj, jsid id, uintN attrs,
if (!prop)
return true;
if (obj2->isNative()) {
oldAttrs = ((JSScopeProperty *) prop)->attributes();
oldAttrs = ((Shape *) prop)->attributes();
/* If our caller doesn't want prop, unlock obj2. */
if (!propp)
@ -1923,27 +1923,27 @@ AssertValidPropertyCacheHit(JSContext *cx, JSScript *script, JSFrameRegs& regs,
JS_ASSERT(prop);
JS_ASSERT(pobj == found);
JSScopeProperty *sprop = (JSScopeProperty *) prop;
const Shape *shape = (Shape *) prop;
if (entry->vword.isSlot()) {
JS_ASSERT(entry->vword.toSlot() == sprop->slot);
JS_ASSERT(!sprop->isMethod());
} else if (entry->vword.isSprop()) {
JS_ASSERT(entry->vword.toSprop() == sprop);
JS_ASSERT_IF(sprop->isMethod(),
&sprop->methodObject() == &pobj->lockedGetSlot(sprop->slot).toObject());
JS_ASSERT(entry->vword.toSlot() == shape->slot);
JS_ASSERT(!shape->isMethod());
} else if (entry->vword.isShape()) {
JS_ASSERT(entry->vword.toShape() == shape);
JS_ASSERT_IF(shape->isMethod(),
&shape->methodObject() == &pobj->lockedGetSlot(shape->slot).toObject());
} else {
Value v;
JS_ASSERT(entry->vword.isFunObj());
JS_ASSERT(!entry->vword.isNull());
JS_ASSERT(pobj->scope()->brandedOrHasMethodBarrier());
JS_ASSERT(sprop->hasDefaultGetterOrIsMethod());
JS_ASSERT(SPROP_HAS_VALID_SLOT(sprop, pobj->scope()));
v = pobj->lockedGetSlot(sprop->slot);
JS_ASSERT(pobj->brandedOrHasMethodBarrier());
JS_ASSERT(shape->hasDefaultGetterOrIsMethod());
JS_ASSERT(pobj->containsSlot(shape->slot));
v = pobj->lockedGetSlot(shape->slot);
JS_ASSERT(&entry->vword.toFunObj() == &v.toObject());
if (sprop->isMethod()) {
if (shape->isMethod()) {
JS_ASSERT(js_CodeSpec[*regs.pc].format & JOF_CALLOP);
JS_ASSERT(&sprop->methodObject() == &v.toObject());
JS_ASSERT(&shape->methodObject() == &v.toObject());
}
}
@ -1964,7 +1964,7 @@ JS_STATIC_ASSERT(JSOP_GETGVAR_LENGTH == JSOP_CALLGVAR_LENGTH);
JS_STATIC_ASSERT(JSOP_GETUPVAR_LENGTH == JSOP_CALLUPVAR_LENGTH);
JS_STATIC_ASSERT(JSOP_GETUPVAR_DBG_LENGTH == JSOP_CALLUPVAR_DBG_LENGTH);
JS_STATIC_ASSERT(JSOP_GETUPVAR_DBG_LENGTH == JSOP_GETUPVAR_LENGTH);
JS_STATIC_ASSERT(JSOP_GETDSLOT_LENGTH == JSOP_CALLDSLOT_LENGTH);
JS_STATIC_ASSERT(JSOP_GETFCSLOT_LENGTH == JSOP_CALLFCSLOT_LENGTH);
JS_STATIC_ASSERT(JSOP_GETARG_LENGTH == JSOP_CALLARG_LENGTH);
JS_STATIC_ASSERT(JSOP_GETLOCAL_LENGTH == JSOP_CALLLOCAL_LENGTH);
JS_STATIC_ASSERT(JSOP_XMLNAME_LENGTH == JSOP_CALLXMLNAME_LENGTH);
@ -2309,7 +2309,7 @@ Interpret(JSContext *cx)
/*
* Optimized Get and SetVersion for proper script language versioning.
*
* If any native method or Class/JSObjectOps hook calls js_SetVersion
* If any native method or a Class or ObjectOps hook calls js_SetVersion
* and changes cx->version, the effect will "stick" and we will stop
* maintaining currentVersion. This is relied upon by testsuites, for
* the most part -- web browsers select version before compiling and not
@ -3019,32 +3019,32 @@ BEGIN_CASE(JSOP_PICK)
}
END_CASE(JSOP_PICK)
#define NATIVE_GET(cx,obj,pobj,sprop,getHow,vp) \
#define NATIVE_GET(cx,obj,pobj,shape,getHow,vp) \
JS_BEGIN_MACRO \
if (sprop->hasDefaultGetter()) { \
if (shape->hasDefaultGetter()) { \
/* Fast path for Object instance properties. */ \
JS_ASSERT((sprop)->slot != SPROP_INVALID_SLOT || \
!sprop->hasDefaultSetter()); \
if (((sprop)->slot != SPROP_INVALID_SLOT)) \
*(vp) = (pobj)->lockedGetSlot((sprop)->slot); \
JS_ASSERT((shape)->slot != SHAPE_INVALID_SLOT || \
!shape->hasDefaultSetter()); \
if (((shape)->slot != SHAPE_INVALID_SLOT)) \
*(vp) = (pobj)->lockedGetSlot((shape)->slot); \
else \
(vp)->setUndefined(); \
} else { \
if (!js_NativeGet(cx, obj, pobj, sprop, getHow, vp)) \
if (!js_NativeGet(cx, obj, pobj, shape, getHow, vp)) \
goto error; \
} \
JS_END_MACRO
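
Call sites later in this file invoke the macro unchanged apart from the shape rename, e.g. the do_native_get path of JSOP_CALLNAME:

    NATIVE_GET(cx, obj, obj2, shape, JSGET_METHOD_BARRIER, &rval);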
#define NATIVE_SET(cx,obj,sprop,entry,vp) \
#define NATIVE_SET(cx,obj,shape,entry,vp) \
JS_BEGIN_MACRO \
TRACE_2(SetPropHit, entry, sprop); \
if (sprop->hasDefaultSetter() && \
(sprop)->slot != SPROP_INVALID_SLOT && \
!(obj)->scope()->brandedOrHasMethodBarrier()) { \
TRACE_2(SetPropHit, entry, shape); \
if (shape->hasDefaultSetter() && \
(shape)->slot != SHAPE_INVALID_SLOT && \
!(obj)->brandedOrHasMethodBarrier()) { \
/* Fast path for, e.g., plain Object instance properties. */ \
(obj)->lockedSetSlot((sprop)->slot, *vp); \
(obj)->lockedSetSlot((shape)->slot, *vp); \
} else { \
if (!js_NativeSet(cx, obj, sprop, false, vp)) \
if (!js_NativeSet(cx, obj, shape, false, vp)) \
goto error; \
} \
JS_END_MACRO
@ -3711,7 +3711,7 @@ BEGIN_CASE(JSOP_NAMEDEC)
ASSERT_VALID_PROPERTY_CACHE_HIT(0, obj, obj2, entry);
if (obj == obj2 && entry->vword.isSlot()) {
uint32 slot = entry->vword.toSlot();
JS_ASSERT(slot < obj->scope()->freeslot);
JS_ASSERT(slot < obj->freeslot);
Value &rref = obj->getSlotRef(slot);
int32_t tmp;
if (JS_LIKELY(rref.isInt32() && CanIncDecWithoutOverflow(tmp = rref.toInt32()))) {
@ -3922,7 +3922,7 @@ BEGIN_CASE(JSOP_UNBRANDTHIS)
JSObject *obj = fp->getThisObject(cx);
if (!obj)
goto error;
if (!obj->unbrand(cx))
if (obj->isNative() && !obj->unbrand(cx))
goto error;
}
END_CASE(JSOP_UNBRANDTHIS)
@ -3989,12 +3989,12 @@ BEGIN_CASE(JSOP_GETXPROP)
rval.setObject(entry->vword.toFunObj());
} else if (entry->vword.isSlot()) {
uint32 slot = entry->vword.toSlot();
JS_ASSERT(slot < obj2->scope()->freeslot);
JS_ASSERT(slot < obj2->freeslot);
rval = obj2->lockedGetSlot(slot);
} else {
JS_ASSERT(entry->vword.isSprop());
JSScopeProperty *sprop = entry->vword.toSprop();
NATIVE_GET(cx, obj, obj2, sprop,
JS_ASSERT(entry->vword.isShape());
const Shape *shape = entry->vword.toShape();
NATIVE_GET(cx, obj, obj2, shape,
fp->hasIMacroPC() ? JSGET_NO_METHOD_BARRIER : JSGET_METHOD_BARRIER,
&rval);
}
@ -4084,12 +4084,12 @@ BEGIN_CASE(JSOP_CALLPROP)
rval.setObject(entry->vword.toFunObj());
} else if (entry->vword.isSlot()) {
uint32 slot = entry->vword.toSlot();
JS_ASSERT(slot < obj2->scope()->freeslot);
JS_ASSERT(slot < obj2->freeslot);
rval = obj2->lockedGetSlot(slot);
} else {
JS_ASSERT(entry->vword.isSprop());
JSScopeProperty *sprop = entry->vword.toSprop();
NATIVE_GET(cx, &objv.toObject(), obj2, sprop, JSGET_NO_METHOD_BARRIER, &rval);
JS_ASSERT(entry->vword.isShape());
const Shape *shape = entry->vword.toShape();
NATIVE_GET(cx, &objv.toObject(), obj2, shape, JSGET_NO_METHOD_BARRIER, &rval);
}
regs.sp[-1] = rval;
PUSH_COPY(lval);
@ -4190,6 +4190,8 @@ BEGIN_CASE(JSOP_SETMETHOD)
JSObject *obj2;
JSAtom *atom;
if (cache->testForSet(cx, regs.pc, obj, &entry, &obj2, &atom)) {
JS_ASSERT(!obj->sealed());
/*
* Fast property cache hit, only partially confirmed by
* testForSet. We know that the entry applies to regs.pc and
@ -4199,140 +4201,110 @@ BEGIN_CASE(JSOP_SETMETHOD)
* directly to obj by this set, or on an existing "own"
* property, or on a prototype property that has a setter.
*/
JS_ASSERT(entry->vword.isSprop());
JSScopeProperty *sprop = entry->vword.toSprop();
JS_ASSERT_IF(sprop->isDataDescriptor(), sprop->writable());
JS_ASSERT_IF(sprop->hasSlot(), entry->vcapTag() == 0);
JSScope *scope = obj->scope();
JS_ASSERT(!scope->sealed());
const Shape *shape = entry->vword.toShape();
JS_ASSERT_IF(shape->isDataDescriptor(), shape->writable());
JS_ASSERT_IF(shape->hasSlot(), entry->vcapTag() == 0);
/*
* Fastest path: check whether the cached sprop is already
* in scope and call NATIVE_SET and break to get out of the
* do-while(0). But we can call NATIVE_SET only if obj owns
* scope or sprop is shared.
* Fastest path: check whether obj already has the cached shape and
* call NATIVE_SET and break to get out of the do-while(0). But we
* can call NATIVE_SET only for a direct or proto-setter hit.
*/
bool checkForAdd;
if (!sprop->hasSlot()) {
if (!entry->adding()) {
if (entry->vcapTag() == 0 ||
((obj2 = obj->getProto()) &&
obj2->isNative() &&
obj2->shape() == entry->vshape())) {
goto fast_set_propcache_hit;
}
(obj2 = obj->getProto()) && obj2->shape() == entry->vshape())
{
#ifdef DEBUG
if (entry->directHit()) {
JS_ASSERT(obj->nativeContains(*shape));
} else {
JS_ASSERT(obj2->nativeContains(*shape));
JS_ASSERT(entry->vcapTag() == 1);
JS_ASSERT(entry->kshape != entry->vshape());
JS_ASSERT(!shape->hasSlot());
}
#endif
/* The cache entry doesn't apply. vshape mismatch. */
checkForAdd = false;
} else if (!scope->isSharedEmpty()) {
if (sprop == scope->lastProperty() || scope->hasProperty(sprop)) {
fast_set_propcache_hit:
PCMETER(cache->pchits++);
PCMETER(cache->setpchits++);
NATIVE_SET(cx, obj, sprop, entry, &rval);
NATIVE_SET(cx, obj, shape, entry, &rval);
break;
}
checkForAdd = sprop->hasSlot() && sprop->parent == scope->lastProperty();
} else {
/*
                     * We check that cx owns obj here and will continue to
* own it after js_GetMutableScope returns so we can
* continue to skip JS_UNLOCK_OBJ calls.
*/
JS_ASSERT(CX_OWNS_OBJECT_TITLE(cx, obj));
scope = js_GetMutableScope(cx, obj);
JS_ASSERT(CX_OWNS_OBJECT_TITLE(cx, obj));
if (!scope)
goto error;
checkForAdd = !sprop->parent;
}
uint32 slot;
if (checkForAdd &&
entry->vshape() == rt->protoHazardShape &&
sprop->hasDefaultSetter() &&
(slot = sprop->slot) == scope->freeslot) {
/*
* Fast path: adding a plain old property that was once
* at the frontier of the property tree, whose slot is
* next to claim among the allocated slots in obj,
* where scope->table has not been created yet.
*
* We may want to remove hazard conditions above and
* inline compensation code here, depending on
* real-world workloads.
*/
PCMETER(cache->pchits++);
PCMETER(cache->addpchits++);
if (slot < obj->numSlots()) {
++scope->freeslot;
} else {
if (!js_AllocSlot(cx, obj, &slot))
if (obj->nativeEmpty()) {
/*
* We check that cx owns obj here and will continue to own
* it after ensureClassReservedSlotsForEmptyObject returns
* so we can continue to skip JS_UNLOCK_OBJ calls.
*/
JS_ASSERT(CX_OWNS_OBJECT_TITLE(cx, obj));
bool ok = obj->ensureClassReservedSlotsForEmptyObject(cx);
JS_ASSERT(CX_OWNS_OBJECT_TITLE(cx, obj));
if (!ok)
goto error;
JS_ASSERT(slot + 1 == scope->freeslot);
}
/*
* If this obj's number of reserved slots differed, or
* if something created a hash table for scope, we must
* pay the price of JSScope::putProperty.
*
* (A built-in object with a pre-allocated but not fixed
* population of reserved slots hook can cause scopes of the
* same shape to have different freeslot values. Arguments,
* Block, Call, and certain Function objects pre-allocate
* reserveds lots this way. This is what causes the slot !=
* sprop->slot case. See js_GetMutableScope. FIXME 558451)
*/
if (slot == sprop->slot && !scope->table) {
scope->extend(cx, sprop);
} else {
JSScopeProperty *sprop2 =
scope->putProperty(cx, sprop->id,
sprop->getter(), sprop->setter(),
slot, sprop->attributes(),
sprop->getFlags(), sprop->shortid);
if (!sprop2) {
js_FreeSlot(cx, obj, slot);
goto error;
uint32 slot;
if (shape->previous() == obj->lastProperty() &&
entry->vshape() == rt->protoHazardShape &&
shape->hasDefaultSetter()) {
slot = shape->slot;
JS_ASSERT(slot == obj->freeslot);
/*
* Fast path: adding a plain old property that was once at
* the frontier of the property tree, whose slot is next to
* claim among the already-allocated slots in obj, where
* shape->table has not been created yet.
*/
PCMETER(cache->pchits++);
PCMETER(cache->addpchits++);
if (slot < obj->numSlots()) {
JS_ASSERT(obj->getSlot(slot).isUndefined());
++obj->freeslot;
JS_ASSERT(obj->freeslot != 0);
} else {
if (!obj->allocSlot(cx, &slot))
goto error;
JS_ASSERT(slot == shape->slot);
}
sprop = sprop2;
/* Simply extend obj's property tree path with shape! */
obj->extend(cx, shape);
/*
* No method change check here because here we are adding a
* new property, not updating an existing slot's value that
* might contain a method of a branded shape.
*/
TRACE_2(SetPropHit, entry, shape);
obj->lockedSetSlot(slot, rval);
/*
* Purge the property cache of the id we may have just
* shadowed in obj's scope and proto chains.
*/
js_PurgeScopeChain(cx, obj, shape->id);
break;
}
/*
* No method change check here because here we are
* adding a new property, not updating an existing
* slot's value that might contain a method of a
* branded scope.
*/
TRACE_2(SetPropHit, entry, sprop);
obj->lockedSetSlot(slot, rval);
/*
* Purge the property cache of the id we may have just
* shadowed in obj's scope and proto chains. We do this
* after unlocking obj's scope to avoid lock nesting.
*/
js_PurgeScopeChain(cx, obj, sprop->id);
break;
}
PCMETER(cache->setpcmisses++);
atom = NULL;
} else if (!atom) {
/*
* Slower property cache hit, fully confirmed by testForSet (in
* the slow path, via fullTest).
* Slower property cache hit, fully confirmed by testForSet (in the
* slow path, via fullTest).
*/
ASSERT_VALID_PROPERTY_CACHE_HIT(0, obj, obj2, entry);
JSScopeProperty *sprop = NULL;
const Shape *shape = NULL;
if (obj == obj2) {
sprop = entry->vword.toSprop();
JS_ASSERT(sprop->writable());
JS_ASSERT(!obj2->scope()->sealed());
NATIVE_SET(cx, obj, sprop, entry, &rval);
shape = entry->vword.toShape();
JS_ASSERT(shape->writable());
JS_ASSERT(!obj2->sealed());
NATIVE_SET(cx, obj, shape, entry, &rval);
}
if (sprop)
if (shape)
break;
}
@ -4733,7 +4705,7 @@ BEGIN_CASE(JSOP_CALLNAME)
{
JSObject *obj = fp->getScopeChain();
JSScopeProperty *sprop;
const Shape *shape;
Value rval;
PropertyCacheEntry *entry;
@ -4749,13 +4721,13 @@ BEGIN_CASE(JSOP_CALLNAME)
if (entry->vword.isSlot()) {
uintN slot = entry->vword.toSlot();
JS_ASSERT(slot < obj2->scope()->freeslot);
JS_ASSERT(slot < obj2->freeslot);
PUSH_COPY(obj2->lockedGetSlot(slot));
goto do_push_obj_if_call;
}
JS_ASSERT(entry->vword.isSprop());
sprop = entry->vword.toSprop();
JS_ASSERT(entry->vword.isShape());
shape = entry->vword.toShape();
goto do_native_get;
}
@ -4782,9 +4754,9 @@ BEGIN_CASE(JSOP_CALLNAME)
if (!obj->getProperty(cx, id, &rval))
goto error;
} else {
sprop = (JSScopeProperty *)prop;
shape = (Shape *)prop;
do_native_get:
NATIVE_GET(cx, obj, obj2, sprop, JSGET_METHOD_BARRIER, &rval);
NATIVE_GET(cx, obj, obj2, shape, JSGET_METHOD_BARRIER, &rval);
JS_UNLOCK_OBJ(cx, obj2);
}
@ -5181,8 +5153,7 @@ BEGIN_CASE(JSOP_CALLUPVAR_DBG)
jsid id;
JSAtom *atom;
{
void *mark = JS_ARENA_MARK(&cx->tempPool);
jsuword *names = js_GetLocalNameArray(cx, fun, &cx->tempPool);
AutoLocalNameArray names(cx, fun);
if (!names)
goto error;
@ -5190,9 +5161,7 @@ BEGIN_CASE(JSOP_CALLUPVAR_DBG)
atom = JS_LOCAL_NAME_TO_ATOM(names[index]);
id = ATOM_TO_JSID(atom);
JSBool ok = js_FindProperty(cx, id, &obj, &obj2, &prop);
JS_ARENA_RELEASE(&cx->tempPool, mark);
if (!ok)
if (!js_FindProperty(cx, id, &obj, &obj2, &prop))
goto error;
}
@ -5213,24 +5182,19 @@ BEGIN_CASE(JSOP_CALLUPVAR_DBG)
}
END_CASE(JSOP_GETUPVAR_DBG)
BEGIN_CASE(JSOP_GETDSLOT)
BEGIN_CASE(JSOP_CALLDSLOT)
BEGIN_CASE(JSOP_GETFCSLOT)
BEGIN_CASE(JSOP_CALLFCSLOT)
{
JS_ASSERT(fp->argv);
JSObject *obj = &fp->argv[-2].toObject();
JS_ASSERT(obj);
JS_ASSERT(obj->dslots);
uintN index = GET_UINT16(regs.pc);
JS_ASSERT(JS_INITIAL_NSLOTS + index < obj->dslots[-1].toPrivateUint32());
JS_ASSERT_IF(obj->scope()->object == obj,
JS_INITIAL_NSLOTS + index < obj->scope()->freeslot);
JSObject *obj = &fp->argv[-2].toObject();
PUSH_COPY(obj->dslots[index]);
if (op == JSOP_CALLDSLOT)
JS_ASSERT(index < obj->getFunctionPrivate()->u.i.nupvars);
PUSH_COPY(obj->getFlatClosureUpvar(index));
if (op == JSOP_CALLFCSLOT)
PUSH_NULL();
}
END_CASE(JSOP_GETDSLOT)
END_CASE(JSOP_GETFCSLOT)
BEGIN_CASE(JSOP_GETGVAR)
BEGIN_CASE(JSOP_CALLGVAR)
@ -5286,13 +5250,12 @@ BEGIN_CASE(JSOP_SETGVAR)
} else {
uint32 slot = (uint32)lref.toInt32();
JS_LOCK_OBJ(cx, obj);
JSScope *scope = obj->scope();
if (!scope->methodWriteBarrier(cx, slot, rref)) {
JS_UNLOCK_SCOPE(cx, scope);
if (!obj->methodWriteBarrier(cx, slot, rref)) {
JS_UNLOCK_OBJ(cx, obj);
goto error;
}
obj->lockedSetSlot(slot, rref);
JS_UNLOCK_SCOPE(cx, scope);
JS_UNLOCK_OBJ(cx, obj);
}
}
END_SET_CASE(JSOP_SETGVAR)
@ -5352,18 +5315,18 @@ BEGIN_CASE(JSOP_DEFVAR)
index < GlobalVarCount(fp) &&
obj2 == obj &&
obj->isNative()) {
JSScopeProperty *sprop = (JSScopeProperty *) prop;
if (!sprop->configurable() &&
SPROP_HAS_VALID_SLOT(sprop, obj->scope()) &&
sprop->hasDefaultGetterOrIsMethod() &&
sprop->hasDefaultSetter()) {
const Shape *shape = (Shape *) prop;
if (!shape->configurable() &&
obj->containsSlot(shape->slot) &&
shape->hasDefaultGetterOrIsMethod() &&
shape->hasDefaultSetter()) {
/*
* Fast globals use frame variables to map the global name's atom
* index to the permanent varobj slot number, tagged as a jsval.
* The atom index for the global's name literal is identical to its
* variable index.
*/
fp->slots()[index].setInt32(sprop->slot);
fp->slots()[index].setInt32(shape->slot);
}
}
@ -5478,8 +5441,8 @@ BEGIN_CASE(JSOP_DEFFUN)
JS_ASSERT_IF(doSet, fp->flags & JSFRAME_EVAL);
if (prop) {
if (parent == pobj &&
parent->getClass() == &js_CallClass &&
(old = ((JSScopeProperty *) prop)->attributes(),
parent->isCall() &&
(old = ((Shape *) prop)->attributes(),
!(old & (JSPROP_GETTER|JSPROP_SETTER)) &&
(old & (JSPROP_ENUMERATE|JSPROP_PERMANENT)) == attrs)) {
/*
@ -5641,7 +5604,7 @@ BEGIN_CASE(JSOP_LAMBDA)
JS_ASSERT(lref.isObject());
JSObject *obj2 = &lref.toObject();
JS_ASSERT(obj2->getClass() == &js_ObjectClass);
JS_ASSERT(obj2->scope()->object == obj2);
JS_ASSERT(obj2->freeslot >= JSSLOT_FREE(&js_ObjectClass));
#endif
fun->setMethodAtom(script->getAtom(GET_FULL_INDEX(JSOP_LAMBDA_LENGTH)));
@ -5911,23 +5874,6 @@ BEGIN_CASE(JSOP_NEWINIT)
obj = NewBuiltinClassInstance(cx, &js_ObjectClass);
if (!obj)
goto error;
if (regs.pc[JSOP_NEWINIT_LENGTH] != JSOP_ENDINIT) {
JS_LOCK_OBJ(cx, obj);
JSScope *scope = js_GetMutableScope(cx, obj);
if (!scope) {
JS_UNLOCK_OBJ(cx, obj);
goto error;
}
/*
* We cannot assume that js_GetMutableScope above creates a scope
* owned by cx and skip JS_UNLOCK_SCOPE. A new object debugger
* hook may add properties to the newly created object, suspend
* the current request and share the object with other threads.
*/
JS_UNLOCK_SCOPE(cx, scope);
}
}
PUSH_OBJECT(*obj);
@ -5954,8 +5900,6 @@ BEGIN_CASE(JSOP_INITMETHOD)
JSObject *obj = &regs.sp[-2].toObject();
JS_ASSERT(obj->isNative());
JSScope *scope = obj->scope();
/*
* Probe the property cache.
*
@ -5963,50 +5907,43 @@ BEGIN_CASE(JSOP_INITMETHOD)
* single-threaded as the debugger can access it from other threads.
* So check first.
*
* On a hit, if the cached sprop has a non-default setter, it must be
* __proto__. If sprop->parent != scope->lastProperty(), there is a
* On a hit, if the cached shape has a non-default setter, it must be
* __proto__. If shape->previous() != obj->lastProperty(), there must be a
* repeated property name. The fast path does not handle these two cases.
*/
PropertyCacheEntry *entry;
JSScopeProperty *sprop;
const Shape *shape;
if (CX_OWNS_OBJECT_TITLE(cx, obj) &&
JS_PROPERTY_CACHE(cx).testForInit(rt, regs.pc, obj, scope, &sprop, &entry) &&
sprop->hasDefaultSetter() &&
sprop->parent == scope->lastProperty())
JS_PROPERTY_CACHE(cx).testForInit(rt, regs.pc, obj, &shape, &entry) &&
shape->hasDefaultSetter() &&
shape->previous() == obj->lastProperty())
{
/* Fast path. Property cache hit. */
uint32 slot = sprop->slot;
JS_ASSERT(slot == scope->freeslot);
uint32 slot = shape->slot;
JS_ASSERT(slot == obj->freeslot);
JS_ASSERT(slot >= JSSLOT_FREE(obj->getClass()));
if (slot < obj->numSlots()) {
++scope->freeslot;
JS_ASSERT(obj->getSlot(slot).isUndefined());
++obj->freeslot;
JS_ASSERT(obj->freeslot != 0);
} else {
if (!js_AllocSlot(cx, obj, &slot))
if (!obj->allocSlot(cx, &slot))
goto error;
JS_ASSERT(slot == sprop->slot);
JS_ASSERT(slot == shape->slot);
}
JS_ASSERT(!scope->lastProperty() ||
scope->shape == scope->lastProperty()->shape);
if (scope->table) {
JSScopeProperty *sprop2 =
scope->addProperty(cx, sprop->id, sprop->getter(), sprop->setter(), slot,
sprop->attributes(), sprop->getFlags(), sprop->shortid);
if (!sprop2) {
js_FreeSlot(cx, obj, slot);
goto error;
}
JS_ASSERT(sprop2 == sprop);
} else {
JS_ASSERT(!scope->isSharedEmpty());
scope->extend(cx, sprop);
}
/* A new object, or one we just extended in a recent initprop op. */
JS_ASSERT(!obj->lastProperty() ||
obj->shape() == obj->lastProperty()->shape);
obj->extend(cx, shape);
/*
* No method change check here because here we are adding a new
* property, not updating an existing slot's value that might
* contain a method of a branded scope.
* contain a method of a branded shape.
*/
TRACE_2(SetPropHit, entry, sprop);
TRACE_2(SetPropHit, entry, shape);
obj->lockedSetSlot(slot, rval);
} else {
PCMETER(JS_PROPERTY_CACHE(cx).inipcmisses++);

View file

@ -469,7 +469,7 @@ struct JSStackFrame
/*
* Fallible getter to compute the correct callee function object, which may
* require deferred cloning due to JSScope::methodReadBarrier. For a frame
* require deferred cloning due to JSObject::methodReadBarrier. For a frame
* with null fun member, return true with *vp set from this->calleeValue(),
* which may not be an object (it could be undefined).
*/

View file

@ -256,12 +256,13 @@ EnumerateNativeProperties(JSContext *cx, JSObject *obj, JSObject *pobj, uintN fl
size_t initialLength = props.length();
/* Collect all unique properties from this object's scope. */
JSScope *scope = pobj->scope();
for (JSScopeProperty *sprop = scope->lastProperty(); sprop; sprop = sprop->parent) {
if (!JSID_IS_DEFAULT_XML_NAMESPACE(sprop->id) &&
!sprop->isAlias() &&
!Enumerate<EnumPolicy>(cx, obj, pobj, sprop->id, sprop->enumerable(), sprop->isSharedPermanent(),
flags, ht, props))
for (Shape::Range r = pobj->lastProperty()->all(); !r.empty(); r.popFront()) {
const Shape &shape = r.front();
if (!JSID_IS_DEFAULT_XML_NAMESPACE(shape.id) &&
!shape.isAlias() &&
!Enumerate<EnumPolicy>(cx, obj, pobj, shape.id, shape.enumerable(),
shape.isSharedPermanent(), flags, ht, props))
{
return false;
}
@ -269,7 +270,7 @@ EnumerateNativeProperties(JSContext *cx, JSObject *obj, JSObject *pobj, uintN fl
Reverse(props.begin() + initialLength, props.end());
JS_UNLOCK_SCOPE(cx, scope);
JS_UNLOCK_OBJ(cx, pobj);
return true;
}
@ -455,8 +456,8 @@ NewIteratorObject(JSContext *cx, uintN flags)
JSObject *obj = js_NewGCObject(cx);
if (!obj)
return false;
obj->map = cx->runtime->emptyEnumeratorScope->hold();
obj->init(&js_IteratorClass, NULL, NULL, NullValue());
obj->init(&js_IteratorClass, NULL, NULL, NullValue(), cx);
obj->setMap(cx->runtime->emptyEnumeratorShape);
return obj;
}
@ -853,7 +854,7 @@ js_SuppressDeletedProperty(JSContext *cx, JSObject *obj, jsid id)
if (prop) {
uintN attrs;
if (obj2.object()->isNative()) {
attrs = ((JSScopeProperty *) prop)->attributes();
attrs = ((Shape *) prop)->attributes();
JS_UNLOCK_OBJ(cx, obj2.object());
} else if (!obj2.object()->getAttributes(cx, id, &attrs)) {
return false;

View file

@ -494,10 +494,9 @@ FinishSharingTitle(JSContext *cx, JSTitle *title)
js_InitLock(&title->lock);
title->u.count = 0; /* NULL may not pun as 0 */
JSScope *scope = TITLE_TO_SCOPE(title);
JSObject *obj = scope->object;
JSObject *obj = TITLE_TO_OBJECT(title);
if (obj) {
uint32 nslots = scope->freeslot;
uint32 nslots = obj->freeslot;
JS_ASSERT(nslots >= JSSLOT_START(obj->getClass()));
for (uint32 i = JSSLOT_START(obj->getClass()); i != nslots; ++i) {
Value v = obj->getSlot(i);
@ -542,9 +541,8 @@ ClaimTitle(JSTitle *title, JSContext *cx)
* has the same thread as cx, or cx->thread runs the GC (in which case
* all other requests must be suspended), or ownercx->thread runs a GC
* and the GC waits for all requests to finish. Set title->ownercx to
* cx so that the matching JS_UNLOCK_SCOPE or JS_UNLOCK_OBJ macro call
* will take the fast path around the corresponding js_UnlockTitle or
* js_UnlockObj function call.
* cx so that the matching JS_UNLOCK_OBJ macro call will take the fast
* path around the corresponding js_UnlockObj function call.
*
* If title->u.link is non-null, title has already been inserted on
* the rt->titleSharingTodo list, because another thread's context
@ -653,8 +651,6 @@ JS_FRIEND_API(jsval)
js_GetSlotThreadSafe(JSContext *cx, JSObject *obj, uint32 slot)
{
jsval v;
JSScope *scope;
JSTitle *title;
#ifndef NSPR_LOCK
JSThinLock *tl;
jsword me;
@ -666,51 +662,44 @@ js_GetSlotThreadSafe(JSContext *cx, JSObject *obj, uint32 slot)
* Native object locking is inlined here to optimize the single-threaded
* and contention-free multi-threaded cases.
*/
scope = obj->scope();
title = &scope->title;
JS_ASSERT(title->ownercx != cx);
JS_ASSERT(slot < scope->freeslot);
JS_ASSERT(obj->title.ownercx != cx);
JS_ASSERT(slot < obj->freeslot);
/*
* Avoid locking if called from the GC. Also avoid locking an object
* owning a sealed scope. If neither of those special cases applies, try
* to claim scope's flyweight lock from whatever context may have had it in
* an earlier request.
* Avoid locking if called from the GC. Also avoid locking a sealed
* object. If neither of those special cases applies, try to claim obj's
* flyweight lock from whatever context may have had it in an earlier
* request.
*/
if (CX_THREAD_IS_RUNNING_GC(cx) ||
scope->sealed() ||
(title->ownercx && ClaimTitle(title, cx))) {
obj->sealed() ||
(obj->title.ownercx && ClaimTitle(&obj->title, cx))) {
return Jsvalify(obj->getSlot(slot));
}
#ifndef NSPR_LOCK
tl = &title->lock;
tl = &obj->title.lock;
me = CX_THINLOCK_ID(cx);
JS_ASSERT(CURRENT_THREAD_IS_ME(me));
if (NativeCompareAndSwap(&tl->owner, 0, me)) {
/*
* Got the lock with one compare-and-swap. Even so, someone else may
* have mutated obj so it now has its own scope and lock, which would
* Got the lock with one compare-and-swap. Even so, someone else may
* have mutated obj so it now has its own title lock, which would
* require either a restart from the top of this routine, or a thin
* lock release followed by fat lock acquisition.
*/
if (scope == obj->scope()) {
v = Jsvalify(obj->getSlot(slot));
if (!NativeCompareAndSwap(&tl->owner, me, 0)) {
/* Assert that scope locks never revert to flyweight. */
JS_ASSERT(title->ownercx != cx);
LOGIT(title, '1');
title->u.count = 1;
js_UnlockObj(cx, obj);
}
return v;
v = Jsvalify(obj->getSlot(slot));
if (!NativeCompareAndSwap(&tl->owner, me, 0)) {
/* Assert that title locks never revert to flyweight. */
JS_ASSERT(obj->title.ownercx != cx);
LOGIT(obj->title, '1');
obj->title.u.count = 1;
js_UnlockObj(cx, obj);
}
if (!NativeCompareAndSwap(&tl->owner, me, 0))
js_Dequeue(tl);
return v;
}
else if (Thin_RemoveWait(ReadWord(tl->owner)) == me) {
if (Thin_RemoveWait(ReadWord(tl->owner)) == me)
return Jsvalify(obj->getSlot(slot));
}
#endif
js_LockObj(cx, obj);
@ -725,17 +714,14 @@ js_GetSlotThreadSafe(JSContext *cx, JSObject *obj, uint32 slot)
* object's scope (whose lock was not flyweight, else we wouldn't be here
* in the first place!).
*/
title = &obj->scope()->title;
if (title->ownercx != cx)
js_UnlockTitle(cx, title);
if (obj->title.ownercx != cx)
js_UnlockTitle(cx, &obj->title);
return v;
}
void
js_SetSlotThreadSafe(JSContext *cx, JSObject *obj, uint32 slot, jsval v)
{
JSTitle *title;
JSScope *scope;
#ifndef NSPR_LOCK
JSThinLock *tl;
jsword me;
@ -754,43 +740,38 @@ js_SetSlotThreadSafe(JSContext *cx, JSObject *obj, uint32 slot, jsval v)
* Native object locking is inlined here to optimize the single-threaded
* and contention-free multi-threaded cases.
*/
scope = obj->scope();
title = &scope->title;
JS_ASSERT(title->ownercx != cx);
JS_ASSERT(slot < scope->freeslot);
JS_ASSERT(obj->title.ownercx != cx);
JS_ASSERT(slot < obj->freeslot);
/*
* Avoid locking if called from the GC. Also avoid locking an object
* owning a sealed scope. If neither of those special cases applies, try
* to claim scope's flyweight lock from whatever context may have had it in
* an earlier request.
* Avoid locking if called from the GC. Also avoid locking a sealed
* object. If neither of those special cases applies, try to claim obj's
* flyweight lock from whatever context may have had it in an earlier
* request.
*/
if (CX_THREAD_IS_RUNNING_GC(cx) ||
scope->sealed() ||
(title->ownercx && ClaimTitle(title, cx))) {
obj->sealed() ||
(obj->title.ownercx && ClaimTitle(&obj->title, cx))) {
obj->lockedSetSlot(slot, Valueify(v));
return;
}
#ifndef NSPR_LOCK
tl = &title->lock;
tl = &obj->title.lock;
me = CX_THINLOCK_ID(cx);
JS_ASSERT(CURRENT_THREAD_IS_ME(me));
if (NativeCompareAndSwap(&tl->owner, 0, me)) {
if (scope == obj->scope()) {
obj->lockedSetSlot(slot, Valueify(v));
if (!NativeCompareAndSwap(&tl->owner, me, 0)) {
/* Assert that scope locks never revert to flyweight. */
JS_ASSERT(title->ownercx != cx);
LOGIT(title, '1');
title->u.count = 1;
js_UnlockObj(cx, obj);
}
return;
obj->lockedSetSlot(slot, Valueify(v));
if (!NativeCompareAndSwap(&tl->owner, me, 0)) {
            /* Assert that title locks never revert to flyweight. */
JS_ASSERT(obj->title.ownercx != cx);
LOGIT(obj->title, '1');
obj->title.u.count = 1;
js_UnlockObj(cx, obj);
}
if (!NativeCompareAndSwap(&tl->owner, me, 0))
js_Dequeue(tl);
} else if (Thin_RemoveWait(ReadWord(tl->owner)) == me) {
return;
}
if (Thin_RemoveWait(ReadWord(tl->owner)) == me) {
obj->lockedSetSlot(slot, Valueify(v));
return;
}
@ -802,9 +783,8 @@ js_SetSlotThreadSafe(JSContext *cx, JSObject *obj, uint32 slot, jsval v)
/*
* Same drill as above, in js_GetSlotThreadSafe.
*/
title = &obj->scope()->title;
if (title->ownercx != cx)
js_UnlockTitle(cx, title);
if (obj->title.ownercx != cx)
js_UnlockTitle(cx, &obj->title);
}
#ifndef NSPR_LOCK
@ -1188,12 +1168,12 @@ js_LockTitle(JSContext *cx, JSTitle *title)
if (Thin_RemoveWait(ReadWord(title->lock.owner)) == me) {
JS_ASSERT(title->u.count > 0);
LOGIT(scope, '+');
LOGIT(title, '+');
title->u.count++;
} else {
ThinLock(&title->lock, me);
JS_ASSERT(title->u.count == 0);
LOGIT(scope, '1');
LOGIT(title, '1');
title->u.count = 1;
}
}
@ -1242,84 +1222,32 @@ js_UnlockTitle(JSContext *cx, JSTitle *title)
ThinUnlock(&title->lock, me);
}
/*
* NB: oldtitle may be null if our caller is js_GetMutableScope and it just
* dropped the last reference to oldtitle.
*/
void
js_DropAllEmptyScopeLocks(JSContext *cx, JSScope *scope)
{
JS_ASSERT(!CX_OWNS_SCOPE_TITLE(cx,scope));
JS_ASSERT(scope->isSharedEmpty());
JS_ASSERT(JS_IS_TITLE_LOCKED(cx, &scope->title));
/*
* Shared empty scope cannot be sealed so we do not need to deal with
* cx->lockedSealedTitle.
*/
JS_ASSERT(!scope->sealed());
JS_ASSERT(cx->thread->lockedSealedTitle != &scope->title);
/*
* Special case in js_LockTitle and js_UnlockTitle for the GC calling
* code that locks, unlocks, or mutates. Nothing to do in these cases,
* because title and newtitle were "locked" by the GC thread, so neither
* was actually locked.
*/
if (CX_THREAD_IS_RUNNING_GC(cx))
return;
/*
* The title cannot be owned at this point by another cx on this or
* another thread as that would imply a missing JS_LOCK_OBJ call.
*/
JS_ASSERT(!scope->title.ownercx);
LOGIT(&scope->title, '0');
scope->title.u.count = 0;
ThinUnlock(&scope->title.lock, CX_THINLOCK_ID(cx));
}
void
js_LockObj(JSContext *cx, JSObject *obj)
{
JSScope *scope;
JSTitle *title;
JS_ASSERT(obj->isNative());
/*
* We must test whether the GC is calling and return without mutating any
* state, especially cx->lockedSealedScope. Note asymmetry with respect to
* js_UnlockObj, which is a thin-layer on top of js_UnlockTitle.
     * state, especially cx->thread->lockedSealedTitle. Note asymmetry with
* respect to js_UnlockObj, which is a thin-layer on top of js_UnlockTitle.
*/
if (CX_THREAD_IS_RUNNING_GC(cx))
return;
for (;;) {
scope = obj->scope();
title = &scope->title;
if (scope->sealed() && !cx->thread->lockedSealedTitle) {
cx->thread->lockedSealedTitle = title;
return;
}
js_LockTitle(cx, title);
/* If obj still has this scope, we're done. */
if (scope == obj->scope())
return;
/* Lost a race with a mutator; retry with obj's new scope. */
js_UnlockTitle(cx, title);
if (obj->sealed() && !cx->thread->lockedSealedTitle) {
cx->thread->lockedSealedTitle = &obj->title;
return;
}
js_LockTitle(cx, &obj->title);
}
void
js_UnlockObj(JSContext *cx, JSObject *obj)
{
JS_ASSERT(obj->isNative());
js_UnlockTitle(cx, &obj->scope()->title);
js_UnlockTitle(cx, &obj->title);
}
void
@ -1334,11 +1262,6 @@ js_InitTitle(JSContext *cx, JSTitle *title)
* null pointer has a non-zero integer representation.
*/
title->u.link = NULL;
#ifdef JS_DEBUG_TITLE_LOCKS
title->file[0] = title->file[1] = title->file[2] = title->file[3] = NULL;
title->line[0] = title->line[1] = title->line[2] = title->line[3] = 0;
#endif
#endif
}
@ -1368,7 +1291,7 @@ js_IsRuntimeLocked(JSRuntime *rt)
JSBool
js_IsObjLocked(JSContext *cx, JSObject *obj)
{
return js_IsTitleLocked(cx, &obj->scope()->title);
return js_IsTitleLocked(cx, &obj->title);
}
JSBool
@ -1378,7 +1301,7 @@ js_IsTitleLocked(JSContext *cx, JSTitle *title)
if (CX_THREAD_IS_RUNNING_GC(cx))
return JS_TRUE;
/* Special case: locked object owning a sealed scope, see js_LockObj. */
/* Special case: locked object is sealed (ES5 frozen) -- see js_LockObj. */
if (cx->thread->lockedSealedTitle == title)
return JS_TRUE;
@ -1395,19 +1318,5 @@ js_IsTitleLocked(JSContext *cx, JSTitle *title)
((JSThread *)Thin_RemoveWait(ReadWord(title->lock.owner)))->id;
}
#ifdef JS_DEBUG_TITLE_LOCKS
void
js_SetScopeInfo(JSScope *scope, const char *file, int line)
{
JSTitle *title = &scope->title;
if (!title->ownercx) {
jsrefcount count = title->u.count;
JS_ASSERT_IF(!scope->sealed(), count > 0);
JS_ASSERT(count <= 4);
title->file[count - 1] = file;
title->line[count - 1] = line;
}
}
#endif /* JS_DEBUG_TITLE_LOCKS */
#endif /* DEBUG */
#endif /* JS_THREADSAFE */

View file

@ -97,17 +97,13 @@ struct JSTitle {
jsrefcount count; /* lock entry count for reentrancy */
JSTitle *link; /* next link in rt->titleSharingTodo */
} u;
#ifdef JS_DEBUG_TITLE_LOCKS
const char *file[4]; /* file where lock was (re-)taken */
unsigned int line[4]; /* line where lock was (re-)taken */
#endif
};
/*
* Title structure is always allocated as a field of JSScope.
* Title structure is always allocated as a field of JSObject.
*/
#define TITLE_TO_SCOPE(title) \
((JSScope *)((uint8 *) (title) - offsetof(JSScope, title)))
#define TITLE_TO_OBJECT(title) \
((JSObject *)((uint8 *) (title) - offsetof(JSObject, title)))
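
For illustration, FinishSharingTitle in jslock.cpp recovers the owner this way and now reads slot bookkeeping straight off the object:

    JSObject *obj = TITLE_TO_OBJECT(title);
    uint32 nslots = obj->freeslot;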
/*
* Atomic increment and decrement for a reference counter, given jsrefcount *p.
@ -131,16 +127,6 @@ struct JSTitle {
#define JS_NOTIFY_CONDVAR(cv) PR_NotifyCondVar(cv)
#define JS_NOTIFY_ALL_CONDVAR(cv) PR_NotifyAllCondVar(cv)
#ifdef JS_DEBUG_TITLE_LOCKS
#define JS_SET_OBJ_INFO(obj_, file_, line_) \
JS_SET_SCOPE_INFO((obj_)->scope(), file_, line_)
#define JS_SET_SCOPE_INFO(scope_, file_, line_) \
js_SetScopeInfo(scope_, file_, line_)
#endif
#define JS_LOCK(cx, tl) js_Lock(cx, tl)
#define JS_UNLOCK(cx, tl) js_Unlock(cx, tl)
@ -152,12 +138,12 @@ struct JSTitle {
* (objects for which obj->isNative() returns true). All uses of these macros in
* the engine are predicated on obj->isNative or equivalent checks.
*/
#define CX_OWNS_SCOPE_TITLE(cx,scope) ((scope)->title.ownercx == (cx))
#define CX_OWNS_OBJECT_TITLE(cx,obj) ((obj)->title.ownercx == (cx))
#define JS_LOCK_OBJ(cx,obj) \
JS_BEGIN_MACRO \
JSObject *obj_ = (obj); \
if (!CX_OWNS_SCOPE_TITLE(cx, obj_->scope())) { \
if (!CX_OWNS_OBJECT_TITLE(cx, obj_)) { \
js_LockObj(cx, obj_); \
JS_SET_OBJ_INFO(obj_, __FILE__, __LINE__); \
} \
@ -166,7 +152,7 @@ struct JSTitle {
#define JS_UNLOCK_OBJ(cx,obj) \
JS_BEGIN_MACRO \
JSObject *obj_ = (obj); \
if (!CX_OWNS_SCOPE_TITLE(cx, obj_->scope())) \
if (!CX_OWNS_OBJECT_TITLE(cx, obj_)) \
js_UnlockObj(cx, obj_); \
JS_END_MACRO
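
A typical pairing after this patch (cf. the JSOP_SETGVAR case in jsinterp.cpp; slot and v stand in for the caller's locals):

    JS_LOCK_OBJ(cx, obj);
    if (!obj->methodWriteBarrier(cx, slot, v)) {
        JS_UNLOCK_OBJ(cx, obj);
        return false;
    }
    obj->lockedSetSlot(slot, v);
    JS_UNLOCK_OBJ(cx, obj);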
@ -178,16 +164,6 @@ struct JSTitle {
#define JS_UNLOCK_TITLE(cx,title) ((title)->ownercx == (cx) ? (void)0 \
: js_UnlockTitle(cx, title))
#define JS_LOCK_SCOPE(cx,scope) JS_LOCK_TITLE(cx,&(scope)->title)
#define JS_UNLOCK_SCOPE(cx,scope) JS_UNLOCK_TITLE(cx,&(scope)->title)
#define JS_DROP_ALL_EMPTY_SCOPE_LOCKS(cx,scope) \
JS_BEGIN_MACRO \
JS_ASSERT((scope)->isSharedEmpty()); \
if (!CX_OWNS_SCOPE_TITLE(cx, scope)) \
js_DropAllEmptyScopeLocks(cx, scope); \
JS_END_MACRO
extern void js_Lock(JSContext *cx, JSThinLock *tl);
extern void js_Unlock(JSContext *cx, JSThinLock *tl);
extern void js_LockRuntime(JSRuntime *rt);
@ -200,7 +176,6 @@ extern void js_LockTitle(JSContext *cx, JSTitle *title);
extern void js_UnlockTitle(JSContext *cx, JSTitle *title);
extern int js_SetupLocks(int,int);
extern void js_CleanupLocks();
extern void js_DropAllEmptyScopeLocks(JSContext *cx, JSScope *scope);
extern JS_FRIEND_API(jsval)
js_GetSlotThreadSafe(JSContext *, JSObject *, uint32);
extern void js_SetSlotThreadSafe(JSContext *, JSObject *, uint32, jsval);
@ -222,9 +197,6 @@ js_ShareWaitingTitles(JSContext *cx);
extern JSBool js_IsRuntimeLocked(JSRuntime *rt);
extern JSBool js_IsObjLocked(JSContext *cx, JSObject *obj);
extern JSBool js_IsTitleLocked(JSContext *cx, JSTitle *title);
#ifdef JS_DEBUG_TITLE_LOCKS
extern void js_SetScopeInfo(JSScope *scope, const char *file, int line);
#endif
#else
@ -257,14 +229,10 @@ extern void js_SetScopeInfo(JSScope *scope, const char *file, int line);
#define JS_LOCK_RUNTIME(rt) ((void)0)
#define JS_UNLOCK_RUNTIME(rt) ((void)0)
#define CX_OWNS_SCOPE_TITLE(cx,obj) true
#define JS_LOCK_OBJ(cx,obj) ((void)0)
#define JS_UNLOCK_OBJ(cx,obj) ((void)0)
#define JS_LOCK_SCOPE(cx,scope) ((void)0)
#define JS_UNLOCK_SCOPE(cx,scope) ((void)0)
#define JS_DROP_ALL_EMPTY_SCOPE_LOCKS(cx,scope) ((void)0)
#define CX_OWNS_OBJECT_TITLE(cx,obj) 1
#define JS_IS_RUNTIME_LOCKED(rt) 1
#define JS_IS_OBJ_LOCKED(cx,obj) 1
#define JS_IS_TITLE_LOCKED(cx,title) 1
@ -286,8 +254,6 @@ extern void js_SetScopeInfo(JSScope *scope, const char *file, int line);
JS_NO_TIMEOUT)
#define JS_NOTIFY_REQUEST_DONE(rt) JS_NOTIFY_CONDVAR((rt)->requestDone)
#define CX_OWNS_OBJECT_TITLE(cx,obj) CX_OWNS_SCOPE_TITLE(cx, (obj)->scope())
#ifndef JS_SET_OBJ_INFO
#define JS_SET_OBJ_INFO(obj,f,l) ((void)0)
#endif

File diff suppressed because it is too large. Load diff

View file

@ -52,6 +52,7 @@
#include "jshash.h" /* Added by JSIFY */
#include "jspubtd.h"
#include "jsprvtd.h"
#include "jslock.h"
#include "jsvalue.h"
#include "jsvector.h"
@ -175,11 +176,11 @@ typedef Vector<PropDesc, 1> PropDescArray;
struct JSObjectMap {
static JS_FRIEND_DATA(const JSObjectMap) sharedNonNative;
uint32 shape; /* shape identifier */
uint32 shape; /* shape identifier */
explicit JSObjectMap(uint32 shape) : shape(shape) {}
enum { SHAPELESS = 0xffffffff };
enum { INVALID_SHAPE = 0x8fffffff, SHAPELESS = 0xffffffff };
bool isNative() const { return this != &sharedNonNative; }
@ -246,6 +247,9 @@ struct JSFunction;
* 64 bytes on 64-bit systems. The JSFunction struct is an extension of this
* struct allocated from a larger GC size-class.
*
* The clasp member stores the js::Class pointer for this object. We do *not*
* synchronize updates of clasp or flags -- API clients must take care.
*
* An object is a delegate if it is on another object's prototype (the proto
* field) or scope chain (the parent field), and therefore the delegate might
* be asked implicitly to get or set a property on behalf of another object.
@ -261,8 +265,8 @@ struct JSFunction;
* to be complementary to this bit, but it is up to the API client to implement
* any such association.
*
* Both these flags are initially zero; they may be set or queried using the
* (is|set)(Delegate|System) inline methods.
* Both these flag bits are initially zero; they may be set or queried using
* the (is|set)(Delegate|System) inline methods.
*
* The dslots member is null or a pointer into a dynamically allocated vector
* of Values for reserved and dynamic slots. If dslots is not null, dslots[-1]
@ -275,25 +279,83 @@ struct JSObject {
*/
friend class js::TraceRecorder;
JSObjectMap *map; /* property map, see jsscope.h */
js::Class *clasp; /* class pointer */
jsuword flags; /* see above */
/*
* Private pointer to the last added property and methods to manipulate the
* list it links among properties in this scope. The {remove,insert} pair
* for DictionaryProperties assert that the scope is in dictionary mode and
* any reachable properties are flagged as dictionary properties.
*
* NB: these private methods do *not* update this scope's shape to track
* lastProp->shape after they finish updating the linked list in the case
* where lastProp is updated. It is up to calling code in jsscope.cpp to
* call updateShape(cx) after updating lastProp.
*/
union {
js::Shape *lastProp;
JSObjectMap *map;
};
js::Class *clasp;
private:
inline void setLastProperty(const js::Shape *shape);
inline void removeLastProperty();
public:
inline const js::Shape *lastProperty() const;
inline js::Shape **nativeSearch(jsid id, bool adding = false);
inline const js::Shape *nativeLookup(jsid id);
inline bool nativeContains(jsid id);
inline bool nativeContains(const js::Shape &shape);
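
These replace the old scope->lookup/hasProperty idiom; for instance, js_LexicalLookup in jsemit.cpp now reads (sketch; atom is a JSAtom*):

    const js::Shape *shape = obj->nativeLookup(ATOM_TO_JSID(atom));
    if (shape && shape->hasShortID()) {
        /* e.g. a block-scoped local; shape->shortid is its depth-relative index. */
    }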
enum {
DELEGATE = 0x01,
SYSTEM = 0x02,
SEALED = 0x04,
BRANDED = 0x08,
GENERIC = 0x10,
METHOD_BARRIER = 0x20,
INDEXED = 0x40,
OWN_SHAPE = 0x80
};
enum {
JS_NSLOTS_BITS = 24,
JS_NSLOTS_LIMIT = JS_BIT(JS_NSLOTS_BITS)
};
uint32 flags: 32-JS_NSLOTS_BITS, /* flags */
freeslot: JS_NSLOTS_BITS; /* next free slot in abstract slot space */
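    /* NB: 32 - JS_NSLOTS_BITS leaves 8 bits for flags, exactly matching the eight flag values (0x01 through 0x80) above. */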
uint32 objShape; /* copy of lastProp->shape, or override if different */
JSObject *proto; /* object's prototype */
JSObject *parent; /* object's parent */
js::Value *dslots; /* dynamically allocated slots */
/* Empty shape of kids if prototype, located here to align fslots on 32 bit targets. */
js::EmptyShape *emptyShape;
js::Value fslots[JS_INITIAL_NSLOTS]; /* small number of fixed slots */
#ifdef JS_THREADSAFE
JSTitle title;
#endif
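A quick check of the bit budget above (editorial note, not in the patch): JS_NSLOTS_BITS is 24, so flags keeps the remaining 8 bits, which exactly covers DELEGATE (0x01) through OWN_SHAPE (0x80), and freeslot can index up to JS_NSLOTS_LIMIT = 2^24 abstract slots. Expressed as compile-time checks that could sit beside the existing JS_STATIC_ASSERTs after the struct:
JS_STATIC_ASSERT(32 - JSObject::JS_NSLOTS_BITS == 8);                          /* eight flag bits */
JS_STATIC_ASSERT(JSObject::OWN_SHAPE < JS_BIT(32 - JSObject::JS_NSLOTS_BITS)); /* flags fit in them */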
bool isNative() const {
return map->isNative();
}
/*
* Return an immutable, shareable, empty shape with the same class as this
* object and the same freeslot as this object had when empty.
*
* If |this| is the prototype object |proto|, the resulting empty shape can
* be shared as the initial shape of new objects whose prototype is |proto|.
*/
inline bool canProvideEmptyShape(js::Class *clasp);
inline js::EmptyShape *getEmptyShape(JSContext *cx, js::Class *aclasp);
js::Class *getClass() const {
return clasp;
}
bool isNative() const { return map->isNative(); }
JSClass *getJSClass() const {
return Jsvalify(clasp);
}
js::Class *getClass() const { return clasp; }
JSClass *getJSClass() const { return Jsvalify(clasp); }
bool hasClass(const js::Class *c) const {
return c == clasp;
@ -303,32 +365,157 @@ struct JSObject {
return &getClass()->ops;
}
inline JSScope *scope() const;
inline uint32 shape() const;
inline void trace(JSTracer *trc);
bool isDelegate() const {
return (flags & jsuword(1)) != jsuword(0);
uint32 shape() const {
JS_ASSERT(objShape != JSObjectMap::INVALID_SHAPE);
return objShape;
}
void setDelegate() {
flags |= jsuword(1);
}
bool isDelegate() const { return !!(flags & DELEGATE); }
void setDelegate() { flags |= DELEGATE; }
static void setDelegateNullSafe(JSObject *obj) {
if (obj)
obj->setDelegate();
}
bool isSystem() const {
return (flags & jsuword(2)) != jsuword(0);
bool isSystem() const { return !!(flags & SYSTEM); }
void setSystem() { flags |= SYSTEM; }
/*
* Don't define clearSealed, as it can't be done safely because JS_LOCK_OBJ
* will avoid taking the lock if the object owns its scope and the scope is
* sealed.
*/
bool sealed() { return !!(flags & SEALED); }
void seal(JSContext *cx);
/*
* A branded object contains plain old methods (function-valued properties
* without magic getters and setters), and its shape evolves whenever a
* function value changes.
*/
bool branded() { return !!(flags & BRANDED); }
bool brand(JSContext *cx, uint32 slot, js::Value v);
bool unbrand(JSContext *cx);
bool generic() { return !!(flags & GENERIC); }
void setGeneric() { flags |= GENERIC; }
private:
void generateOwnShape(JSContext *cx);
void setOwnShape(uint32 s) { flags |= OWN_SHAPE; objShape = s; }
void clearOwnShape() { flags &= ~OWN_SHAPE; objShape = map->shape; }
public:
inline bool nativeEmpty() const;
bool hasOwnShape() const { return !!(flags & OWN_SHAPE); }
void setMap(JSObjectMap *amap) {
JS_ASSERT(!hasOwnShape());
map = amap;
objShape = map->shape;
}
void setSystem() {
flags |= jsuword(2);
void setSharedNonNativeMap() {
setMap(const_cast<JSObjectMap *>(&JSObjectMap::sharedNonNative));
}
void deletingShapeChange(JSContext *cx, const js::Shape &shape);
bool methodShapeChange(JSContext *cx, const js::Shape &shape);
bool methodShapeChange(JSContext *cx, uint32 slot);
void protoShapeChange(JSContext *cx);
void shadowingShapeChange(JSContext *cx, const js::Shape &shape);
bool globalObjectOwnShapeChange(JSContext *cx);
/*
* A scope has a method barrier when some compiler-created "null closure"
* function objects (functions that do not use lexical bindings above their
* scope, only free variable names) that have a correct JSSLOT_PARENT value
* thanks to the COMPILE_N_GO optimization are stored as newly added direct
* property values of the scope's object.
*
* The de-facto standard JS language requires each evaluation of such a
* closure to result in a unique (according to === and observable effects)
* function object. ES3 tried to allow implementations to "join" such
* objects to a single compiler-created object, but this makes an overt
* mutation hazard, also an "identity hazard" against interoperation among
* implementations that join and do not join.
*
* To stay compatible with the de-facto standard, we store the compiler-
* created function object as the method value and set the METHOD_BARRIER
* flag.
*
* The method value is part of the method property tree node's identity, so
* it effectively brands the scope with a predictable shape corresponding
* to the method value, but without the overhead of setting the BRANDED
* flag, which requires assigning a new shape peculiar to each branded
* scope. Instead the shape is shared via the property tree among all the
* scopes referencing the method property tree node.
*
* Then when reading from a scope for which scope->hasMethodBarrier() is
* true, we count on the scope's qualified/guarded shape being unique and
* add a read barrier that clones the compiler-created function object on
* demand, reshaping the scope.
*
* This read barrier is bypassed when evaluating the callee sub-expression
* of a call expression (see the JOF_CALLOP opcodes in jsopcode.tbl), since
* such ops do not present an identity or mutation hazard. The compiler
* performs this optimization only for null closures that do not use their
* own name or equivalent built-in references (arguments.callee).
*
* The BRANDED write barrier, JSObject::methodWriteBarrier, must check for
* METHOD_BARRIER too, and regenerate this scope's shape if the method's
* value is in fact changing.
*/
bool hasMethodBarrier() { return !!(flags & METHOD_BARRIER); }
void setMethodBarrier() { flags |= METHOD_BARRIER; }
/*
* Test whether this object may be branded due to method calls, which means
* any assignment to a function-valued property must regenerate shape; else
* test whether this object has method properties, which require a method
* write barrier.
*/
bool brandedOrHasMethodBarrier() { return !!(flags & (BRANDED | METHOD_BARRIER)); }
/*
* Read barrier to clone a joined function object stored as a method.
* Defined in jsobjinlines.h, but not declared inline per standard style in
* order to avoid gcc warnings.
*/
bool methodReadBarrier(JSContext *cx, const js::Shape &shape, js::Value *vp);
/*
* Write barrier to check for a change of method value. Defined inline in
* jsobjinlines.h after methodReadBarrier. The slot flavor is required by
* JSOP_*GVAR, which deals in slots not shapes, while not deoptimizing to
* map slot to shape unless JSObject::flags show that this is necessary.
* The methodShapeChange overload (directly below) parallels this.
*/
bool methodWriteBarrier(JSContext *cx, const js::Shape &shape, const js::Value &v);
bool methodWriteBarrier(JSContext *cx, uint32 slot, const js::Value &v);
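A hedged sketch (editorial, not part of the patch) of how a read path might combine lockedGetSlot with the read barrier declared above; locking and the surrounding getter logic are elided, and the helper name is made up.
static bool
GetWithMethodBarrierSketch(JSContext *cx, JSObject *obj, const js::Shape &shape, js::Value *vp)
{
    *vp = obj->lockedGetSlot(shape.slot);
    if (shape.isMethod() && obj->hasMethodBarrier())
        return obj->methodReadBarrier(cx, shape, vp);  /* clones the joined function object */
    return true;
}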
bool isIndexed() const { return !!(flags & INDEXED); }
void setIndexed() { flags |= INDEXED; }
/*
* Return true if this object is a native one that has been converted from
* shared-immutable prototype-rooted shape storage to dictionary-shapes in
* a doubly-linked list.
*/
inline bool inDictionaryMode() const;
inline uint32 propertyCount() const;
inline bool hasPropertyTable() const;
uint32 numSlots(void) const {
return dslots ? dslots[-1].toPrivateUint32() : (uint32)JS_INITIAL_NSLOTS;
return dslots ? dslots[-1].toPrivateUint32() : uint32(JS_INITIAL_NSLOTS);
}
private:
@ -347,6 +534,35 @@ struct JSObject {
bool growSlots(JSContext *cx, size_t nslots);
void shrinkSlots(JSContext *cx, size_t nslots);
/*
* Ensure that the object has at least JSCLASS_RESERVED_SLOTS(clasp) +
* nreserved slots.
*
* This method may be called only for native objects freshly created using
* NewObject or one of its variants, where the new object will both (a) never
* escape to script and (b) never be extended with ad-hoc properties that
* would try to allocate higher slots without the fresh object first having
* its map set to a shape path that maps those slots.
*
* Block objects satisfy (a) and (b), as there is no evil eval-based way to
* add ad-hoc properties to a Block instance. Call objects satisfy (a) and
* (b) as well, because the compiler-created Shape path that covers args,
* vars, and upvars, stored in their callee function in u.i.names, becomes
* their initial map.
*/
bool ensureInstanceReservedSlots(JSContext *cx, size_t nreserved);
/*
* NB: ensureClassReservedSlotsForEmptyObject asserts that nativeEmpty() is
* true, so call it only on an object that has no properties yet.
* Use ensureClassReservedSlots for any object, either empty or already
* extended with properties.
*/
bool ensureClassReservedSlotsForEmptyObject(JSContext *cx);
inline bool ensureClassReservedSlots(JSContext *cx);
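A hedged usage sketch (editorial, not part of the patch) of the instance-reservation path described above; obj is assumed fresh from NewObject, native, and not yet escaped to script, and the wrapper name is hypothetical. JSSLOT_FREE is the macro used in the jsobjinlines.h hunks further below.
static bool
ReserveInstanceSlotsSketch(JSContext *cx, JSObject *obj, size_t nreserved)
{
    if (!obj->ensureInstanceReservedSlots(cx, nreserved))
        return false;
    JS_ASSERT(obj->freeslot >= JSSLOT_FREE(obj->getClass()));   /* class slots now mapped */
    return true;
}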
bool containsSlot(uint32 slot) const { return slot < freeslot; }
js::Value& getSlotRef(uintN slot) {
return (slot < JS_INITIAL_NSLOTS)
? fslots[slot]
@ -384,13 +600,15 @@ struct JSObject {
inline js::Value getReservedSlot(uintN index) const;
JSObject *getProto() const {
return proto;
}
/* Defined in jsscopeinlines.h to avoid including implementation dependencies here. */
inline void updateShape(JSContext *cx);
inline void updateFlags(const js::Shape *shape, bool isDefinitelyAtom = false);
void clearProto() {
proto = NULL;
}
/* Extend this object to have shape as its last-added property. */
inline void extend(JSContext *cx, const js::Shape *shape, bool isDefinitelyAtom = false);
JSObject *getProto() const { return proto; }
void clearProto() { proto = NULL; }
void setProto(JSObject *newProto) {
#ifdef DEBUG
@ -469,7 +687,7 @@ struct JSObject {
inline void staticAssertArrayLengthIsInPrivateSlot();
public:
static const uint32 DENSE_ARRAY_FIXED_RESERVED_SLOTS = 3;
static const uint32 DENSE_ARRAY_CLASS_RESERVED_SLOTS = 3;
inline uint32 getArrayLength() const;
inline void setArrayLength(uint32 length);
@ -499,25 +717,31 @@ struct JSObject {
/*
* Reserved slot structure for Arguments objects:
*
* JSSLOT_PRIVATE - the corresponding frame until the frame exits.
* JSSLOT_PRIVATE - the function's stack frame until the function
* returns; also, JS_ARGUMENTS_OBJECT_ON_TRACE if
* arguments was created on trace
* JSSLOT_ARGS_LENGTH - the number of actual arguments and a flag
* indicating whether arguments.length was
* overwritten. This slot is not used to represent
* overwritten. This slot is not used to represent
* arguments.length after that property has been
* assigned, even if the new value is integral: it's
* always the original length.
* JSSLOT_ARGS_CALLEE - the arguments.callee value or JSVAL_HOLE if that
* was overwritten.
* JSSLOT_ARGS_DATA - pointer to an ArgumentsData structure containing
* the arguments.callee value or JSVAL_HOLE if that
* was overwritten, and the values of all arguments
* once the function has returned (or as soon as a
* strict arguments object has been created).
*
* Argument index i is stored in dslots[i], accessible via
* Argument index i is stored in ArgumentsData.slots[i], accessible via
* {get,set}ArgsElement().
*/
static const uint32 JSSLOT_ARGS_LENGTH = JSSLOT_PRIVATE + 1;
static const uint32 JSSLOT_ARGS_CALLEE = JSSLOT_PRIVATE + 2;
static const uint32 JSSLOT_ARGS_DATA = JSSLOT_PRIVATE + 2;
public:
/* Number of extra fixed slots besides JSSLOT_PRIVATE. */
static const uint32 ARGS_FIXED_RESERVED_SLOTS = 2;
/* Number of extra fixed arguments object slots besides JSSLOT_PRIVATE. */
static const uint32 ARGS_CLASS_RESERVED_SLOTS = 2;
static const uint32 ARGS_FIRST_FREE_SLOT = JSSLOT_PRIVATE + ARGS_CLASS_RESERVED_SLOTS + 1;
/* Lower-order bit stolen from the length slot. */
static const uint32 ARGS_LENGTH_OVERRIDDEN_BIT = 0x1;
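A hedged reconstruction (editorial, not verified against the patch) of the packing implied by the comment above: the initial length is presumably stored shifted left by one so that bit 0 can hold ARGS_LENGTH_OVERRIDDEN_BIT, which matches isArgsLengthOverridden in the jsobjinlines.h hunks below.
static uint32
UnpackArgsLengthSketch(int32 packed, bool *overriddenp)
{
    *overriddenp = (packed & JSObject::ARGS_LENGTH_OVERRIDDEN_BIT) != 0;
    return uint32(packed) >> 1;   /* assumption: the length occupies the remaining bits */
}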
@ -537,6 +761,9 @@ struct JSObject {
inline void setArgsLengthOverridden();
inline bool isArgsLengthOverridden() const;
inline js::ArgumentsData *getArgsData() const;
inline void setArgsData(js::ArgumentsData *data);
inline const js::Value &getArgsCallee() const;
inline void setArgsCallee(const js::Value &callee);
@ -548,7 +775,6 @@ struct JSObject {
* Date-specific getters and setters.
*/
public:
static const uint32 JSSLOT_DATE_UTC_TIME = JSSLOT_PRIVATE;
/*
@ -579,6 +805,16 @@ struct JSObject {
private:
friend struct JSFunction;
/*
* Flat closures with one or more upvars snapshot the upvars' values into a
* vector of js::Values referenced from this slot.
*/
static const uint32 JSSLOT_FLAT_CLOSURE_UPVARS = JSSLOT_PRIVATE + 1;
/*
* Null closures set or initialized as methods have these slots. See the
* "method barrier" comments and methods.
*/
static const uint32 JSSLOT_FUN_METHOD_ATOM = JSSLOT_PRIVATE + 1;
static const uint32 JSSLOT_FUN_METHOD_OBJ = JSSLOT_PRIVATE + 2;
@ -586,16 +822,19 @@ struct JSObject {
static const uint32 JSSLOT_BOUND_FUNCTION_ARGS_COUNT = JSSLOT_PRIVATE + 2;
public:
static const uint32 FUN_FIXED_RESERVED_SLOTS = 2;
static const uint32 FUN_CLASS_RESERVED_SLOTS = 2;
inline JSFunction *getFunctionPrivate() const;
inline js::Value *getFlatClosureUpvars() const;
inline js::Value getFlatClosureUpvar(uint32 i) const;
inline void setFlatClosureUpvars(js::Value *upvars);
inline bool hasMethodObj(const JSObject& obj) const;
inline void setMethodObj(JSObject& obj);
inline JSFunction *getFunctionPrivate() const;
inline bool
initBoundFunction(JSContext *cx, const js::Value &thisArg,
const js::Value *args, uintN argslen);
inline bool initBoundFunction(JSContext *cx, const js::Value &thisArg,
const js::Value *args, uintN argslen);
inline JSObject *getBoundFunctionTarget() const;
inline const js::Value &getBoundFunctionThis() const;
@ -609,7 +848,7 @@ struct JSObject {
static const uint32 JSSLOT_REGEXP_LAST_INDEX = JSSLOT_PRIVATE + 1;
public:
static const uint32 REGEXP_FIXED_RESERVED_SLOTS = 1;
static const uint32 REGEXP_CLASS_RESERVED_SLOTS = 1;
inline const js::Value &getRegExpLastIndex() const;
inline void setRegExpLastIndex(const js::Value &v);
@ -643,8 +882,8 @@ struct JSObject {
static const uint32 JSSLOT_QNAME_LOCAL_NAME = JSSLOT_PRIVATE + 2;
public:
static const uint32 NAMESPACE_FIXED_RESERVED_SLOTS = 3;
static const uint32 QNAME_FIXED_RESERVED_SLOTS = 3;
static const uint32 NAMESPACE_CLASS_RESERVED_SLOTS = 3;
static const uint32 QNAME_CLASS_RESERVED_SLOTS = 3;
inline jsval getNamePrefix() const;
inline void setNamePrefix(jsval prefix);
@ -675,41 +914,76 @@ struct JSObject {
/*
* Back to generic stuff.
*/
inline bool isCallable();
/* The map field is not initialized here and should be set separately. */
void init(js::Class *aclasp, JSObject *proto, JSObject *parent,
const js::Value &privateSlotValue) {
JS_STATIC_ASSERT(JSSLOT_PRIVATE + 3 == JS_INITIAL_NSLOTS);
inline void init(js::Class *aclasp, JSObject *proto, JSObject *parent,
const js::Value &privateSlotValue, JSContext *cx);
clasp = aclasp;
flags = 0;
JS_ASSERT(!isDelegate());
JS_ASSERT(!isSystem());
setProto(proto);
setParent(parent);
fslots[JSSLOT_PRIVATE] = privateSlotValue;
fslots[JSSLOT_PRIVATE + 1].setUndefined();
fslots[JSSLOT_PRIVATE + 2].setUndefined();
dslots = NULL;
}
inline void finish(JSContext *cx);
/*
* Like init, but also initializes map. The catch: proto must be the result
* of a call to js_InitClass(...clasp, ...).
*/
inline void initSharingEmptyScope(js::Class *clasp,
inline void initSharingEmptyShape(js::Class *clasp,
JSObject *proto,
JSObject *parent,
const js::Value &privateSlotValue);
const js::Value &privateSlotValue,
JSContext *cx);
inline bool hasSlotsArray() const { return !!dslots; }
/* This method can only be called when hasSlotsArray() returns true. */
inline void freeSlotsArray(JSContext *cx);
inline bool hasProperty(JSContext *cx, jsid id, bool *foundp, uintN flags = 0);
bool allocSlot(JSContext *cx, uint32 *slotp);
void freeSlot(JSContext *cx, uint32 slot);
private:
void reportReadOnlyScope(JSContext *cx);
js::Shape *getChildProperty(JSContext *cx, js::Shape *parent, js::Shape &child);
const js::Shape *addPropertyCommon(JSContext *cx, jsid id,
js::PropertyOp getter, js::PropertyOp setter,
uint32 slot, uintN attrs,
uintN flags, intN shortid,
js::Shape **spp);
bool toDictionaryMode(JSContext *cx);
public:
/* Add a property whose id is not yet in this scope. */
const js::Shape *addProperty(JSContext *cx, jsid id,
js::PropertyOp getter, js::PropertyOp setter,
uint32 slot, uintN attrs,
uintN flags, intN shortid);
/* Add a data property whose id is not yet in this scope. */
const js::Shape *addDataProperty(JSContext *cx, jsid id, uint32 slot, uintN attrs) {
JS_ASSERT(!(attrs & (JSPROP_GETTER | JSPROP_SETTER)));
return addProperty(cx, id, NULL, NULL, slot, attrs, 0, 0);
}
/* Add or overwrite a property for id in this scope. */
const js::Shape *putProperty(JSContext *cx, jsid id,
js::PropertyOp getter, js::PropertyOp setter,
uint32 slot, uintN attrs,
uintN flags, intN shortid);
/* Change the given property into a sibling with the same id in this scope. */
const js::Shape *changeProperty(JSContext *cx, const js::Shape *shape, uintN attrs, uintN mask,
js::PropertyOp getter, js::PropertyOp setter);
/* Remove id from this object. */
bool removeProperty(JSContext *cx, jsid id);
/* Clear the scope, making it empty. */
void clear(JSContext *cx);
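A hedged sketch (editorial, not part of the patch) of defining a plain data property through the per-object API above; it assumes obj is native and locked and that id has already been normalized via js_CheckForStringIndex. The helper name is hypothetical.
static bool
AddDataPropSketch(JSContext *cx, JSObject *obj, jsid id)
{
    JS_ASSERT(!obj->nativeContains(id));
    uint32 slot;
    if (!obj->allocSlot(cx, &slot))
        return false;
    if (!obj->addDataProperty(cx, id, slot, JSPROP_ENUMERATE)) {
        obj->freeSlot(cx, slot);
        return false;
    }
    return true;
}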
JSBool lookupProperty(JSContext *cx, jsid id, JSObject **objp, JSProperty **propp) {
JSLookupPropOp op = getOps()->lookupProperty;
return (op ? op : js_LookupProperty)(cx, this, id, objp, propp);
@ -793,6 +1067,7 @@ struct JSObject {
inline bool isObject() const;
inline bool isWith() const;
inline bool isBlock() const;
inline bool isCall() const;
inline bool isRegExp() const;
inline bool isXML() const;
inline bool isXMLId() const;
@ -806,8 +1081,6 @@ struct JSObject {
JS_FRIEND_API(bool) isWrapper() const;
JS_FRIEND_API(JSObject *) unwrap(uintN *flagsp = NULL);
inline bool unbrand(JSContext *cx);
inline void initArrayClass();
};
@ -829,8 +1102,7 @@ JS_STATIC_ASSERT(sizeof(JSObject) % JS_GCTHING_ALIGN == 0);
#define MAX_DSLOTS_LENGTH (~size_t(0) / sizeof(js::Value) - 1)
#define MAX_DSLOTS_LENGTH32 (~uint32(0) / sizeof(js::Value) - 1)
#define OBJ_CHECK_SLOT(obj,slot) \
(JS_ASSERT((obj)->isNative()), JS_ASSERT(slot < (obj)->scope()->freeslot))
#define OBJ_CHECK_SLOT(obj,slot) (JS_ASSERT(slot < (obj)->freeslot))
#ifdef JS_THREADSAFE
@ -914,7 +1186,7 @@ extern JSBool
js_DefineBlockVariable(JSContext *cx, JSObject *obj, jsid id, intN index);
#define OBJ_BLOCK_COUNT(cx,obj) \
((OBJ_IS_CLONED_BLOCK(obj) ? obj->getProto() : obj)->scope()->entryCount)
((OBJ_IS_CLONED_BLOCK(obj) ? obj->getProto() : obj)->propertyCount())
#define OBJ_BLOCK_DEPTH(cx,obj) \
obj->getSlot(JSSLOT_BLOCK_DEPTH).toInt32()
#define OBJ_SET_BLOCK_DEPTH(cx,obj,depth) \
@ -1073,21 +1345,6 @@ extern JSObject *
js_ConstructObject(JSContext *cx, js::Class *clasp, JSObject *proto,
JSObject *parent, uintN argc, js::Value *argv);
extern JSBool
js_AllocSlot(JSContext *cx, JSObject *obj, uint32 *slotp);
extern void
js_FreeSlot(JSContext *cx, JSObject *obj, uint32 slot);
/*
* Ensure that the object has at least JSCLASS_RESERVED_SLOTS(clasp)+nreserved
* slots. The function can be called only for native objects just created with
* js_NewObject or its forms. In particular, the object should not be shared
* between threads and its dslots array must be null.
*/
bool
js_EnsureReservedSlots(JSContext *cx, JSObject *obj, size_t nreserved);
extern jsid
js_CheckForStringIndex(jsid id);
@ -1111,19 +1368,19 @@ js_PurgeScopeChain(JSContext *cx, JSObject *obj, jsid id)
* Find or create a property named by id in obj's scope, with the given getter
* and setter, slot, attributes, and other members.
*/
extern JSScopeProperty *
extern const js::Shape *
js_AddNativeProperty(JSContext *cx, JSObject *obj, jsid id,
js::PropertyOp getter, js::PropertyOp setter, uint32 slot,
uintN attrs, uintN flags, intN shortid);
/*
* Change sprop to have the given attrs, getter, and setter in scope, morphing
* it into a potentially new JSScopeProperty. Return a pointer to the changed
* Change shape to have the given attrs, getter, and setter in scope, morphing
* it into a potentially new js::Shape. Return a pointer to the changed
* or identical property.
*/
extern JSScopeProperty *
extern const js::Shape *
js_ChangeNativePropertyAttrs(JSContext *cx, JSObject *obj,
JSScopeProperty *sprop, uintN attrs, uintN mask,
const js::Shape *shape, uintN attrs, uintN mask,
js::PropertyOp getter, js::PropertyOp setter);
extern JSBool
@ -1136,8 +1393,8 @@ js_DefineOwnProperty(JSContext *cx, JSObject *obj, jsid id,
const uintN JSDNP_CACHE_RESULT = 1; /* an interpreter call from JSOP_INITPROP */
const uintN JSDNP_DONT_PURGE = 2; /* suppress js_PurgeScopeChain */
const uintN JSDNP_SET_METHOD = 4; /* js_{DefineNativeProperty,SetPropertyHelper}
must pass the JSScopeProperty::METHOD
flag on to js_AddScopeProperty */
must pass the js::Shape::METHOD
flag on to JSObject::{add,put}Property */
const uintN JSDNP_UNQUALIFIED = 8; /* Unqualified property set. Only used in
the defineHow argument of
js_SetPropertyHelper. */
@ -1225,17 +1482,17 @@ const uintN JSGET_METHOD_BARRIER = 0; // get can leak joined function object
const uintN JSGET_NO_METHOD_BARRIER = 2; // call to joined function can't leak
/*
* NB: js_NativeGet and js_NativeSet are called with the scope containing sprop
* NB: js_NativeGet and js_NativeSet are called with the scope containing shape
* (pobj's scope for Get, obj's for Set) locked, and on successful return, that
* scope is again locked. But on failure, both functions return false with the
* scope containing sprop unlocked.
* scope containing shape unlocked.
*/
extern JSBool
js_NativeGet(JSContext *cx, JSObject *obj, JSObject *pobj,
JSScopeProperty *sprop, uintN getHow, js::Value *vp);
js_NativeGet(JSContext *cx, JSObject *obj, JSObject *pobj, const js::Shape *shape, uintN getHow,
js::Value *vp);
extern JSBool
js_NativeSet(JSContext *cx, JSObject *obj, JSScopeProperty *sprop, bool added,
js_NativeSet(JSContext *cx, JSObject *obj, const js::Shape *shape, bool added,
js::Value *vp);
extern JSBool
@ -1266,7 +1523,7 @@ js_SetPropertyHelper(JSContext *cx, JSObject *obj, jsid id, uintN defineHow,
* that obj is locked and this function always unlocks obj on return.
*/
extern JSBool
js_SetNativeAttributes(JSContext *cx, JSObject *obj, JSScopeProperty *sprop,
js_SetNativeAttributes(JSContext *cx, JSObject *obj, js::Shape *shape,
uintN attrs);
namespace js {

View file

@ -41,18 +41,25 @@
#ifndef jsobjinlines_h___
#define jsobjinlines_h___
#include "jsbool.h"
#include <new>
#include "jsdate.h"
#include "jsfun.h"
#include "jsiter.h"
#include "jslock.h"
#include "jsobj.h"
#include "jspropertytree.h"
#include "jsscope.h"
#include "jsstaticcheck.h"
#include "jsxml.h"
#include "jsdtracef.h"
/* Headers included for inline implementations used by this header. */
#include "jsbool.h"
#include "jscntxt.h"
#include "jsnum.h"
#include "jsscopeinlines.h"
#include "jsstr.h"
inline void
JSObject::dropProperty(JSContext *cx, JSProperty *prop)
@ -62,23 +69,139 @@ JSObject::dropProperty(JSContext *cx, JSProperty *prop)
JS_UNLOCK_OBJ(cx, this);
}
inline void
JSObject::seal(JSContext *cx)
{
JS_ASSERT(!sealed());
if (isNative())
generateOwnShape(cx);
flags |= SEALED;
}
inline bool
JSObject::brand(JSContext *cx, uint32 slot, js::Value v)
{
JS_ASSERT(!generic());
JS_ASSERT(!branded());
JS_ASSERT(isNative());
generateOwnShape(cx);
if (js_IsPropertyCacheDisabled(cx)) // check for rt->shapeGen overflow
return false;
flags |= BRANDED;
return true;
}
inline bool
JSObject::unbrand(JSContext *cx)
{
JS_ASSERT(isNative());
if (!branded())
setGeneric();
return true;
}
/*
* Property read barrier for deferred cloning of compiler-created function
* objects optimized as typically non-escaping, ad-hoc methods in obj.
*/
inline bool
JSObject::methodReadBarrier(JSContext *cx, const js::Shape &shape, js::Value *vp)
{
JS_ASSERT(canHaveMethodBarrier());
JS_ASSERT(hasMethodBarrier());
JS_ASSERT(nativeContains(shape));
JS_ASSERT(shape.isMethod());
JS_ASSERT(&shape.methodObject() == &vp->toObject());
JSObject *funobj = &vp->toObject();
JSFunction *fun = GET_FUNCTION_PRIVATE(cx, funobj);
JS_ASSERT(fun == funobj && FUN_NULL_CLOSURE(fun));
funobj = CloneFunctionObject(cx, fun, funobj->getParent());
if (!funobj)
return false;
funobj->setMethodObj(*this);
vp->setObject(*funobj);
if (!js_SetPropertyHelper(cx, this, shape.id, 0, vp))
return false;
#ifdef DEBUG
if (cx->runtime->functionMeterFilename) {
JS_FUNCTION_METER(cx, mreadbarrier);
typedef JSRuntime::FunctionCountMap HM;
HM &h = cx->runtime->methodReadBarrierCountMap;
HM::AddPtr p = h.lookupForAdd(fun);
if (!p) {
h.add(p, fun, 1);
} else {
JS_ASSERT(p->key == fun);
++p->value;
}
}
#endif
return true;
}
static JS_ALWAYS_INLINE bool
ChangesMethodValue(const js::Value &prev, const js::Value &v)
{
JSObject *prevObj;
return prev.isObject() && (prevObj = &prev.toObject())->isFunction() &&
(!v.isObject() || &v.toObject() != prevObj);
}
inline bool
JSObject::methodWriteBarrier(JSContext *cx, const js::Shape &shape, const js::Value &v)
{
if (flags & (BRANDED | METHOD_BARRIER)) {
const js::Value &prev = lockedGetSlot(shape.slot);
if (ChangesMethodValue(prev, v)) {
JS_FUNCTION_METER(cx, mwritebarrier);
return methodShapeChange(cx, shape);
}
}
return true;
}
inline bool
JSObject::methodWriteBarrier(JSContext *cx, uint32 slot, const js::Value &v)
{
if (flags & (BRANDED | METHOD_BARRIER)) {
const js::Value &prev = lockedGetSlot(slot);
if (ChangesMethodValue(prev, v)) {
JS_FUNCTION_METER(cx, mwslotbarrier);
return methodShapeChange(cx, slot);
}
}
return true;
}
inline bool
JSObject::ensureClassReservedSlots(JSContext *cx)
{
return !nativeEmpty() || ensureClassReservedSlotsForEmptyObject(cx);
}
inline js::Value
JSObject::getSlotMT(JSContext *cx, uintN slot)
{
#ifdef JS_THREADSAFE
/*
* If thread-safe, define a getSlotMT() that bypasses, for a native
* object, the lock-free "fast path" test of
* (obj->scope()->ownercx == cx), to avoid needlessly switching from
* lock-free to lock-full scope when doing GC on a different context
* from the last one to own the scope. The caller in this case is
* probably a Class.mark function, e.g., fun_mark, or maybe a
* finalizer.
* If thread-safe, define a getSlotMT() that bypasses, for a native object,
* the lock-free "fast path" test of (obj->title.ownercx == cx), to avoid
* needlessly switching from lock-free to lock-full scope when doing GC on
* a different context from the last one to own the scope. The caller in
* this case is probably a JSClass.mark function, e.g., fun_mark, or maybe
* a finalizer.
*/
OBJ_CHECK_SLOT(this, slot);
return (scope()->title.ownercx == cx)
? this->lockedGetSlot(slot)
: js::Valueify(js_GetSlotThreadSafe(cx, this, slot));
return (title.ownercx == cx)
? this->lockedGetSlot(slot)
: js::Valueify(js_GetSlotThreadSafe(cx, this, slot));
#else
return this->lockedGetSlot(slot);
#endif
@ -90,7 +213,7 @@ JSObject::setSlotMT(JSContext *cx, uintN slot, const js::Value &value)
#ifdef JS_THREADSAFE
/* Thread-safe way to set a slot. */
OBJ_CHECK_SLOT(this, slot);
if (scope()->title.ownercx == cx)
if (title.ownercx == cx)
this->lockedSetSlot(slot, value);
else
js_SetSlotThreadSafe(cx, this, slot, js::Jsvalify(value));
@ -125,7 +248,7 @@ JSObject::getPrimitiveThis() const
return fslots[JSSLOT_PRIMITIVE_THIS];
}
inline void
inline void
JSObject::setPrimitiveThis(const js::Value &pthis)
{
JS_ASSERT(isPrimitive());
@ -145,7 +268,7 @@ JSObject::getArrayLength() const
return fslots[JSSLOT_ARRAY_LENGTH].toPrivateUint32();
}
inline void
inline void
JSObject::setArrayLength(uint32 length)
{
JS_ASSERT(isArray());
@ -208,7 +331,7 @@ JSObject::freeDenseArrayElements(JSContext *cx)
}
}
inline void
inline void
JSObject::voidDenseOnlyArraySlots()
{
JS_ASSERT(isDenseArray());
@ -249,42 +372,54 @@ JSObject::isArgsLengthOverridden() const
return v.toInt32() & ARGS_LENGTH_OVERRIDDEN_BIT;
}
inline const js::Value &
JSObject::getArgsCallee() const
inline js::ArgumentsData *
JSObject::getArgsData() const
{
JS_ASSERT(isArguments());
return fslots[JSSLOT_ARGS_CALLEE];
return (js::ArgumentsData *) fslots[JSSLOT_ARGS_DATA].toPrivate();
}
inline void
JSObject::setArgsCallee(const js::Value &callee)
inline void
JSObject::setArgsData(js::ArgumentsData *data)
{
JS_ASSERT(isArguments());
fslots[JSSLOT_ARGS_CALLEE] = callee;
fslots[JSSLOT_ARGS_DATA].setPrivate(data);
}
inline const js::Value &
JSObject::getArgsCallee() const
{
return getArgsData()->callee;
}
inline void
JSObject::setArgsCallee(const js::Value &callee)
{
getArgsData()->callee = callee;
}
inline const js::Value &
JSObject::getArgsElement(uint32 i) const
{
JS_ASSERT(isArguments());
JS_ASSERT(i < numSlots() - JS_INITIAL_NSLOTS);
return dslots[i];
JS_ASSERT(i < getArgsInitialLength());
return getArgsData()->slots[i];
}
inline js::Value *
JSObject::addressOfArgsElement(uint32 i) const
{
JS_ASSERT(isArguments());
JS_ASSERT(i < numSlots() - JS_INITIAL_NSLOTS);
return &dslots[i];
JS_ASSERT(i < getArgsInitialLength());
return &getArgsData()->slots[i];
}
inline void
JSObject::setArgsElement(uint32 i, const js::Value &v)
{
JS_ASSERT(isArguments());
JS_ASSERT(i < numSlots() - JS_INITIAL_NSLOTS);
dslots[i] = v;
JS_ASSERT(i < getArgsInitialLength());
getArgsData()->slots[i] = v;
}
inline const js::Value &
@ -301,6 +436,35 @@ JSObject::setDateUTCTime(const js::Value &time)
fslots[JSSLOT_DATE_UTC_TIME] = time;
}
inline JSFunction *
JSObject::getFunctionPrivate() const
{
JS_ASSERT(isFunction());
return reinterpret_cast<JSFunction *>(getPrivate());
}
inline js::Value *
JSObject::getFlatClosureUpvars() const
{
JS_ASSERT(isFunction());
JS_ASSERT(FUN_FLAT_CLOSURE(getFunctionPrivate()));
return (js::Value *) fslots[JSSLOT_FLAT_CLOSURE_UPVARS].toPrivate();
}
inline js::Value
JSObject::getFlatClosureUpvar(uint32 i) const
{
return getFlatClosureUpvars()[i];
}
inline void
JSObject::setFlatClosureUpvars(js::Value *upvars)
{
JS_ASSERT(isFunction());
JS_ASSERT(FUN_FLAT_CLOSURE(getFunctionPrivate()));
fslots[JSSLOT_FLAT_CLOSURE_UPVARS].setPrivate(upvars);
}
inline bool
JSObject::hasMethodObj(const JSObject& obj) const
{
@ -314,13 +478,6 @@ JSObject::setMethodObj(JSObject& obj)
fslots[JSSLOT_FUN_METHOD_OBJ].setObject(obj);
}
inline JSFunction *
JSObject::getFunctionPrivate() const
{
JS_ASSERT(isFunction());
return reinterpret_cast<JSFunction *>(getPrivate());
}
inline NativeIterator *
JSObject::getNativeIterator() const
{
@ -402,14 +559,67 @@ JSObject::setWithThis(JSObject *thisp)
}
inline void
JSObject::initSharingEmptyScope(js::Class *clasp, JSObject *proto, JSObject *parent,
const js::Value &privateSlotValue)
JSObject::init(js::Class *aclasp, JSObject *proto, JSObject *parent,
const js::Value &privateSlotValue, JSContext *cx)
{
init(clasp, proto, parent, privateSlotValue);
JS_STATIC_ASSERT(JSSLOT_PRIVATE + 3 == JS_INITIAL_NSLOTS);
JSEmptyScope *emptyScope = proto->scope()->emptyScope;
JS_ASSERT(emptyScope->clasp == clasp);
map = emptyScope->hold();
clasp = aclasp;
flags = 0;
freeslot = JSSLOT_START(aclasp);
#ifdef DEBUG
/*
* NB: objShape must not be set here; rather, the caller must call setMap
* or setSharedNonNativeMap after calling init. To defend this requirement
* we set map to null in DEBUG builds, and set objShape to a value we then
* assert obj->shape() never returns.
*/
map = NULL;
objShape = JSObjectMap::INVALID_SHAPE;
#endif
setProto(proto);
setParent(parent);
fslots[JSSLOT_PRIVATE] = privateSlotValue;
fslots[JSSLOT_PRIVATE + 1].setUndefined();
fslots[JSSLOT_PRIVATE + 2].setUndefined();
dslots = NULL;
#ifdef JS_THREADSAFE
js_InitTitle(cx, &title);
#endif
emptyShape = NULL;
}
inline void
JSObject::finish(JSContext *cx)
{
#ifdef DEBUG
if (isNative())
JS_LOCK_RUNTIME_VOID(cx->runtime, cx->runtime->liveObjectProps -= propertyCount());
#endif
if (hasSlotsArray())
freeSlotsArray(cx);
#ifdef JS_THREADSAFE
js_FinishTitle(cx, &title);
#endif
}
inline void
JSObject::initSharingEmptyShape(js::Class *aclasp,
JSObject *proto,
JSObject *parent,
const js::Value &privateSlotValue,
JSContext *cx)
{
init(aclasp, proto, parent, privateSlotValue, cx);
js::EmptyShape *empty = proto->emptyShape;
JS_ASSERT(empty->getClass() == aclasp);
setMap(empty);
}
inline void
@ -421,24 +631,31 @@ JSObject::freeSlotsArray(JSContext *cx)
}
inline bool
JSObject::unbrand(JSContext *cx)
JSObject::hasProperty(JSContext *cx, jsid id, bool *foundp, uintN flags)
{
if (this->isNative()) {
JS_LOCK_OBJ(cx, this);
JSScope *scope = this->scope();
if (scope->isSharedEmpty()) {
scope = js_GetMutableScope(cx, this);
if (!scope) {
JS_UNLOCK_OBJ(cx, this);
return false;
}
}
scope->unbrand(cx);
JS_UNLOCK_SCOPE(cx, scope);
}
JSObject *pobj;
JSProperty *prop;
JSAutoResolveFlags rf(cx, flags);
if (!lookupProperty(cx, id, &pobj, &prop))
return false;
*foundp = !!prop;
if (prop)
pobj->dropProperty(cx, prop);
return true;
}
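A hedged usage sketch (editorial, not part of the patch): hasProperty replaces the lookupProperty/dropProperty dance for simple presence checks. The resolve flag shown is just an example; the default is 0.
static bool
HasPropSketch(JSContext *cx, JSObject *obj, JSAtom *atom, bool *foundp)
{
    return obj->hasProperty(cx, ATOM_TO_JSID(atom), foundp, JSRESOLVE_QUALIFIED);
}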
inline bool
JSObject::isCallable()
{
return isFunction() || getClass()->call;
}
static inline bool
js_IsCallable(const js::Value &v)
{
return v.isObject() && v.toObject().isCallable();
}
namespace js {
class AutoPropDescArrayRooter : private AutoGCRooter
@ -475,7 +692,9 @@ class AutoPropertyDescriptorRooter : private AutoGCRooter, public PropertyDescri
value.setUndefined();
}
AutoPropertyDescriptorRooter(JSContext *cx, PropertyDescriptor *desc) : AutoGCRooter(cx, DESCRIPTOR) {
AutoPropertyDescriptorRooter(JSContext *cx, PropertyDescriptor *desc)
: AutoGCRooter(cx, DESCRIPTOR)
{
obj = desc->obj;
attrs = desc->attrs;
getter = desc->getter;
@ -492,40 +711,34 @@ InitScopeForObject(JSContext* cx, JSObject* obj, js::Class *clasp, JSObject* pro
JS_ASSERT(clasp->isNative());
JS_ASSERT(proto == obj->getProto());
/* Share proto's emptyScope only if obj is similar to proto. */
JSScope *scope = NULL;
/* Share proto's emptyShape only if obj is similar to proto. */
js::EmptyShape *empty = NULL;
if (proto && proto->isNative()) {
if (proto) {
JS_LOCK_OBJ(cx, proto);
scope = proto->scope();
if (scope->canProvideEmptyScope(clasp)) {
JSScope *emptyScope = scope->getEmptyScope(cx, clasp);
JS_UNLOCK_SCOPE(cx, scope);
if (!emptyScope)
if (proto->canProvideEmptyShape(clasp)) {
empty = proto->getEmptyShape(cx, clasp);
JS_UNLOCK_OBJ(cx, proto);
if (!empty)
goto bad;
scope = emptyScope;
} else {
JS_UNLOCK_SCOPE(cx, scope);
scope = NULL;
JS_UNLOCK_OBJ(cx, proto);
}
}
if (!scope) {
scope = JSScope::create(cx, clasp, obj, js_GenerateShape(cx, false));
if (!scope)
goto bad;
if (!empty) {
uint32 freeslot = JSSLOT_FREE(clasp);
JS_ASSERT(freeslot >= scope->freeslot);
JS_ASSERT(freeslot >= JSSLOT_PRIVATE);
empty = js::EmptyShape::create(cx, clasp);
if (!empty)
goto bad;
if (freeslot > JS_INITIAL_NSLOTS && !obj->allocSlots(cx, freeslot))
goto bad;
scope->freeslot = freeslot;
#ifdef DEBUG
if (freeslot < obj->numSlots())
obj->setSlot(freeslot, UndefinedValue());
#endif
obj->freeslot = freeslot;
}
obj->map = scope;
obj->setMap(empty);
return true;
bad:
@ -560,19 +773,17 @@ NewNativeClassInstance(JSContext *cx, Class *clasp, JSObject *proto, JSObject *p
* Default parent to the parent of the prototype, which was set from
* the parent of the prototype's constructor.
*/
obj->init(clasp, proto, parent, JSObject::defaultPrivate(clasp));
obj->init(clasp, proto, parent, JSObject::defaultPrivate(clasp), cx);
JS_LOCK_OBJ(cx, proto);
JSScope *scope = proto->scope();
JS_ASSERT(scope->canProvideEmptyScope(clasp));
scope = scope->getEmptyScope(cx, clasp);
JS_ASSERT(proto->canProvideEmptyShape(clasp));
js::EmptyShape *empty = proto->getEmptyShape(cx, clasp);
JS_UNLOCK_OBJ(cx, proto);
if (!scope) {
if (empty)
obj->setMap(empty);
else
obj = NULL;
} else {
obj->map = scope;
}
}
objectCreationScope.handleCreation(obj);
@ -691,10 +902,7 @@ NewObject(JSContext *cx, js::Class *clasp, JSObject *proto, JSObject *parent)
*
* This should be specialized by the template.
*/
JSObject* obj = isFunction
? (JSObject *)js_NewGCFunction(cx)
: js_NewGCObject(cx);
JSObject* obj = isFunction ? js_NewGCFunction(cx) : js_NewGCObject(cx);
if (!obj)
goto out;
@ -702,10 +910,10 @@ NewObject(JSContext *cx, js::Class *clasp, JSObject *proto, JSObject *parent)
* Default parent to the parent of the prototype, which was set from
* the parent of the prototype's constructor.
*/
obj->init(clasp,
proto,
obj->init(clasp, proto,
(!parent && proto) ? proto->getParent() : parent,
JSObject::defaultPrivate(clasp));
JSObject::defaultPrivate(clasp),
cx);
if (clasp->isNative()) {
if (!InitScopeForObject(cx, obj, clasp, proto)) {
@ -713,7 +921,7 @@ NewObject(JSContext *cx, js::Class *clasp, JSObject *proto, JSObject *parent)
goto out;
}
} else {
obj->map = const_cast<JSObjectMap *>(&JSObjectMap::sharedNonNative);
obj->setSharedNonNativeMap();
}
out:

View file

@ -297,15 +297,18 @@ ToDisassemblySource(JSContext *cx, jsval v)
if (clasp == &js_BlockClass) {
char *source = JS_sprintf_append(NULL, "depth %d {", OBJ_BLOCK_DEPTH(cx, obj));
for (JSScopeProperty *sprop = obj->scope()->lastProperty();
sprop;
sprop = sprop->parent) {
const char *bytes = js_AtomToPrintableString(cx, JSID_TO_ATOM(sprop->id));
Shape::Range r = obj->lastProperty()->all();
while (!r.empty()) {
const Shape &shape = r.front();
const char *bytes = js_AtomToPrintableString(cx, JSID_TO_ATOM(shape.id));
if (!bytes)
return NULL;
r.popFront();
source = JS_sprintf_append(source, "%s: %d%s",
bytes, sprop->shortid,
sprop->parent ? ", " : "");
bytes, shape.shortid,
!r.empty() ? ", " : "");
}
source = JS_sprintf_append(source, "}");
@ -784,7 +787,7 @@ js_NewPrinter(JSContext *cx, const char *name, JSFunction *fun,
jp->fun = fun;
jp->localNames = NULL;
if (fun && FUN_INTERPRETED(fun) && fun->hasLocalNames()) {
jp->localNames = js_GetLocalNameArray(cx, fun, &jp->pool);
jp->localNames = fun->getLocalNameArray(cx, &jp->pool);
if (!jp->localNames) {
js_DestroyPrinter(jp);
return NULL;
@ -1303,19 +1306,9 @@ GetArgOrVarAtom(JSPrinter *jp, uintN slot)
const char *
GetLocal(SprintStack *ss, jsint i)
{
ptrdiff_t off;
JSContext *cx;
JSScript *script;
jsatomid j, n;
JSAtom *atom;
JSObject *obj;
jsint depth, count;
JSScopeProperty *sprop;
const char *rval;
#define LOCAL_ASSERT(expr) LOCAL_ASSERT_RV(expr, "")
off = ss->offsets[i];
ptrdiff_t off = ss->offsets[i];
if (off >= 0)
return OFF2STR(&ss->sprinter, off);
@ -1329,35 +1322,41 @@ GetLocal(SprintStack *ss, jsint i)
* none of the script's object literals are blocks), or the stack slot i is
* not in a block. In either case, return GetStr(ss, i).
*/
cx = ss->sprinter.context;
script = ss->printer->script;
JSScript *script = ss->printer->script;
if (script->objectsOffset == 0)
return GetStr(ss, i);
for (j = 0, n = script->objects()->length; ; j++) {
if (j == n)
return GetStr(ss, i);
obj = script->getObject(j);
if (obj->getClass() == &js_BlockClass) {
depth = OBJ_BLOCK_DEPTH(cx, obj);
count = OBJ_BLOCK_COUNT(cx, obj);
if ((jsuint)(i - depth) < (jsuint)count)
for (jsatomid j = 0, n = script->objects()->length; j != n; j++) {
JSObject *obj = script->getObject(j);
if (obj->isBlock()) {
jsint depth = OBJ_BLOCK_DEPTH(cx, obj);
jsint count = OBJ_BLOCK_COUNT(cx, obj);
if (jsuint(i - depth) < jsuint(count)) {
jsint slot = i - depth;
for (Shape::Range r(obj->lastProperty()); !r.empty(); r.popFront()) {
const Shape &shape = r.front();
if (shape.shortid == slot) {
LOCAL_ASSERT(JSID_IS_ATOM(shape.id));
JSAtom *atom = JSID_TO_ATOM(shape.id);
const char *rval = QuoteString(&ss->sprinter, ATOM_TO_STRING(atom), 0);
if (!rval)
return NULL;
RETRACT(&ss->sprinter, rval);
return rval;
}
}
break;
}
}
}
i -= depth;
for (sprop = obj->scope()->lastProperty(); sprop; sprop = sprop->parent) {
if (sprop->shortid == i)
break;
}
LOCAL_ASSERT(sprop && JSID_IS_ATOM(sprop->id));
atom = JSID_TO_ATOM(sprop->id);
rval = QuoteString(&ss->sprinter, ATOM_TO_STRING(atom), 0);
if (!rval)
return NULL;
RETRACT(&ss->sprinter, rval);
return rval;
return GetStr(ss, i);
#undef LOCAL_ASSERT
}
@ -2650,7 +2649,6 @@ Decompile(SprintStack *ss, jsbytecode *pc, intN nb, JSOp nextop)
case JSOP_ENTERBLOCK:
{
JSAtom **atomv, *smallv[5];
JSScopeProperty *sprop;
LOAD_OBJECT(0);
argc = OBJ_BLOCK_COUNT(cx, obj);
@ -2665,12 +2663,13 @@ Decompile(SprintStack *ss, jsbytecode *pc, intN nb, JSOp nextop)
MUST_FLOW_THROUGH("enterblock_out");
#define LOCAL_ASSERT_OUT(expr) LOCAL_ASSERT_CUSTOM(expr, ok = JS_FALSE; \
goto enterblock_out)
for (sprop = obj->scope()->lastProperty(); sprop;
sprop = sprop->parent) {
if (!sprop->hasShortID())
for (Shape::Range r = obj->lastProperty()->all(); !r.empty(); r.popFront()) {
const Shape &shape = r.front();
if (!shape.hasShortID())
continue;
LOCAL_ASSERT_OUT(sprop->shortid < argc);
atomv[sprop->shortid] = JSID_TO_ATOM(sprop->id);
LOCAL_ASSERT_OUT(shape.shortid < argc);
atomv[shape.shortid] = JSID_TO_ATOM(shape.id);
}
ok = JS_TRUE;
for (i = 0; i < argc; i++) {
@ -2843,8 +2842,8 @@ Decompile(SprintStack *ss, jsbytecode *pc, intN nb, JSOp nextop)
case JSOP_CALLUPVAR:
case JSOP_GETUPVAR_DBG:
case JSOP_CALLUPVAR_DBG:
case JSOP_GETDSLOT:
case JSOP_CALLDSLOT:
case JSOP_GETFCSLOT:
case JSOP_CALLFCSLOT:
{
if (!jp->fun) {
JS_ASSERT(jp->script->savedCallerFun);
@ -2852,7 +2851,7 @@ Decompile(SprintStack *ss, jsbytecode *pc, intN nb, JSOp nextop)
}
if (!jp->localNames)
jp->localNames = js_GetLocalNameArray(cx, jp->fun, &jp->pool);
jp->localNames = jp->fun->getLocalNameArray(cx, &jp->pool);
uintN index = GET_UINT16(pc);
if (index < jp->fun->u.i.nupvars) {
@ -4031,7 +4030,7 @@ Decompile(SprintStack *ss, jsbytecode *pc, intN nb, JSOp nextop)
if (!fun->hasLocalNames()) {
innerLocalNames = NULL;
} else {
innerLocalNames = js_GetLocalNameArray(cx, fun, &cx->tempPool);
innerLocalNames = fun->getLocalNameArray(cx, &cx->tempPool);
if (!innerLocalNames)
return NULL;
}

View file

@ -351,12 +351,13 @@ OPDEF(JSOP_TRY, 132,"try", NULL, 1, 0, 0, 0, JOF_BYTE)
OPDEF(JSOP_FINALLY, 133,"finally", NULL, 1, 0, 2, 0, JOF_BYTE)
/*
* Get a dynamic slot from an object known to have at least one greater than
* the slot index number of values at obj->dslots. The CALL variant computes
* the callee and this-object in preparation for a JSOP_CALL.
* Get a slot from a flat closure function object that contains a snapshot of
* the closure-invariant upvar values. The immediate operand indexes the upvar
* in the function's u.i.script->upvars() array. The CALL variant computes the
* callee and this-object in preparation for a JSOP_CALL.
*/
OPDEF(JSOP_GETDSLOT, 134,"getdslot", NULL, 3, 0, 1, 19, JOF_UINT16|JOF_NAME)
OPDEF(JSOP_CALLDSLOT, 135,"calldslot", NULL, 3, 0, 2, 19, JOF_UINT16|JOF_NAME|JOF_CALLOP)
OPDEF(JSOP_GETFCSLOT, 134,"getfcslot", NULL, 3, 0, 1, 19, JOF_UINT16|JOF_NAME)
OPDEF(JSOP_CALLFCSLOT, 135,"callfcslot", NULL, 3, 0, 2, 19, JOF_UINT16|JOF_NAME|JOF_CALLOP)
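A hedged sketch (editorial, not part of the patch) of what the GETFCSLOT path boils down to, using GET_UINT16 and the flat-closure accessor from the jsobjinlines.h hunks above; the real opcode case also handles the CALL variant's this-push and the related assertions.
static js::Value
GetFlatClosureSlotSketch(JSObject *callee, jsbytecode *pc)
{
    uintN index = GET_UINT16(pc);              /* immediate upvar index */
    return callee->getFlatClosureUpvar(index); /* read from the snapshot vector */
}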
/*
* Bytecodes that avoid making an arguments object in most cases:

View file

@ -1209,8 +1209,8 @@ CheckStrictFormals(JSContext *cx, JSTreeContext *tc, JSFunction *fun,
if (tc->flags & (TCF_FUN_PARAM_ARGUMENTS | TCF_FUN_PARAM_EVAL)) {
JSAtomState *atoms = &cx->runtime->atomState;
atom = (tc->flags & TCF_FUN_PARAM_ARGUMENTS
? atoms->argumentsAtom : atoms->evalAtom);
atom = (tc->flags & TCF_FUN_PARAM_ARGUMENTS) ? atoms->argumentsAtom : atoms->evalAtom;
/* The definition's source position will be more precise. */
JSDefinition *dn = ALE_DEFN(tc->decls.lookup(atom));
JS_ASSERT(dn->pn_atom == atom);
@ -1546,7 +1546,11 @@ Compiler::compileFunctionBody(JSContext *cx, JSFunction *fun, JSPrincipals *prin
uintN nargs = fun->nargs;
if (nargs) {
jsuword *names = js_GetLocalNameArray(cx, fun, &cx->tempPool);
/*
* NB: do not use AutoLocalNameArray because it will release space
* allocated from cx->tempPool by DefineArg.
*/
jsuword *names = fun->getLocalNameArray(cx, &cx->tempPool);
if (!names) {
fn = NULL;
} else {
@ -1643,7 +1647,7 @@ BindLocalVariable(JSContext *cx, JSFunction *fun, JSAtom *atom,
if (atom == cx->runtime->atomState.argumentsAtom && !isArg)
return JS_TRUE;
return js_AddLocal(cx, fun, atom, localKind);
return fun->addLocal(cx, atom, localKind);
}
#if JS_HAS_DESTRUCTURING
@ -1661,7 +1665,7 @@ BindDestructuringArg(JSContext *cx, BindData *data, JSAtom *atom,
JS_ASSERT(tc->inFunction());
JSLocalKind localKind = js_LookupLocal(cx, tc->fun, atom, NULL);
JSLocalKind localKind = tc->fun->lookupLocal(cx, atom, NULL);
if (localKind != JSLOCAL_NONE) {
ReportCompileErrorNumber(cx, TS(tc->parser), NULL, JSREPORT_ERROR,
JSMSG_DESTRUCT_DUP_ARG);
@ -2149,7 +2153,7 @@ Parser::setFunctionKinds(JSFunctionBox *funbox, uint32& tcflags)
* also classifies enclosing functions holding upvars referenced in
* those descendants' bodies. So now we can check our "methods".
*
* Despecialize from branded method-identity-based shape to sprop-
* Despecialize from branded method-identity-based shape to shape-
* or slot-based shape if this function smells like a constructor
* and too many of its methods are *not* joinable null closures
* (i.e., they have one or more upvars fetched via the display).
@ -2325,9 +2329,9 @@ Parser::setFunctionKinds(JSFunctionBox *funbox, uint32& tcflags)
if (FUN_KIND(fun) == JSFUN_INTERPRETED && pn->pn_type == TOK_UPVARS) {
/*
* One or more upvars cannot be safely snapshot into a flat
* closure's dslot (see JSOP_GETDSLOT), so we loop again over
* all upvars, and for each non-free upvar, ensure that its
* containing function has been flagged as heavyweight.
* closure's non-reserved slot (see JSOP_GETFCSLOT), so we loop
* again over all upvars, and for each non-free upvar, ensure that
* its containing function has been flagged as heavyweight.
*
* The emitter must see TCF_FUN_HEAVYWEIGHT accurately before
* generating any code for a tree of nested functions.
@ -2569,6 +2573,7 @@ Parser::functionArguments(JSTreeContext &funtc, JSFunctionBox *funbox, JSFunctio
bool destructuringArg = false;
JSParseNode *list = NULL;
#endif
do {
switch (TokenKind tt = tokenStream.getToken()) {
#if JS_HAS_DESTRUCTURING
@ -2599,7 +2604,7 @@ Parser::functionArguments(JSTreeContext &funtc, JSFunctionBox *funbox, JSFunctio
* parameter that is to be destructured.
*/
jsint slot = fun->nargs;
if (!js_AddLocal(context, fun, NULL, JSLOCAL_ARG))
if (!fun->addLocal(context, NULL, JSLOCAL_ARG))
return false;
/*
@ -2615,7 +2620,8 @@ Parser::functionArguments(JSTreeContext &funtc, JSFunctionBox *funbox, JSFunctio
rhs->pn_cookie.set(funtc.staticLevel, uint16(slot));
rhs->pn_dflags |= PND_BOUND;
JSParseNode *item = JSParseNode::newBinaryOrAppend(TOK_ASSIGN, JSOP_NOP, lhs, rhs, &funtc);
JSParseNode *item =
JSParseNode::newBinaryOrAppend(TOK_ASSIGN, JSOP_NOP, lhs, rhs, &funtc);
if (!item)
return false;
if (!list) {
@ -2645,15 +2651,15 @@ Parser::functionArguments(JSTreeContext &funtc, JSFunctionBox *funbox, JSFunctio
*
* Duplicates are warned about (strict option) or cause errors (strict
* mode code), but we do those tests in one place below, after having
* parsed the body.
* parsed the body in case it begins with a "use strict"; directive.
*/
if (js_LookupLocal(context, fun, atom, NULL) != JSLOCAL_NONE) {
if (fun->lookupLocal(context, atom, NULL) != JSLOCAL_NONE) {
duplicatedArg = atom;
if (destructuringArg)
goto report_dup_and_destructuring;
}
#endif
if (!js_AddLocal(context, fun, atom, JSLOCAL_ARG))
if (!fun->addLocal(context, atom, JSLOCAL_ARG))
return false;
break;
}
@ -2788,12 +2794,12 @@ Parser::functionDef(JSAtom *funAtom, FunctionType type, uintN lambda)
* we add a variable even if a parameter with the given name
* already exists.
*/
localKind = js_LookupLocal(context, tc->fun, funAtom, &index);
localKind = tc->fun->lookupLocal(context, funAtom, &index);
switch (localKind) {
case JSLOCAL_NONE:
case JSLOCAL_ARG:
index = tc->fun->u.i.nvars;
if (!js_AddLocal(context, tc->fun, funAtom, JSLOCAL_VAR))
if (!tc->fun->addLocal(context, funAtom, JSLOCAL_VAR))
return NULL;
/* FALL THROUGH */
@ -3243,7 +3249,7 @@ BindLet(JSContext *cx, BindData *data, JSAtom *atom, JSTreeContext *tc)
uintN slot = JSSLOT_FREE(&js_BlockClass) + n;
if (slot >= blockObj->numSlots() && !blockObj->growSlots(cx, slot + 1))
return false;
blockObj->scope()->freeslot = slot + 1;
blockObj->freeslot = slot + 1;
blockObj->setSlot(slot, PrivateValue(pn));
return true;
}
@ -3255,11 +3261,10 @@ PopStatement(JSTreeContext *tc)
if (stmt->flags & SIF_SCOPE) {
JSObject *obj = stmt->blockObj;
JSScope *scope = obj->scope();
JS_ASSERT(!OBJ_IS_CLONED_BLOCK(obj));
for (JSScopeProperty *sprop = scope->lastProperty(); sprop; sprop = sprop->parent) {
JSAtom *atom = JSID_TO_ATOM(sprop->id);
for (Shape::Range r = obj->lastProperty()->all(); !r.empty(); r.popFront()) {
JSAtom *atom = JSID_TO_ATOM(r.front().id);
/* Beware the empty destructuring dummy. */
if (atom == tc->parser->context->runtime->atomState.emptyAtom)
@ -3489,7 +3494,7 @@ BindVarOrConst(JSContext *cx, BindData *data, JSAtom *atom, JSTreeContext *tc)
return JS_TRUE;
}
JSLocalKind localKind = js_LookupLocal(cx, tc->fun, atom, NULL);
JSLocalKind localKind = tc->fun->lookupLocal(cx, atom, NULL);
if (localKind == JSLOCAL_NONE) {
/*
* Property not found in current variable scope: we have not seen this
@ -3965,7 +3970,7 @@ CheckDestructuring(JSContext *cx, BindData *data,
JSPROP_ENUMERATE |
JSPROP_PERMANENT |
JSPROP_SHARED,
JSScopeProperty::HAS_SHORTID, 0, NULL);
Shape::HAS_SHORTID, 0, NULL);
if (!ok)
goto out;
}
@ -6039,9 +6044,8 @@ JSParseNode *
Parser::bitXorExpr()
{
JSParseNode *pn = bitAndExpr();
while (pn && tokenStream.matchToken(TOK_BITXOR)) {
while (pn && tokenStream.matchToken(TOK_BITXOR))
pn = JSParseNode::newBinaryOrAppend(TOK_BITXOR, JSOP_BITXOR, pn, bitAndExpr(), tc);
}
return pn;
}

View file

@ -901,9 +901,9 @@ struct JSFunctionBox : public JSObjectBox
* be joined to one compiler-created null closure shared among N different
* closure environments.
*
* We despecialize from caching function objects, caching slots or sprops
* We despecialize from caching function objects, caching slots or shapes
* instead, because an unbranded object may still have joined methods (for
* which sprop->isMethod), since PropertyCache::fill gives precedence to
* which shape->isMethod), since PropertyCache::fill gives precedence to
* joined methods over branded methods.
*/
bool shouldUnbrand(uintN methods, uintN slowMethods) const;

View file

@ -39,8 +39,10 @@
* ***** END LICENSE BLOCK ***** */
#include "jspropertycache.h"
#include "jspropertycacheinlines.h"
#include "jscntxt.h"
#include "jsnum.h"
#include "jsobjinlines.h"
#include "jspropertycacheinlines.h"
using namespace js;
@ -48,10 +50,9 @@ JS_STATIC_ASSERT(sizeof(PCVal) == sizeof(jsuword));
JS_REQUIRES_STACK PropertyCacheEntry *
PropertyCache::fill(JSContext *cx, JSObject *obj, uintN scopeIndex, uintN protoIndex,
JSObject *pobj, JSScopeProperty *sprop, JSBool adding)
JSObject *pobj, const Shape *shape, JSBool adding)
{
jsbytecode *pc;
JSScope *scope;
jsuword kshape, vshape;
JSOp op;
const JSCodeSpec *cs;
@ -67,11 +68,10 @@ PropertyCache::fill(JSContext *cx, JSObject *obj, uintN scopeIndex, uintN protoI
}
/*
* Check for fill from js_SetPropertyHelper where the setter removed sprop
* from pobj's scope (via unwatch or delete, e.g.).
* Check for fill from js_SetPropertyHelper where the setter removed shape
* from pobj (via unwatch or delete, e.g.).
*/
scope = pobj->scope();
if (!scope->hasProperty(sprop)) {
if (!pobj->nativeContains(*shape)) {
PCMETER(oddfills++);
return JS_NO_PROP_CACHE_FILL;
}
@ -81,7 +81,7 @@ PropertyCache::fill(JSContext *cx, JSObject *obj, uintN scopeIndex, uintN protoI
* and setter hooks can change the prototype chain using JS_SetPrototype
* after js_LookupPropertyWithFlags has returned the nominal protoIndex,
* we have to validate protoIndex if it is non-zero. If it is zero, then
* we know thanks to the scope->hasProperty test above, combined with the
* we know thanks to the pobj->nativeContains test above, combined with the
* fact that obj == pobj, that protoIndex is invariant.
*
* The scopeIndex can't be wrong. We require JS_SetParent calls to happen
@ -138,37 +138,38 @@ PropertyCache::fill(JSContext *cx, JSObject *obj, uintN scopeIndex, uintN protoI
* getter, so get of a function is idempotent.
*/
if (cs->format & JOF_CALLOP) {
if (sprop->isMethod()) {
if (shape->isMethod()) {
/*
* A compiler-created function object, AKA a method, already
* memoized in the property tree.
*/
JS_ASSERT(scope->hasMethodBarrier());
JSObject &funobj = sprop->methodObject();
JS_ASSERT(&funobj == &pobj->lockedGetSlot(sprop->slot).toObject());
JS_ASSERT(pobj->hasMethodBarrier());
JSObject &funobj = shape->methodObject();
JS_ASSERT(&funobj == &pobj->lockedGetSlot(shape->slot).toObject());
vword.setFunObj(funobj);
break;
}
if (!scope->generic() &&
sprop->hasDefaultGetter() &&
SPROP_HAS_VALID_SLOT(sprop, scope)) {
const Value &v = pobj->lockedGetSlot(sprop->slot);
if (!pobj->generic() &&
shape->hasDefaultGetter() &&
pobj->containsSlot(shape->slot)) {
const Value &v = pobj->lockedGetSlot(shape->slot);
JSObject *funobj;
if (IsFunctionObject(v, &funobj)) {
/*
* Great, we have a function-valued prototype property
* where the getter is JS_PropertyStub. The type id in
* pobj's scope does not evolve with changes to property
* values, however.
* pobj does not evolve with changes to property values,
* however.
*
* So here, on first cache fill for this method, we brand
* the scope with a new shape and set the JSScope::BRANDED
* flag. Once this flag is set, any property assignment
* that changes the value from or to a different function
* object will result in shape being regenerated.
* obj with a new shape and set the JSObject::BRANDED flag.
* Once this flag is set, any property assignment that
* changes the value from or to a different function object
* will result in shape being regenerated.
*/
if (!scope->branded()) {
if (!pobj->branded()) {
PCMETER(brandfills++);
#ifdef DEBUG_notme
fprintf(stderr,
@ -178,7 +179,7 @@ PropertyCache::fill(JSContext *cx, JSObject *obj, uintN scopeIndex, uintN protoI
JS_GetFunctionName(GET_FUNCTION_PRIVATE(cx, JSVAL_TO_OBJECT(v))),
obj->shape());
#endif
if (!scope->brand(cx, sprop->slot, v))
if (!pobj->brand(cx, shape->slot, v))
return JS_NO_PROP_CACHE_FILL;
}
vword.setFunObj(*funobj);
@ -192,24 +193,23 @@ PropertyCache::fill(JSContext *cx, JSObject *obj, uintN scopeIndex, uintN protoI
* with stub getters and setters, we can cache the slot.
*/
if (!(cs->format & (JOF_SET | JOF_FOR)) &&
(!(cs->format & JOF_INCDEC) || sprop->hasDefaultSetter()) &&
sprop->hasDefaultGetter() &&
SPROP_HAS_VALID_SLOT(sprop, scope)) {
/* Great, let's cache sprop's slot and use it on cache hit. */
vword.setSlot(sprop->slot);
(!(cs->format & JOF_INCDEC) || shape->hasDefaultSetter()) &&
shape->hasDefaultGetter() &&
pobj->containsSlot(shape->slot)) {
/* Great, let's cache shape's slot and use it on cache hit. */
vword.setSlot(shape->slot);
} else {
/* Best we can do is to cache sprop (still a nice speedup). */
vword.setSprop(sprop);
/* Best we can do is to cache shape (still a nice speedup). */
vword.setShape(shape);
if (adding &&
sprop == scope->lastProperty() &&
scope->shape == sprop->shape) {
pobj->shape() == shape->shape) {
/*
* Our caller added a new property. We also know that a setter
* that js_NativeSet could have run has not mutated the scope,
* so the added property is still the last one added, and the
* scope is not branded.
* that js_NativeSet might have run has not mutated pobj, so
* the added property is still the last one added, and pobj is
* not branded.
*
* We want to cache under scope's shape before the property
* We want to cache under pobj's shape before the property
* addition to bias for the case when the mutator opcode
* always adds the same property. This allows us to optimize
* periodic execution of object initializers or other explicit
@ -230,26 +230,10 @@ PropertyCache::fill(JSContext *cx, JSObject *obj, uintN scopeIndex, uintN protoI
* that on the third and subsequent iterations the cache will
* be hit because the shape is no longer updated.
*/
JS_ASSERT(!scope->isSharedEmpty());
if (sprop->parent) {
kshape = sprop->parent->shape;
} else {
/*
* If obj had its own empty scope before, with a unique
* shape, that is lost. Here we only attempt to find a
* matching empty scope. In unusual cases involving
* __proto__ assignment we may not find one.
*/
JSObject *proto = obj->getProto();
if (!proto || !proto->isNative())
return JS_NO_PROP_CACHE_FILL;
JSScope *protoscope = proto->scope();
if (!protoscope->emptyScope ||
protoscope->emptyScope->clasp != obj->getClass()) {
return JS_NO_PROP_CACHE_FILL;
}
kshape = protoscope->emptyScope->shape;
}
JS_ASSERT(shape == pobj->lastProperty());
JS_ASSERT(!pobj->nativeEmpty());
kshape = shape->previous()->shape;
/*
* When adding we predict no prototype object will later gain a
@ -262,7 +246,7 @@ PropertyCache::fill(JSContext *cx, JSObject *obj, uintN scopeIndex, uintN protoI
if (kshape == 0) {
kshape = obj->shape();
vshape = scope->shape;
vshape = pobj->shape();
}
JS_ASSERT(kshape < SHAPE_OVERFLOW_BIT);
@ -281,11 +265,10 @@ PropertyCache::fill(JSContext *cx, JSObject *obj, uintN scopeIndex, uintN protoI
* Make sure that a later shadowing assignment will enter
* PurgeProtoChain and invalidate this entry, bug 479198.
*
* This is thread-safe even though obj is not locked. Only the
* DELEGATE bit of obj->classword can change at runtime, given that
* obj is native; and the bit is only set, never cleared. And on
* platforms where another CPU can fail to see this write, it's OK
* because the property cache and JIT cache are thread-local.
* This is not thread-safe but we are about to make all objects
* except multi-threaded wrappers (bug 566951) single-threaded.
* And multi-threaded wrappers are non-native Proxy instances, so
* they won't use the property cache.
*/
obj->setDelegate();
}
@ -363,7 +346,7 @@ PropertyCache::fullTest(JSContext *cx, jsbytecode *pc, JSObject **objp, JSObject
return atom;
}
if (entry->kshape != obj->map->shape) {
if (entry->kshape != obj->shape()) {
PCMETER(kshapemisses++);
return GetAtomFromBytecode(cx, pc, op, cs);
}
@ -401,8 +384,7 @@ PropertyCache::fullTest(JSContext *cx, jsbytecode *pc, JSObject **objp, JSObject
jsid id = ATOM_TO_JSID(atom);
id = js_CheckForStringIndex(id);
JS_ASSERT(pobj->scope()->lookup(id));
JS_ASSERT_IF(pobj->scope()->object, pobj->scope()->object == pobj);
JS_ASSERT(pobj->nativeContains(id));
#endif
*pobjp = pobj;
return NULL;

View file

@ -71,7 +71,7 @@ const uint32 SHAPE_OVERFLOW_BIT = JS_BIT(32 - PCVCAP_TAGBITS);
/*
* Property cache value. This is simply a tagged union:
* PCVal = (JSObject * | uint32 | JSScopeProperty *).
* PCVal = (JSObject * | uint32 | js::Shape *).
* It is the type of PropertyCacheEntry::vword and combines with the tag bits
* of PropertyCacheEntry::vcap to tell how to get or set the property, once a
* property cache hit is validated.
@ -84,7 +84,7 @@ class PCVal
enum {
OBJECT = 0,
SLOT = 1,
SPROP = 2,
SHAPE = 2,
TAG = 3
};
@ -95,16 +95,27 @@ class PCVal
void setNull() { v = 0; }
bool isFunObj() const { return (v & TAG) == OBJECT; }
JSObject &toFunObj() const { JS_ASSERT(isFunObj()); return *reinterpret_cast<JSObject *>(v); }
void setFunObj(JSObject &obj) { v = reinterpret_cast<jsuword>(&obj); }
JSObject &toFunObj() const {
JS_ASSERT(isFunObj());
return *reinterpret_cast<JSObject *>(v);
}
void setFunObj(JSObject &obj) {
v = reinterpret_cast<jsuword>(&obj);
}
bool isSlot() const { return v & SLOT; }
uint32 toSlot() const { JS_ASSERT(isSlot()); return uint32(v) >> 1; }
void setSlot(uint32 slot) { v = (jsuword(slot) << 1) | SLOT; }
bool isSprop() const { return (v & TAG) == SPROP; }
JSScopeProperty *toSprop() const { JS_ASSERT(isSprop()); return reinterpret_cast<JSScopeProperty *>(v & ~TAG); }
void setSprop(JSScopeProperty *sprop) { JS_ASSERT(sprop); v = reinterpret_cast<jsuword>(sprop) | SPROP; }
bool isShape() const { return (v & TAG) == SHAPE; }
const js::Shape *toShape() const {
JS_ASSERT(isShape());
return reinterpret_cast<js::Shape *>(v & ~TAG);
}
void setShape(const js::Shape *shape) {
JS_ASSERT(shape);
v = reinterpret_cast<jsuword>(shape) | SHAPE;
}
};
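Given the three tags above, a validated hit dispatches on the value word before using it. A minimal sketch of that consumption, assuming the PCVal declared above is in scope; the function name and the surrounding interpreter plumbing are illustrative, not part of the patch:

```cpp
// Illustrative only: shows the intended discrimination order on a hit.
static const char *
DescribeHit(const PCVal &vword)
{
    if (vword.isFunObj())
        return "branded method: use the cached function object";   // tag OBJECT
    if (vword.isSlot())
        return "plain data property: read pobj's slot directly";   // tag SLOT
    JS_ASSERT(vword.isShape());
    return "other property: go through the cached js::Shape";      // tag SHAPE
}
```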
struct PropertyCacheEntry
@ -197,7 +208,7 @@ class PropertyCache
/*
* Add kshape rather than xor it to avoid collisions between nearby bytecode
* that are evolving an object by setting successive properties, incrementing
* the object's scope->shape on each set.
* the object's shape on each set.
*/
static inline jsuword
hash(jsbytecode *pc, jsuword kshape)
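The collision the comment warns about is easy to reproduce: when a nearby store site (pc advanced by a small delta) sees an object whose shape has advanced by the same delta, xor can cancel the two deltas while addition cannot. A standalone toy, not the real hash:

```cpp
#include <cstdint>
#include <cstdio>

int main() {
    uintptr_t pc = 0x1000, kshape = 0x20;          // first set site / shape
    uintptr_t pc2 = pc + 1, kshape2 = kshape + 1;  // nearby set site, shape bumped once
    std::printf("xor keys: %#zx vs %#zx (collide)\n",
                size_t(pc ^ kshape), size_t(pc2 ^ kshape2));
    std::printf("add keys: %#zx vs %#zx (distinct)\n",
                size_t(pc + kshape), size_t(pc2 + kshape2));
    return 0;
}
```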
@ -239,18 +250,18 @@ class PropertyCache
/*
* Test for cached information about creating a new own data property on obj at pc.
*
* On a hit, set *spropp to an sprop from the property tree describing the
* On a hit, set *shapep to a shape from the property tree describing the
* new property as well as all existing properties on obj and return
* true. Otherwise return false.
*
* Hit or miss, *entryp receives a pointer to the property cache entry.
*/
JS_ALWAYS_INLINE bool testForInit(JSRuntime *rt, jsbytecode *pc, JSObject *obj, JSScope *scope,
JSScopeProperty **spropp, PropertyCacheEntry **entryp);
JS_ALWAYS_INLINE bool testForInit(JSRuntime *rt, jsbytecode *pc, JSObject *obj,
const js::Shape **shapep, PropertyCacheEntry **entryp);
/*
* Fill property cache entry for key cx->fp->pc, optimized value word
* computed from obj and sprop, and entry capability forged from 24-bit
* computed from obj and shape, and entry capability forged from 24-bit
* obj->shape(), 4-bit scopeIndex, and 4-bit protoIndex.
*
* Return the filled cache entry or JS_NO_PROP_CACHE_FILL if caching was
@ -258,7 +269,7 @@ class PropertyCache
*/
JS_REQUIRES_STACK PropertyCacheEntry *fill(JSContext *cx, JSObject *obj, uintN scopeIndex,
uintN protoIndex, JSObject *pobj,
JSScopeProperty *sprop, JSBool adding = false);
const js::Shape *shape, JSBool adding = false);
void purge(JSContext *cx);
void purgeForScript(JSScript *script);

View file

@ -42,6 +42,7 @@
#ifndef jspropertycacheinlines_h___
#define jspropertycacheinlines_h___
#include "jslock.h"
#include "jspropertycache.h"
#include "jsscope.h"
@ -66,7 +67,7 @@ PropertyCache::matchShape(JSContext *cx, JSObject *obj, uint32 shape)
*
* We must lock pobj on a hit in order to close races with threads that might
* be deleting a property from its scope, or otherwise invalidating property
* caches (on all threads) by re-generating scope->shape.
* caches (on all threads) by re-generating JSObject::shape().
*/
JS_ALWAYS_INLINE void
PropertyCache::test(JSContext *cx, jsbytecode *pc, JSObject *&obj,
@ -74,7 +75,7 @@ PropertyCache::test(JSContext *cx, jsbytecode *pc, JSObject *&obj,
{
JS_ASSERT(this == &JS_PROPERTY_CACHE(cx));
uint32 kshape = obj->map->shape;
uint32 kshape = obj->shape();
entry = &table[hash(pc, kshape)];
PCMETER(pctestentry = entry);
PCMETER(tests++);
@ -104,14 +105,14 @@ JS_ALWAYS_INLINE bool
PropertyCache::testForSet(JSContext *cx, jsbytecode *pc, JSObject *obj,
PropertyCacheEntry **entryp, JSObject **obj2p, JSAtom **atomp)
{
uint32 shape = obj->map->shape;
uint32 shape = obj->shape();
PropertyCacheEntry *entry = &table[hash(pc, shape)];
*entryp = entry;
PCMETER(pctestentry = entry);
PCMETER(tests++);
PCMETER(settests++);
JS_ASSERT(entry->kshape < SHAPE_OVERFLOW_BIT);
if (entry->kpc == pc && entry->kshape == shape && matchShape(cx, obj, shape))
if (entry->kpc == pc && entry->kshape == shape && CX_OWNS_OBJECT_TITLE(cx, obj))
return true;
#ifdef DEBUG
@ -129,12 +130,12 @@ PropertyCache::testForSet(JSContext *cx, jsbytecode *pc, JSObject *obj,
}
JS_ALWAYS_INLINE bool
PropertyCache::testForInit(JSRuntime *rt, jsbytecode *pc, JSObject *obj, JSScope *scope,
JSScopeProperty **spropp, PropertyCacheEntry **entryp)
PropertyCache::testForInit(JSRuntime *rt, jsbytecode *pc, JSObject *obj,
const js::Shape **shapep, PropertyCacheEntry **entryp)
{
JS_ASSERT(scope->object == obj);
JS_ASSERT(!scope->sealed());
uint32 kshape = scope->shape;
JS_ASSERT(obj->freeslot >= JSSLOT_FREE(obj->getClass()));
JS_ASSERT(!obj->sealed());
uint32 kshape = obj->shape();
PropertyCacheEntry *entry = &table[hash(pc, kshape)];
*entryp = entry;
PCMETER(pctestentry = entry);
@ -148,8 +149,8 @@ PropertyCache::testForInit(JSRuntime *rt, jsbytecode *pc, JSObject *obj, JSScope
PCMETER(pchits++);
PCMETER(inipchits++);
JS_ASSERT(entry->vcapTag() == 0);
*spropp = entry->vword.toSprop();
JS_ASSERT((*spropp)->writable());
*shapep = entry->vword.toShape();
JS_ASSERT((*shapep)->writable());
return true;
}
return false;

File diff not shown because it is too large

View file

@ -41,40 +41,106 @@
#define jspropertytree_h___
#include "jsarena.h"
#include "jsdhash.h"
#include "jshashtable.h"
#include "jsprvtd.h"
struct JSScope;
namespace js {
JSDHashOperator RemoveNodeIfDead(JSDHashTable *table, JSDHashEntryHdr *hdr,
uint32 number, void *arg);
enum {
MAX_KIDS_PER_CHUNK = 10U,
CHUNK_HASH_THRESHOLD = 30U
};
void SweepScopeProperties(JSContext *cx);
struct KidsChunk {
js::Shape *kids[MAX_KIDS_PER_CHUNK];
KidsChunk *next;
static KidsChunk *create(JSContext *cx);
static KidsChunk *destroy(JSContext *cx, KidsChunk *chunk);
};
struct ShapeHasher {
typedef js::Shape *Key;
typedef const js::Shape *Lookup;
static HashNumber hash(const Lookup l);
static bool match(Key k, Lookup l);
};
typedef HashSet<js::Shape *, ShapeHasher, SystemAllocPolicy> KidsHash;
class KidsPointer {
private:
enum {
SHAPE = 0,
CHUNK = 1,
HASH = 2,
TAG = 3
};
jsuword w;
public:
bool isNull() const { return !w; }
void setNull() { w = 0; }
bool isShapeOrNull() const { return (w & TAG) == SHAPE; }
bool isShape() const { return (w & TAG) == SHAPE && !isNull(); }
js::Shape *toShape() const {
JS_ASSERT(isShape());
return reinterpret_cast<js::Shape *>(w & ~jsuword(TAG));
}
void setShape(js::Shape *shape) {
JS_ASSERT(shape);
JS_ASSERT((reinterpret_cast<jsuword>(shape) & TAG) == 0);
w = reinterpret_cast<jsuword>(shape) | SHAPE;
}
bool isChunk() const { return (w & TAG) == CHUNK; }
KidsChunk *toChunk() const {
JS_ASSERT(isChunk());
return reinterpret_cast<KidsChunk *>(w & ~jsuword(TAG));
}
void setChunk(KidsChunk *chunk) {
JS_ASSERT(chunk);
JS_ASSERT((reinterpret_cast<jsuword>(chunk) & TAG) == 0);
w = reinterpret_cast<jsuword>(chunk) | CHUNK;
}
bool isHash() const { return (w & TAG) == HASH; }
KidsHash *toHash() const {
JS_ASSERT(isHash());
return reinterpret_cast<KidsHash *>(w & ~jsuword(TAG));
}
void setHash(KidsHash *hash) {
JS_ASSERT(hash);
JS_ASSERT((reinterpret_cast<jsuword>(hash) & TAG) == 0);
w = reinterpret_cast<jsuword>(hash) | HASH;
}
};
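KidsPointer is the usual low-bit-tagged pointer: because Shape, KidsChunk and KidsHash are all at least word aligned, the two low bits are free to encode which of the three a kid pointer refers to. A standalone toy of that trick (toy types, not the real classes):

```cpp
#include <cassert>
#include <cstdint>

enum { TOY_SHAPE = 0, TOY_CHUNK = 1, TOY_HASH = 2, TOY_TAG = 3 };

int main() {
    alignas(4) int node = 42;                        // stands in for an aligned KidsChunk
    uintptr_t w = reinterpret_cast<uintptr_t>(&node);
    assert((w & TOY_TAG) == 0);                      // alignment keeps the low bits clear

    uintptr_t tagged = w | TOY_CHUNK;                // setChunk(): stash the kind in the low bits
    assert((tagged & TOY_TAG) == TOY_CHUNK);         // isChunk(): recover the kind

    int *p = reinterpret_cast<int *>(tagged & ~uintptr_t(TOY_TAG));
    assert(*p == 42);                                // toChunk(): mask the tag back off
    return 0;
}
```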
class PropertyTree
{
friend struct ::JSScope;
friend void js::SweepScopeProperties(JSContext *cx);
friend struct ::JSFunction;
JSDHashTable hash;
JSScopeProperty *freeList;
JSArenaPool arenaPool;
uint32 emptyShapeChanges;
JSArenaPool arenaPool;
js::Shape *freeList;
bool insertChild(JSContext *cx, JSScopeProperty *parent, JSScopeProperty *child);
void removeChild(JSContext *cx, JSScopeProperty *child);
void emptyShapeChange(uint32 oldEmptyShape, uint32 newEmptyShape);
bool insertChild(JSContext *cx, js::Shape *parent, js::Shape *child);
void removeChild(JSContext *cx, js::Shape *child);
public:
bool init();
void finish();
JSScopeProperty *newScopeProperty(JSContext *cx, bool gcLocked = false);
js::Shape *newShape(JSContext *cx, bool gcLocked = false);
js::Shape *getChild(JSContext *cx, js::Shape *parent, const js::Shape &child);
JSScopeProperty *getChild(JSContext *cx, JSScopeProperty *parent, uint32 shape,
const JSScopeProperty &child);
static void orphanKids(JSContext *cx, js::Shape *shape);
static void sweepShapes(JSContext *cx);
#ifdef DEBUG
static void meter(JSBasicStats *bs, js::Shape *node);
#endif
};
} /* namespace js */
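The point of PropertyTree::getChild is structural sharing: objects that add the same properties in the same order end up on the same lineage of Shape nodes instead of each copying a property table. A standalone toy of that sharing (it ignores kid chunks, hashing and dictionary mode, and is not the real implementation):

```cpp
#include <cassert>
#include <map>
#include <string>
#include <utility>

struct ToyShape {
    std::string id;
    const ToyShape *parent;
};

// Toy getChild: return the existing kid for (parent, id) or create it once.
static const ToyShape *
getChild(const ToyShape *parent, const std::string &id)
{
    static std::map<std::pair<const ToyShape *, std::string>, ToyShape> kids;
    auto it = kids.find({parent, id});
    if (it == kids.end())
        it = kids.emplace(std::make_pair(parent, id), ToyShape{id, parent}).first;
    return &it->second;
}

int main() {
    const ToyShape *a = getChild(getChild(nullptr, "x"), "y");  // object A adds x then y
    const ToyShape *b = getChild(getChild(nullptr, "x"), "y");  // object B adds x then y
    assert(a == b);   // same order of adds => shared shape lineage
    return 0;
}
```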

View file

@ -130,7 +130,7 @@ JSProxyHandler::get(JSContext *cx, JSObject *proxy, JSObject *receiver, jsid id,
}
if (desc.attrs & JSPROP_SHORTID)
id = INT_TO_JSID(desc.shortid);
return callJSPropertyOp(cx, desc.getter, proxy, id, vp);
return CallJSPropertyOp(cx, desc.getter, proxy, id, vp);
}
bool
@ -149,7 +149,7 @@ JSProxyHandler::set(JSContext *cx, JSObject *proxy, JSObject *receiver, jsid id,
}
if (desc.attrs & JSPROP_SHORTID)
id = INT_TO_JSID(desc.shortid);
return callJSPropertyOpSetter(cx, desc.setter, proxy, id, vp);
return CallJSPropertyOpSetter(cx, desc.setter, proxy, id, vp);
}
if (desc.attrs & JSPROP_READONLY)
return true;
@ -166,7 +166,7 @@ JSProxyHandler::set(JSContext *cx, JSObject *proxy, JSObject *receiver, jsid id,
}
if (desc.attrs & JSPROP_SHORTID)
id = INT_TO_JSID(desc.shortid);
return callJSPropertyOpSetter(cx, desc.setter, proxy, id, vp);
return CallJSPropertyOpSetter(cx, desc.setter, proxy, id, vp);
}
if (desc.attrs & JSPROP_READONLY)
return true;
@ -1005,14 +1005,16 @@ NewProxyObject(JSContext *cx, JSProxyHandler *handler, const Value &priv, JSObje
bool fun = call || construct;
Class *clasp = fun ? &FunctionProxyClass : &ObjectProxyClass;
JSObject *obj = NewNonFunction<WithProto::Given>(cx, clasp, proto, parent);
if (!obj || (construct && !js_EnsureReservedSlots(cx, obj, 0)))
if (!obj || (construct && !obj->ensureInstanceReservedSlots(cx, 0)))
return NULL;
obj->setSlot(JSSLOT_PROXY_HANDLER, PrivateValue(handler));
obj->setSlot(JSSLOT_PROXY_PRIVATE, priv);
if (fun) {
obj->setSlot(JSSLOT_PROXY_CALL, call ? ObjectValue(*call) : UndefinedValue());
if (construct)
obj->setSlot(JSSLOT_PROXY_CONSTRUCT, construct ? ObjectValue(*construct) : UndefinedValue());
if (construct) {
obj->setSlot(JSSLOT_PROXY_CONSTRUCT,
construct ? ObjectValue(*construct) : UndefinedValue());
}
}
return obj;
}

View file

@ -74,19 +74,6 @@ typedef uint8 jsbytecode;
typedef uint8 jssrcnote;
typedef uint32 jsatomid;
#ifdef __cplusplus
/* Class and struct forward declarations in namespace js. */
extern "C++" {
namespace js {
struct Parser;
struct Compiler;
class RegExp;
}
}
#endif
/* Struct typedefs. */
typedef struct JSArgumentFormatMap JSArgumentFormatMap;
typedef struct JSCodeGenerator JSCodeGenerator;
@ -98,7 +85,6 @@ typedef struct JSObjectBox JSObjectBox;
typedef struct JSParseNode JSParseNode;
typedef struct JSProperty JSProperty;
typedef struct JSSharpObjectMap JSSharpObjectMap;
typedef struct JSEmptyScope JSEmptyScope;
typedef struct JSThread JSThread;
typedef struct JSThreadData JSThreadData;
typedef struct JSTreeContext JSTreeContext;
@ -113,9 +99,6 @@ typedef struct JSAtomState JSAtomState;
typedef struct JSCodeSpec JSCodeSpec;
typedef struct JSPrinter JSPrinter;
typedef struct JSRegExpStatics JSRegExpStatics;
typedef struct JSScope JSScope;
typedef struct JSScopeOps JSScopeOps;
typedef struct JSScopeProperty JSScopeProperty;
typedef struct JSStackHeader JSStackHeader;
typedef struct JSSubString JSSubString;
typedef struct JSNativeTraceInfo JSNativeTraceInfo;
@ -136,6 +119,8 @@ extern "C++" {
namespace js {
struct ArgumentsData;
class RegExp;
class RegExpStatics;
class AutoStringRooter;
@ -147,6 +132,8 @@ struct TraceMonitor;
class StackSpace;
class StackSegment;
struct Compiler;
struct Parser;
class TokenStream;
struct Token;
struct TokenPos;
@ -179,6 +166,8 @@ class DeflatedStringCache;
class PropertyCache;
struct PropertyCacheEntry;
struct Shape;
struct EmptyShape;
} /* namespace js */
@ -290,7 +279,7 @@ typedef struct JSDebugHooks {
void *debugErrorHookData;
} JSDebugHooks;
/* JSObjectOps function pointer typedefs. */
/* js::ObjectOps function pointer typedefs. */
/*
* Look for id in obj and its prototype chain, returning false on error or

View file

@ -170,7 +170,7 @@ typedef class JSWrapper JSWrapper;
typedef class JSCrossCompartmentWrapper JSCrossCompartmentWrapper;
#endif
/* JSClass (and JSObjectOps where appropriate) function pointer typedefs. */
/* JSClass (and js::ObjectOps where appropriate) function pointer typedefs. */
/*
* Add, delete, get or set a property named by id in obj. Note the jsid id

View file

@ -655,7 +655,7 @@ regexp_enumerate(JSContext *cx, JSObject *obj)
js::Class js_RegExpClass = {
js_RegExp_str,
JSCLASS_HAS_PRIVATE | JSCLASS_NEW_RESOLVE |
JSCLASS_HAS_RESERVED_SLOTS(JSObject::REGEXP_FIXED_RESERVED_SLOTS) |
JSCLASS_HAS_RESERVED_SLOTS(JSObject::REGEXP_CLASS_RESERVED_SLOTS) |
JSCLASS_MARK_IS_TRACE | JSCLASS_HAS_CACHED_PROTO(JSProto_RegExp),
PropertyStub, /* addProperty */
PropertyStub, /* delProperty */

File diff not shown because it is too large

File diff not shown because it is too large

View file

@ -40,6 +40,8 @@
#ifndef jsscopeinlines_h___
#define jsscopeinlines_h___
#include <new>
#include "jsbool.h"
#include "jscntxt.h"
#include "jsdbgapi.h"
#include "jsfun.h"
@ -48,212 +50,122 @@
#include "jscntxtinlines.h"
inline JSEmptyScope *
JSScope::createEmptyScope(JSContext *cx, js::Class *clasp)
inline void
js::Shape::freeTable(JSContext *cx)
{
JS_ASSERT(!isSharedEmpty());
JS_ASSERT(!emptyScope);
emptyScope = cx->create<JSEmptyScope>(cx, clasp);
return emptyScope;
if (table) {
cx->destroy(table);
table = NULL;
}
}
inline JSEmptyScope *
JSScope::getEmptyScope(JSContext *cx, js::Class *clasp)
inline js::EmptyShape *
JSObject::getEmptyShape(JSContext *cx, js::Class *aclasp)
{
if (emptyScope) {
JS_ASSERT(clasp == emptyScope->clasp);
return emptyScope->hold();
}
return createEmptyScope(cx, clasp);
if (emptyShape)
JS_ASSERT(aclasp == emptyShape->getClass());
else
emptyShape = js::EmptyShape::create(cx, aclasp);
return emptyShape;
}
inline bool
JSScope::ensureEmptyScope(JSContext *cx, js::Class *clasp)
JSObject::canProvideEmptyShape(js::Class *aclasp)
{
if (emptyScope) {
JS_ASSERT(clasp == emptyScope->clasp);
return true;
}
if (!createEmptyScope(cx, clasp))
return false;
/* We are going to have only single ref to the scope. */
JS_ASSERT(emptyScope->nrefs == 2);
emptyScope->nrefs = 1;
return true;
return !emptyShape || emptyShape->getClass() == aclasp;
}
inline void
JSScope::updateShape(JSContext *cx)
JSObject::updateShape(JSContext *cx)
{
JS_ASSERT(object);
js::LeaveTraceIfGlobalObject(cx, object);
shape = (hasOwnShape() || !lastProp) ? js_GenerateShape(cx, false) : lastProp->shape;
JS_ASSERT(isNative());
js::LeaveTraceIfGlobalObject(cx, this);
if (hasOwnShape())
setOwnShape(js_GenerateShape(cx, false));
else
objShape = lastProp->shape;
}
inline void
JSScope::updateFlags(const JSScopeProperty *sprop, bool isDefinitelyAtom)
JSObject::updateFlags(const js::Shape *shape, bool isDefinitelyAtom)
{
jsuint index;
if (!isDefinitelyAtom && js_IdIsIndex(sprop->id, &index))
setIndexedProperties();
if (!isDefinitelyAtom && js_IdIsIndex(shape->id, &index))
setIndexed();
if (sprop->isMethod())
if (shape->isMethod())
setMethodBarrier();
}
inline void
JSScope::extend(JSContext *cx, JSScopeProperty *sprop, bool isDefinitelyAtom)
JSObject::extend(JSContext *cx, const js::Shape *shape, bool isDefinitelyAtom)
{
++entryCount;
setLastProperty(sprop);
setLastProperty(shape);
updateFlags(shape, isDefinitelyAtom);
updateShape(cx);
updateFlags(sprop, isDefinitelyAtom);
}
/*
* Property read barrier for deferred cloning of compiler-created function
* objects optimized as typically non-escaping, ad-hoc methods in obj.
*/
inline bool
JSScope::methodReadBarrier(JSContext *cx, JSScopeProperty *sprop, js::Value *vp)
{
JS_ASSERT(hasMethodBarrier());
JS_ASSERT(hasProperty(sprop));
JS_ASSERT(sprop->isMethod());
JS_ASSERT(&vp->toObject() == &sprop->methodObject());
JS_ASSERT(object->canHaveMethodBarrier());
JSObject *funobj = &vp->toObject();
JSFunction *fun = GET_FUNCTION_PRIVATE(cx, funobj);
JS_ASSERT(fun == funobj && FUN_NULL_CLOSURE(fun));
funobj = CloneFunctionObject(cx, fun, funobj->getParent());
if (!funobj)
return false;
funobj->setMethodObj(*object);
vp->setObject(*funobj);
if (!js_SetPropertyHelper(cx, object, sprop->id, 0, vp))
return false;
#ifdef DEBUG
if (cx->runtime->functionMeterFilename) {
JS_FUNCTION_METER(cx, mreadbarrier);
typedef JSRuntime::FunctionCountMap HM;
HM &h = cx->runtime->methodReadBarrierCountMap;
HM::AddPtr p = h.lookupForAdd(fun);
if (!p) {
h.add(p, fun, 1);
} else {
JS_ASSERT(p->key == fun);
++p->value;
}
}
#endif
return true;
}
static JS_ALWAYS_INLINE bool
ChangesMethodValue(const js::Value &prev, const js::Value &v)
{
JSObject *prevObj;
return prev.isObject() && (prevObj = &prev.toObject())->isFunction() &&
(!v.isObject() || &v.toObject() != prevObj);
}
inline bool
JSScope::methodWriteBarrier(JSContext *cx, JSScopeProperty *sprop,
const js::Value &v)
{
if (flags & (BRANDED | METHOD_BARRIER)) {
const js::Value &prev = object->lockedGetSlot(sprop->slot);
if (ChangesMethodValue(prev, v)) {
JS_FUNCTION_METER(cx, mwritebarrier);
return methodShapeChange(cx, sprop);
}
}
return true;
}
inline bool
JSScope::methodWriteBarrier(JSContext *cx, uint32 slot, const js::Value &v)
{
if (flags & (BRANDED | METHOD_BARRIER)) {
const js::Value &prev = object->lockedGetSlot(slot);
if (ChangesMethodValue(prev, v)) {
JS_FUNCTION_METER(cx, mwslotbarrier);
return methodShapeChange(cx, slot);
}
}
return true;
}
inline void
JSScope::trace(JSTracer *trc)
JSObject::trace(JSTracer *trc)
{
JSContext *cx = trc->context;
JSScopeProperty *sprop = lastProp;
uint8 regenFlag = cx->runtime->gcRegenShapesScopeFlag;
if (emptyShape)
emptyShape->trace(trc);
if (IS_GC_MARKING_TRACER(trc) && cx->runtime->gcRegenShapes && !hasRegenFlag(regenFlag)) {
if (!isNative())
return;
JSContext *cx = trc->context;
js::Shape *shape = lastProp;
if (IS_GC_MARKING_TRACER(trc) && cx->runtime->gcRegenShapes) {
/*
* Either this scope has its own shape, which must be regenerated, or
* Either this object has its own shape, which must be regenerated, or
* it must have the same shape as lastProp.
*/
uint32 newShape;
if (sprop) {
if (!sprop->hasRegenFlag()) {
sprop->shape = js_RegenerateShapeForGC(cx);
sprop->setRegenFlag();
}
newShape = sprop->shape;
if (!shape->hasRegenFlag()) {
shape->shape = js_RegenerateShapeForGC(cx);
shape->setRegenFlag();
}
if (!sprop || hasOwnShape()) {
uint32 newShape = shape->shape;
if (hasOwnShape()) {
newShape = js_RegenerateShapeForGC(cx);
JS_ASSERT_IF(sprop, newShape != sprop->shape);
}
shape = newShape;
flags ^= JSScope::SHAPE_REGEN;
/* Also regenerate the shapes of this scope's empty scope, if there is one. */
JSScope *empty = emptyScope;
if (empty) {
JS_ASSERT(!empty->emptyScope);
if (!empty->hasRegenFlag(regenFlag)) {
uint32 newEmptyShape = js_RegenerateShapeForGC(cx);
JS_PROPERTY_TREE(cx).emptyShapeChange(empty->shape, newEmptyShape);
empty->shape = newEmptyShape;
empty->flags ^= JSScope::SHAPE_REGEN;
}
JS_ASSERT(newShape != shape->shape);
}
objShape = newShape;
}
if (sprop) {
JS_ASSERT(hasProperty(sprop));
/* Trace scope's property tree ancestor line. */
do {
sprop->trace(trc);
} while ((sprop = sprop->parent) != NULL);
}
/* Trace our property tree or dictionary ancestor line. */
do {
shape->trace(trc);
} while ((shape = shape->parent) != NULL);
}
namespace js {
inline
JSScopeProperty::JSScopeProperty(jsid id, js::PropertyOp getter, js::PropertyOp setter,
uint32 slot, uintN attrs, uintN flags, intN shortid)
: id(id), rawGetter(getter), rawSetter(setter), slot(slot), attrs(uint8(attrs)),
flags(uint8(flags)), shortid(int16(shortid))
Shape::Shape(jsid id, js::PropertyOp getter, js::PropertyOp setter,
uint32 slot, uintN attrs, uintN flags, intN shortid)
: JSObjectMap(0), table(NULL),
id(id), rawGetter(getter), rawSetter(setter), slot(slot), attrs(uint8(attrs)),
flags(uint8(flags)), shortid(int16(shortid)), parent(NULL)
{
JS_ASSERT_IF(getter && (attrs & JSPROP_GETTER), getterObj->isCallable());
JS_ASSERT_IF(setter && (attrs & JSPROP_SETTER), setterObj->isCallable());
kids.setNull();
}
inline
Shape::Shape(JSContext *cx, Class *aclasp)
: JSObjectMap(js_GenerateShape(cx, false)), table(NULL),
id(JSID_EMPTY), clasp(aclasp), rawSetter(NULL), slot(JSSLOT_FREE(aclasp)), attrs(0),
flags(SHARED_EMPTY), shortid(0), parent(NULL)
{
kids.setNull();
}
inline JSDHashNumber
JSScopeProperty::hash() const
Shape::hash() const
{
JSDHashNumber hash = 0;
@ -272,18 +184,18 @@ JSScopeProperty::hash() const
}
inline bool
JSScopeProperty::matches(const JSScopeProperty *p) const
Shape::matches(const js::Shape *other) const
{
JS_ASSERT(!JSID_IS_VOID(id));
JS_ASSERT(!JSID_IS_VOID(p->id));
return id == p->id &&
matchesParamsAfterId(p->rawGetter, p->rawSetter, p->slot, p->attrs, p->flags,
p->shortid);
JS_ASSERT(!JSID_IS_VOID(other->id));
return id == other->id &&
matchesParamsAfterId(other->rawGetter, other->rawSetter, other->slot, other->attrs,
other->flags, other->shortid);
}
inline bool
JSScopeProperty::matchesParamsAfterId(js::PropertyOp agetter, js::PropertyOp asetter, uint32 aslot,
uintN aattrs, uintN aflags, intN ashortid) const
Shape::matchesParamsAfterId(js::PropertyOp agetter, js::PropertyOp asetter, uint32 aslot,
uintN aattrs, uintN aflags, intN ashortid) const
{
JS_ASSERT(!JSID_IS_VOID(id));
return rawGetter == agetter &&
@ -295,7 +207,7 @@ JSScopeProperty::matchesParamsAfterId(js::PropertyOp agetter, js::PropertyOp ase
}
inline bool
JSScopeProperty::get(JSContext* cx, JSObject* obj, JSObject *pobj, js::Value* vp)
Shape::get(JSContext* cx, JSObject* obj, JSObject *pobj, js::Value* vp) const
{
JS_ASSERT(!JSID_IS_VOID(this->id));
JS_ASSERT(!hasDefaultGetter());
@ -308,10 +220,7 @@ JSScopeProperty::get(JSContext* cx, JSObject* obj, JSObject *pobj, js::Value* vp
if (isMethod()) {
vp->setObject(methodObject());
JSScope *scope = pobj->scope();
JS_ASSERT(scope->object == pobj);
return scope->methodReadBarrier(cx, this, vp);
return pobj->methodReadBarrier(cx, *this, vp);
}
/*
@ -320,11 +229,11 @@ JSScopeProperty::get(JSContext* cx, JSObject* obj, JSObject *pobj, js::Value* vp
*/
if (obj->getClass() == &js_WithClass)
obj = js_UnwrapWithObject(cx, obj);
return js::callJSPropertyOp(cx, getterOp(), obj, SPROP_USERID(this), vp);
return js::CallJSPropertyOp(cx, getterOp(), obj, SHAPE_USERID(this), vp);
}
inline bool
JSScopeProperty::set(JSContext* cx, JSObject* obj, js::Value* vp)
Shape::set(JSContext* cx, JSObject* obj, js::Value* vp) const
{
JS_ASSERT_IF(hasDefaultSetter(), hasGetterValue());
@ -336,10 +245,22 @@ JSScopeProperty::set(JSContext* cx, JSObject* obj, js::Value* vp)
if (attrs & JSPROP_GETTER)
return js_ReportGetterOnlyAssignment(cx);
/* See the comment in JSScopeProperty::get as to why we check for With. */
/* See the comment in js::Shape::get as to why we check for With. */
if (obj->getClass() == &js_WithClass)
obj = js_UnwrapWithObject(cx, obj);
return js::callJSPropertyOpSetter(cx, setterOp(), obj, SPROP_USERID(this), vp);
return js::CallJSPropertyOpSetter(cx, setterOp(), obj, SHAPE_USERID(this), vp);
}
inline
EmptyShape::EmptyShape(JSContext *cx, js::Class *aclasp)
: js::Shape(cx, aclasp)
{
#ifdef DEBUG
if (cx->runtime->meterEmptyShapes())
cx->runtime->emptyShapes.put(this);
#endif
}
} /* namespace js */
#endif /* jsscopeinlines_h___ */
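A compact way to see the invariant JSObject::updateShape maintains above: the object's shape number tracks lastProp->shape until the object acquires its own shape, at which point a fresh number is generated. A simplified standalone model (made-up types and numbers, not the real code):

```cpp
#include <cassert>
#include <cstdint>

static uint32_t generateShape() { static uint32_t next = 1; return next++; }

struct ToyShape { uint32_t shape = generateShape(); };

struct ToyObject {
    const ToyShape *lastProp = nullptr;
    bool ownShape = false;
    uint32_t objShape = 0;

    void updateShape() {
        // Mirrors the inline above: own shape => fresh number, else share lastProp's.
        objShape = ownShape ? generateShape() : lastProp->shape;
    }
};

int main() {
    ToyShape last;
    ToyObject obj;
    obj.lastProp = &last;

    obj.updateShape();
    assert(obj.objShape == last.shape);   // shares the lineage's shape number

    obj.ownShape = true;                  // e.g. a delete gave the object its own shape
    obj.updateShape();
    assert(obj.objShape != last.shape);   // now carries a private shape number
    return 0;
}
```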

View file

@ -1034,7 +1034,7 @@ js_NewScriptFromCG(JSContext *cx, JSCodeGenerator *cg)
*/
goto skip_empty;
}
js_FreezeLocalNames(cx, fun);
fun->freezeLocalNames(cx);
fun->u.i.script = empty;
}
@ -1120,7 +1120,7 @@ js_NewScriptFromCG(JSContext *cx, JSCodeGenerator *cg)
else
fun->u.i.nupvars = 0;
js_FreezeLocalNames(cx, fun);
fun->freezeLocalNames(cx);
fun->u.i.script = script;
#ifdef CHECK_SCRIPT_OWNER
script->owner = NULL;

View file

@ -1029,9 +1029,9 @@ js_SkipWhiteSpace(const jschar *s, const jschar *end)
}
/*
* Inflate bytes to JS chars and vice versa. Report out of memory via cx
* and return null on error, otherwise return the jschar or byte vector that
* was JS_malloc'ed. length is updated with the length of the new string in jschars.
* Inflate bytes to JS chars and vice versa. Report out of memory via cx and
* return null on error, otherwise return the jschar or byte vector that was
* JS_malloc'ed. length is updated to the length of the new string in jschars.
*/
extern jschar *
js_InflateString(JSContext *cx, const char *bytes, size_t *length);
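For reference, the contract described above is used roughly as follows; a hedged fragment assuming a live JSContext *cx and a NUL-terminated bytes buffer, where ConsumeChars is a placeholder for whatever the caller does with the jschars and the buffer is released with JS_free, the public counterpart of JS_malloc:

```cpp
// Usage sketch only (not in the patch); ConsumeChars is a placeholder, not a real API.
static bool
InflateAndUse(JSContext *cx, const char *bytes)
{
    size_t length = strlen(bytes);                        // in: byte count
    jschar *chars = js_InflateString(cx, bytes, &length);
    if (!chars)
        return false;                                     // OOM already reported via cx
    ConsumeChars(chars, length);                          // out: 'length' now counts jschars
    JS_free(cx, chars);                                   // buffer was JS_malloc'ed
    return true;
}
```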

File diff not shown because it is too large

View file

@ -668,7 +668,7 @@ struct TreeFragment : public LinkableFragment
linkedTrees(alloc),
sideExits(alloc),
gcthings(alloc),
sprops(alloc)
shapes(alloc)
{ }
TreeFragment* first;
@ -694,7 +694,7 @@ struct TreeFragment : public LinkableFragment
unsigned maxCallDepth;
/* All embedded GC things are registered here so the GC can scan them. */
Queue<Value> gcthings;
Queue<JSScopeProperty*> sprops;
Queue<const js::Shape*> shapes;
unsigned maxNativeStackSlots;
inline unsigned nGlobalTypes() {
@ -1014,7 +1014,7 @@ class TraceRecorder
nanojit::LIns* insImmObj(JSObject* obj);
nanojit::LIns* insImmFun(JSFunction* fun);
nanojit::LIns* insImmStr(JSString* str);
nanojit::LIns* insImmSprop(JSScopeProperty* sprop);
nanojit::LIns* insImmShape(const js::Shape* shape);
nanojit::LIns* insImmId(jsid id);
nanojit::LIns* p2i(nanojit::LIns* ins);
@ -1066,7 +1066,7 @@ class TraceRecorder
unsigned callDepth, unsigned ngslots, JSValueType* typeMap);
void trackNativeStackUse(unsigned slots);
JS_REQUIRES_STACK bool isValidSlot(JSScope* scope, JSScopeProperty* sprop);
JS_REQUIRES_STACK bool isValidSlot(JSObject *obj, const js::Shape* shape);
JS_REQUIRES_STACK bool lazilyImportGlobalSlot(unsigned slot);
JS_REQUIRES_STACK void importGlobalSlot(unsigned slot);
@ -1131,7 +1131,7 @@ class TraceRecorder
Value v; // current property value
JSObject *obj; // Call object where name was found
nanojit::LIns *obj_ins; // LIR value for obj
JSScopeProperty *sprop; // sprop name was resolved to
js::Shape *shape; // shape name was resolved to
};
JS_REQUIRES_STACK nanojit::LIns* scopeChain();
@ -1140,7 +1140,7 @@ class TraceRecorder
JS_REQUIRES_STACK JSStackFrame* frameIfInRange(JSObject* obj, unsigned* depthp = NULL) const;
JS_REQUIRES_STACK RecordingStatus traverseScopeChain(JSObject *obj, nanojit::LIns *obj_ins, JSObject *obj2, nanojit::LIns *&obj2_ins);
JS_REQUIRES_STACK AbortableRecordingStatus scopeChainProp(JSObject* obj, Value*& vp, nanojit::LIns*& ins, NameResult& nr);
JS_REQUIRES_STACK RecordingStatus callProp(JSObject* obj, JSProperty* sprop, jsid id, Value*& vp, nanojit::LIns*& ins, NameResult& nr);
JS_REQUIRES_STACK RecordingStatus callProp(JSObject* obj, JSProperty* shape, jsid id, Value*& vp, nanojit::LIns*& ins, NameResult& nr);
JS_REQUIRES_STACK nanojit::LIns* arg(unsigned n);
JS_REQUIRES_STACK void arg(unsigned n, nanojit::LIns* i);
@ -1200,7 +1200,7 @@ class TraceRecorder
void forgetGuardedShapes();
inline nanojit::LIns* map(nanojit::LIns *obj_ins);
inline nanojit::LIns* shape_ins(nanojit::LIns *obj_ins);
JS_REQUIRES_STACK AbortableRecordingStatus test_property_cache(JSObject* obj, nanojit::LIns* obj_ins,
JSObject*& obj2, PCVal& pcval);
JS_REQUIRES_STACK RecordingStatus guardPropertyCacheHit(nanojit::LIns* obj_ins,
@ -1217,7 +1217,8 @@ class TraceRecorder
nanojit::LIns*& dslots_ins, const Value &v, nanojit::LIns* v_ins);
void set_array_fslot(nanojit::LIns *obj_ins, unsigned slot, uint32 val);
nanojit::LIns* stobj_get_const_private_ptr(nanojit::LIns* obj_ins);
nanojit::LIns* stobj_get_const_private_ptr(nanojit::LIns* obj_ins,
unsigned slot = JSSLOT_PRIVATE);
nanojit::LIns* stobj_get_fslot_uint32(nanojit::LIns* obj_ins, unsigned slot);
nanojit::LIns* stobj_get_fslot_ptr(nanojit::LIns* obj_ins, unsigned slot);
nanojit::LIns* unbox_slot(JSObject *obj, nanojit::LIns *obj_ins, uint32 slot,
@ -1261,11 +1262,11 @@ class TraceRecorder
nanojit::LIns* index_ins, Value* outp);
JS_REQUIRES_STACK RecordingStatus getPropertyById(nanojit::LIns* obj_ins, Value* outp);
JS_REQUIRES_STACK RecordingStatus getPropertyWithNativeGetter(nanojit::LIns* obj_ins,
JSScopeProperty* sprop,
const js::Shape* shape,
Value* outp);
JS_REQUIRES_STACK RecordingStatus getPropertyWithScriptGetter(JSObject *obj,
nanojit::LIns* obj_ins,
JSScopeProperty* sprop);
const js::Shape* shape);
JS_REQUIRES_STACK nanojit::LIns* getStringLength(nanojit::LIns* str_ins);
JS_REQUIRES_STACK nanojit::LIns* getStringChars(nanojit::LIns* str_ins);
@ -1276,21 +1277,21 @@ class TraceRecorder
JSOp mode);
JS_REQUIRES_STACK RecordingStatus nativeSet(JSObject* obj, nanojit::LIns* obj_ins,
JSScopeProperty* sprop,
const Value &v, nanojit::LIns* v_ins);
const js::Shape* shape,
const Value &v, nanojit::LIns* v_ins);
JS_REQUIRES_STACK RecordingStatus setProp(Value &l, PropertyCacheEntry* entry,
JSScopeProperty* sprop,
Value &v, nanojit::LIns*& v_ins,
bool isDefinitelyAtom);
const js::Shape* shape,
Value &v, nanojit::LIns*& v_ins,
bool isDefinitelyAtom);
JS_REQUIRES_STACK RecordingStatus setCallProp(JSObject *callobj, nanojit::LIns *callobj_ins,
JSScopeProperty *sprop, nanojit::LIns *v_ins,
const Value &v);
const js::Shape *shape, nanojit::LIns *v_ins,
const Value &v);
JS_REQUIRES_STACK RecordingStatus initOrSetPropertyByName(nanojit::LIns* obj_ins,
Value* idvalp, Value* rvalp,
bool init);
Value* idvalp, Value* rvalp,
bool init);
JS_REQUIRES_STACK RecordingStatus initOrSetPropertyByIndex(nanojit::LIns* obj_ins,
nanojit::LIns* index_ins,
Value* rvalp, bool init);
nanojit::LIns* index_ins,
Value* rvalp, bool init);
JS_REQUIRES_STACK AbortableRecordingStatus setElem(int lval_spindex, int idx_spindex,
int v_spindex);
@ -1379,8 +1380,7 @@ class TraceRecorder
nanojit::LIns *&status_ins);
JS_REQUIRES_STACK RecordingStatus emitNativeCall(JSSpecializedNative* sn, uintN argc,
nanojit::LIns* args[], bool rooted);
JS_REQUIRES_STACK void emitNativePropertyOp(JSScope* scope,
JSScopeProperty* sprop,
JS_REQUIRES_STACK void emitNativePropertyOp(const js::Shape* shape,
nanojit::LIns* obj_ins,
bool setflag,
nanojit::LIns* addr_boxed_val_ins);
@ -1482,8 +1482,9 @@ public:
JS_REQUIRES_STACK AbortableRecordingStatus record_EnterFrame(uintN& inlineCallCount);
JS_REQUIRES_STACK AbortableRecordingStatus record_LeaveFrame();
JS_REQUIRES_STACK AbortableRecordingStatus record_SetPropHit(PropertyCacheEntry* entry,
JSScopeProperty* sprop);
JS_REQUIRES_STACK AbortableRecordingStatus record_DefLocalFunSetSlot(uint32 slot, JSObject* obj);
const js::Shape* shape);
JS_REQUIRES_STACK AbortableRecordingStatus record_DefLocalFunSetSlot(uint32 slot,
JSObject* obj);
JS_REQUIRES_STACK AbortableRecordingStatus record_NativeCallComplete();
void forgetGuardedShapesForObject(JSObject* obj);

View file

@ -517,7 +517,7 @@ class TypedArrayTemplate
} else {
JSObject *obj2;
JSProperty *prop;
JSScopeProperty *sprop;
const Shape *shape;
JSObject *proto = obj->getProto();
if (!proto) {
@ -531,8 +531,8 @@ class TypedArrayTemplate
if (prop) {
if (obj2->isNative()) {
sprop = (JSScopeProperty *) prop;
if (!js_NativeGet(cx, obj, obj2, sprop, JSGET_METHOD_BARRIER, vp))
shape = (Shape *) prop;
if (!js_NativeGet(cx, obj, obj2, shape, JSGET_METHOD_BARRIER, vp))
return false;
JS_UNLOCK_OBJ(cx, obj2);
}
@ -878,7 +878,7 @@ class TypedArrayTemplate
return false;
}
// note the usage of JS_NewObject here -- we don't want the
// note the usage of NewObject here -- we don't want the
// constructor to be called!
JS_ASSERT(slowClass() != &js_FunctionClass);
JSObject *nobj = NewNonFunction<WithProto::Class>(cx, slowClass(), NULL, NULL);
@ -985,9 +985,9 @@ class TypedArrayTemplate
makeFastWithPrivate(JSContext *cx, JSObject *obj, ThisTypeArray *tarray)
{
JS_ASSERT(obj->getClass() == slowClass());
obj->setPrivate(tarray);
obj->setSharedNonNativeMap();
obj->clasp = fastClass();
obj->map = const_cast<JSObjectMap *>(&JSObjectMap::sharedNonNative);
obj->setPrivate(tarray);
}
public:

View file

@ -199,7 +199,7 @@ void
JS_DumpHistogram(JSBasicStats *bs, FILE *fp)
{
uintN bin;
uint32 cnt, max, prev, val, i;
uint32 cnt, max;
double sum, mean;
for (bin = 0, max = 0, sum = 0; bin <= 10; bin++) {
@ -209,20 +209,23 @@ JS_DumpHistogram(JSBasicStats *bs, FILE *fp)
sum += cnt;
}
mean = sum / cnt;
for (bin = 0, prev = 0; bin <= 10; bin++, prev = val) {
val = BinToVal(bs->logscale, bin);
for (bin = 0; bin <= 10; bin++) {
uintN val = BinToVal(bs->logscale, bin);
uintN end = (bin == 10) ? 0 : BinToVal(bs->logscale, bin + 1);
cnt = bs->hist[bin];
if (prev + 1 >= val)
if (val + 1 == end)
fprintf(fp, " [%6u]", val);
else if (end != 0)
fprintf(fp, "[%6u, %6u]", val, end - 1);
else
fprintf(fp, "[%6u, %6u]", prev + 1, val);
fprintf(fp, "%s %8u ", (bin == 10) ? "+" : ":", cnt);
fprintf(fp, "[%6u, +inf]", val);
fprintf(fp, ": %8u ", cnt);
if (cnt != 0) {
if (max > 1e6 && mean > 1e3)
cnt = (uint32) ceil(log10((double) cnt));
else if (max > 16 && mean > 8)
cnt = JS_CeilingLog2(cnt);
for (i = 0; i < cnt; i++)
for (uintN i = 0; i < cnt; i++)
putc('*', fp);
}
putc('\n', fp);
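The rewritten loop labels each bin by its own value range instead of by the previous bin's endpoint. A standalone toy of just that labelling, with a made-up power-of-two binToVal standing in for the real BinToVal(bs->logscale, bin):

```cpp
#include <cstdio>

static unsigned binToVal(unsigned bin) { return bin == 0 ? 0 : 1u << (bin - 1); }

int main() {
    for (unsigned bin = 0; bin <= 10; bin++) {
        unsigned val = binToVal(bin);
        unsigned end = (bin == 10) ? 0 : binToVal(bin + 1);
        if (val + 1 == end)
            std::printf("      [%6u]\n", val);          // bin covers a single value
        else if (end != 0)
            std::printf("[%6u, %6u]\n", val, end - 1);  // closed range
        else
            std::printf("[%6u,   +inf]\n", val);        // last, open-ended bin
    }
    return 0;
}
```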

View file

@ -256,7 +256,7 @@ public:
*mStatementDone = true;
}
void SetStatementDone(bool *aStatementDone) {
void setStatementDone(bool *aStatementDone) {
mStatementDone = aStatementDone;
}
};
@ -284,7 +284,7 @@ public:
* temporary, but we really intend it as non-const
*/
const_cast<JSGuardObjectNotifier&>(aNotifier).
SetStatementDone(&mStatementDone);
setStatementDone(&mStatementDone);
}
};

Просмотреть файл

@ -410,7 +410,8 @@ JSCompartment::wrap(JSContext *cx, JSObject **objp)
}
bool
JSCompartment::wrapId(JSContext *cx, jsid *idp) {
JSCompartment::wrapId(JSContext *cx, jsid *idp)
{
if (JSID_IS_INT(*idp))
return true;
AutoValueRooter tvr(cx, IdToValue(*idp));
@ -430,7 +431,8 @@ JSCompartment::wrap(JSContext *cx, PropertyOp *propp)
}
bool
JSCompartment::wrap(JSContext *cx, PropertyDescriptor *desc) {
JSCompartment::wrap(JSContext *cx, PropertyDescriptor *desc)
{
return wrap(cx, &desc->obj) &&
(!(desc->attrs & JSPROP_GETTER) || wrap(cx, &desc->getter)) &&
(!(desc->attrs & JSPROP_SETTER) || wrap(cx, &desc->setter)) &&
@ -438,7 +440,8 @@ JSCompartment::wrap(JSContext *cx, PropertyDescriptor *desc) {
}
bool
JSCompartment::wrap(JSContext *cx, AutoIdVector &props) {
JSCompartment::wrap(JSContext *cx, AutoIdVector &props)
{
jsid *vector = props.begin();
jsint length = props.length();
for (size_t n = 0; n < size_t(length); ++n) {
@ -449,7 +452,8 @@ JSCompartment::wrap(JSContext *cx, AutoIdVector &props) {
}
bool
JSCompartment::wrapException(JSContext *cx) {
JSCompartment::wrapException(JSContext *cx)
{
JS_ASSERT(cx->compartment == this);
if (cx->throwing) {

View file

@ -251,7 +251,7 @@ namespace_equality(JSContext *cx, JSObject *obj, const Value *v, JSBool *bp)
JS_FRIEND_DATA(Class) js_NamespaceClass = {
"Namespace",
JSCLASS_CONSTRUCT_PROTOTYPE |
JSCLASS_HAS_RESERVED_SLOTS(JSObject::NAMESPACE_FIXED_RESERVED_SLOTS) |
JSCLASS_HAS_RESERVED_SLOTS(JSObject::NAMESPACE_CLASS_RESERVED_SLOTS) |
JSCLASS_MARK_IS_TRACE | JSCLASS_HAS_CACHED_PROTO(JSProto_Namespace),
PropertyStub, /* addProperty */
PropertyStub, /* delProperty */
@ -370,7 +370,7 @@ qname_equality(JSContext *cx, JSObject *qn, const Value *v, JSBool *bp)
JS_FRIEND_DATA(Class) js_QNameClass = {
"QName",
JSCLASS_CONSTRUCT_PROTOTYPE |
JSCLASS_HAS_RESERVED_SLOTS(JSObject::QNAME_FIXED_RESERVED_SLOTS) |
JSCLASS_HAS_RESERVED_SLOTS(JSObject::QNAME_CLASS_RESERVED_SLOTS) |
JSCLASS_MARK_IS_TRACE | JSCLASS_HAS_CACHED_PROTO(JSProto_QName),
PropertyStub, /* addProperty */
PropertyStub, /* delProperty */
@ -405,7 +405,7 @@ JS_FRIEND_DATA(Class) js_QNameClass = {
JS_FRIEND_DATA(Class) js_AttributeNameClass = {
js_AttributeName_str,
JSCLASS_CONSTRUCT_PROTOTYPE |
JSCLASS_HAS_RESERVED_SLOTS(JSObject::QNAME_FIXED_RESERVED_SLOTS) |
JSCLASS_HAS_RESERVED_SLOTS(JSObject::QNAME_CLASS_RESERVED_SLOTS) |
JSCLASS_MARK_IS_TRACE | JSCLASS_HAS_CACHED_PROTO(JSProto_AttributeName),
PropertyStub, /* addProperty */
PropertyStub, /* delProperty */
@ -419,7 +419,7 @@ JS_FRIEND_DATA(Class) js_AttributeNameClass = {
JS_FRIEND_DATA(Class) js_AnyNameClass = {
js_AnyName_str,
JSCLASS_CONSTRUCT_PROTOTYPE |
JSCLASS_HAS_RESERVED_SLOTS(JSObject::QNAME_FIXED_RESERVED_SLOTS) |
JSCLASS_HAS_RESERVED_SLOTS(JSObject::QNAME_CLASS_RESERVED_SLOTS) |
JSCLASS_MARK_IS_TRACE | JSCLASS_HAS_CACHED_PROTO(JSProto_AnyName),
PropertyStub, /* addProperty */
PropertyStub, /* delProperty */
@ -4642,15 +4642,14 @@ xml_trace_vector(JSTracer *trc, JSXML **vec, uint32 len)
/*
* XML objects are native. Thus xml_lookupProperty must return a valid
* JSScopeProperty pointer parameter via *propp to signify "property found".
* Since the only call to xml_lookupProperty is via JSObject::lookupProperty,
* and then only from js_FindProperty (in jsobj.c, called from jsinterp.c) or
* from JSOP_IN case in the interpreter, the only time we add a
* JSScopeProperty here is when an unqualified name is being accessed or when
* "name in xml" is called.
* Shape pointer parameter via *propp to signify "property found". Since the
* only call to xml_lookupProperty is via JSObject::lookupProperty, and then
* only from js_FindProperty (in jsobj.c, called from jsinterp.c) or from
* JSOP_IN case in the interpreter, the only time we add a Shape here is when
* an unqualified name is being accessed or when "name in xml" is called.
*
* This scope property keeps the JSOP_NAME code in js_Interpret happy by
* giving it an sprop with (getter, setter) == (GetProperty, PutProperty).
* giving it a shape with (getter, setter) == (GetProperty, PutProperty).
*
* NB: xml_deleteProperty must take care to remove any property added here.
*
@ -4673,7 +4672,6 @@ xml_lookupProperty(JSContext *cx, JSObject *obj, jsid id, JSObject **objp,
uint32 i;
JSObject *qn;
jsid funid;
JSScopeProperty *sprop;
xml = (JSXML *) obj->getPrivate();
if (js_IdIsIndex(id, &i)) {
@ -4690,16 +4688,17 @@ xml_lookupProperty(JSContext *cx, JSObject *obj, jsid id, JSObject **objp,
*objp = NULL;
*propp = NULL;
} else {
sprop = js_AddNativeProperty(cx, obj, id,
Valueify(GetProperty), Valueify(PutProperty),
SPROP_INVALID_SLOT, JSPROP_ENUMERATE,
0, 0);
if (!sprop)
const Shape *shape =
js_AddNativeProperty(cx, obj, id,
Valueify(GetProperty), Valueify(PutProperty),
SHAPE_INVALID_SLOT, JSPROP_ENUMERATE,
0, 0);
if (!shape)
return JS_FALSE;
JS_LOCK_OBJ(cx, obj);
*objp = obj;
*propp = (JSProperty *) sprop;
*propp = (JSProperty *) shape;
}
return JS_TRUE;
}
@ -4799,7 +4798,7 @@ xml_deleteProperty(JSContext *cx, JSObject *obj, jsid id, Value *rval)
* property's getter or setter. But now it's time to remove any such
* property, to purge the property cache and remove the scope entry.
*/
if (obj->scope()->object == obj && !js_DeleteProperty(cx, obj, id, rval))
if (!obj->nativeEmpty() && !js_DeleteProperty(cx, obj, id, rval))
return JS_FALSE;
rval->setBoolean(true);
@ -7072,7 +7071,7 @@ js_InitXMLClass(JSContext *cx, JSObject *obj)
JSFunction *fun;
JSXML *xml;
JSProperty *prop;
JSScopeProperty *sprop;
Shape *shape;
jsval cval, vp[3];
/* Define the isXMLName function. */
@ -7105,9 +7104,9 @@ js_InitXMLClass(JSContext *cx, JSObject *obj)
return NULL;
}
JS_ASSERT(prop);
sprop = (JSScopeProperty *) prop;
JS_ASSERT(SPROP_HAS_VALID_SLOT(sprop, pobj->scope()));
cval = Jsvalify(pobj->getSlotMT(cx, sprop->slot));
shape = (Shape *) prop;
JS_ASSERT(pobj->containsSlot(shape->slot));
cval = Jsvalify(pobj->getSlotMT(cx, shape->slot));
JS_UNLOCK_OBJ(cx, pobj);
JS_ASSERT(VALUE_IS_FUNCTION(cx, cval));

View file

@ -1800,7 +1800,7 @@ DisassembleValue(JSContext *cx, jsval v, bool lines, bool recursive)
fputs("\nupvars: {\n", stdout);
void *mark = JS_ARENA_MARK(&cx->tempPool);
jsuword *localNames = js_GetLocalNameArray(cx, fun, &cx->tempPool);
jsuword *localNames = fun->getLocalNameArray(cx, &cx->tempPool);
if (!localNames)
return false;
@ -2047,7 +2047,7 @@ DumpScope(JSContext *cx, JSObject *obj, FILE *fp)
uintN i = 0;
for (JSScopeProperty *sprop = NULL; JS_PropertyIterator(obj, &sprop);) {
fprintf(fp, "%3u %p ", i++, (void *) sprop);
sprop->dump(cx, fp);
((Shape *) sprop)->dump(cx, fp);
}
}

View file

@ -49,7 +49,7 @@ function test1() {}
function test() { test1.call(this); }
test.prototype = new test1();
var length = 1024 * 1024 - 1;
var length = 512 * 1024 - 1;
var obj = new test();
var first = obj;
for(var i = 0 ; i < length ; i++) {

Просмотреть файл

@ -0,0 +1,10 @@
(function(x) {
function f1() { return 1; }
function f2() { return 2; }
function f3() { return 3; }
function f4() { return 4; }
var g = function () { return x; }
var a = [f1, f2, f3, f4, g];
for each (var v in a)
v.adhoc = 42; // Don't assertbotch in jsbuiltins.cpp setting g.adhoc
})(33);