Mirror of https://github.com/mozilla/gecko-dev.git

Eliminate JSObject::freeslot via monotonic lastProp->freeslot (592556, r=jorendorff,dvander).

Parent: d6f97815f6
Commit: 1f0f283683
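In rough terms, the patch removes the per-object `freeslot` bitfield and keeps the "first free slot" on the object's map (its last Shape), maintained monotonically as shapes are linked. The sketch below is a condensed illustration of that relationship, not a drop-in excerpt from the headers; the real declarations appear in the jsobj.h and jsscope.h hunks further down.

```cpp
#include <cstdint>

// Illustrative sketch of the relationship this patch sets up.
struct JSObjectMap {
    uint32_t shape;
    uint32_t freeslot;   // new in this patch: first free object slot
};

struct JSObject {
    JSObjectMap *map;    // for native objects, this is lastProp (a Shape)
    // uint32 freeslot : 24;   // old bitfield removed by this patch

    uint32_t freeslot() const { return map->freeslot; }
    bool containsSlot(uint32_t slot) const { return slot < freeslot(); }
};
```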
@@ -2965,7 +2965,7 @@ JS_SealObject(JSContext *cx, JSObject *obj, JSBool deep)
         return true;
 
     /* Walk slots in obj and if any value is a non-null object, seal it. */
-    for (uint32 i = 0, n = obj->freeslot; i != n; ++i) {
+    for (uint32 i = 0, n = obj->freeslot(); i != n; ++i) {
         const Value &v = obj->getSlot(i);
         if (i == JSSLOT_PRIVATE && (obj->getClass()->flags & JSCLASS_HAS_PRIVATE))
             continue;
@@ -116,11 +116,14 @@ using namespace js;
 /* Small arrays are dense, no matter what. */
 #define MIN_SPARSE_INDEX 256
 
-/* Iteration depends on all indexes of a dense array to fit into a JSVAL-sized int. */
+/*
+ * Use the limit on number of object slots for sanity and consistency (see the
+ * assertion in JSObject::makeDenseArraySlow).
+ */
 static inline bool
 INDEX_TOO_BIG(jsuint index)
 {
-    return index > JS_BIT(29) - 1;
+    return index >= JSObject::NSLOTS_LIMIT;
 }
 
 static inline bool
@@ -1058,15 +1061,6 @@ JSObject::makeDenseArraySlow(JSContext *cx)
         capacity = 0;
     }
 
-    uint32 nslots = numSlots();
-    if (nslots >= JS_NSLOTS_LIMIT) {
-        setMap(oldMap);
-        JS_ReportOutOfMemory(cx);
-        return false;
-    }
-
-    freeslot = nslots;
-
     /* Begin with the length property to share more of the property tree. */
     if (!addProperty(cx, ATOM_TO_JSID(cx->runtime->atomState.lengthAtom),
                      array_length_getter, array_length_setter,
@@ -1088,6 +1082,9 @@ JSObject::makeDenseArraySlow(JSContext *cx)
             continue;
         }
 
+        /* Assert that the length covering i fits in the alloted bits. */
+        JS_ASSERT(JS_INITIAL_NSLOTS + i + 1 < NSLOTS_LIMIT);
+
         if (!addDataProperty(cx, id, JS_INITIAL_NSLOTS + i, JSPROP_ENUMERATE)) {
             setMap(oldMap);
             return false;
@@ -195,12 +195,10 @@ AddPropertyHelper(JSContext* cx, JSObject* obj, Shape* shape, bool isDefinitelyA
 
     uint32 slot;
     slot = shape->slot;
-    JS_ASSERT(slot == obj->freeslot);
+    JS_ASSERT(slot == obj->freeslot());
 
     if (slot < obj->numSlots()) {
         JS_ASSERT(obj->getSlot(slot).isUndefined());
-        ++obj->freeslot;
-        JS_ASSERT(obj->freeslot != 0);
     } else {
         if (!obj->allocSlot(cx, &slot))
             goto exit_trace;
@@ -234,7 +234,7 @@ UpdateDepth(JSContext *cx, JSCodeGenerator *cg, ptrdiff_t target)
         JS_ASSERT(op == JSOP_ENTERBLOCK);
         JS_ASSERT(nuses == 0);
         blockObj = cg->objectList.lastbox->object;
-        JS_ASSERT(blockObj->getClass() == &js_BlockClass);
+        JS_ASSERT(blockObj->isStaticBlock());
         JS_ASSERT(blockObj->fslots[JSSLOT_BLOCK_DEPTH].isUndefined());
 
         OBJ_SET_BLOCK_DEPTH(cx, blockObj, cg->stackDepth);
@@ -1592,7 +1592,7 @@ js_LexicalLookup(JSTreeContext *tc, JSAtom *atom, jsint *slotp, JSStmtInfo *stmt
             continue;
 
         JSObject *obj = stmt->blockObj;
-        JS_ASSERT(obj->getClass() == &js_BlockClass);
+        JS_ASSERT(obj->isStaticBlock());
 
         const Shape *shape = obj->nativeLookup(ATOM_TO_JSID(atom));
         if (shape) {
@@ -1867,9 +1867,12 @@ EmitEnterBlock(JSContext *cx, JSParseNode *pn, JSCodeGenerator *cg)
 #endif
     }
 
-    if (!blockObj->growSlots(cx, base))
-        return false;
-    blockObj->freeslot = base;
+    /*
+     * Shrink slots to free blockObj->dslots and ensure a prompt safe crash if
+     * by accident some code tries to get a slot from a compiler-created Block
+     * prototype instead of from a clone.
+     */
+    blockObj->shrinkSlots(cx, base);
     return true;
 }
 
@@ -434,9 +434,9 @@ WrapEscapingClosure(JSContext *cx, JSStackFrame *fp, JSFunction *fun)
         oplen = js_GetVariableBytecodeLength(pc);
 
         /*
-         * Rewrite JSOP_{GET,CALL}DSLOT as JSOP_{GET,CALL}UPVAR_DBG for the
+         * Rewrite JSOP_{GET,CALL}FCSLOT as JSOP_{GET,CALL}UPVAR_DBG for the
          * case where fun is an escaping flat closure. This works because the
-         * UPVAR and DSLOT ops by design have the same format: an upvar index
+         * UPVAR and FCSLOT ops by design have the same format: an upvar index
          * immediate operand.
          */
         switch (op) {
@@ -968,7 +968,7 @@ NewCallObject(JSContext *cx, JSFunction *fun, JSObject *scopeChain)
     for (Shape::Range r = callobj->lastProp; !r.empty(); r.popFront()) {
         const Shape &s = r.front();
         if (s.slot != SHAPE_INVALID_SLOT) {
-            JS_ASSERT(s.slot + 1 == callobj->freeslot);
+            JS_ASSERT(s.slot + 1 == callobj->freeslot());
             break;
         }
     }
@@ -2415,7 +2415,7 @@ JSObject::initBoundFunction(JSContext *cx, const Value &thisArg,
         if (!empty)
             return false;
 
-        empty->slot += argslen;
+        empty->freeslot += argslen;
         map = empty;
 
         if (!ensureInstanceReservedSlots(cx, argslen))
@@ -3212,6 +3212,8 @@ JSFunction::addLocal(JSContext *cx, JSAtom *atom, JSLocalKind kind)
         if (findArgInsertionPoint) {
             while (parent->parent && parent->getter() != js_GetCallArg) {
                 ++parent->slot;
+                JS_ASSERT(parent->slot == parent->freeslot);
+                ++parent->freeslot;
                 listp = &parent->parent;
                 parent = *listp;
             }
@@ -131,7 +131,7 @@ js_GetScopeChain(JSContext *cx, JSStackFrame *fp)
      */
     JSObject *limitBlock, *limitClone;
     if (fp->hasFunction() && !fp->hasCallObj()) {
-        JS_ASSERT_IF(fp->getScopeChain()->getClass() == &js_BlockClass,
+        JS_ASSERT_IF(fp->getScopeChain()->isClonedBlock(),
                      fp->getScopeChain()->getPrivate() != js_FloatingFrameIfGenerator(cx, fp));
         if (!js_GetCallObject(cx, fp))
             return NULL;
@@ -216,7 +216,7 @@ js_GetScopeChain(JSContext *cx, JSStackFrame *fp)
      * found it in blockChain.
      */
     JS_ASSERT_IF(limitBlock &&
-                 limitBlock->getClass() == &js_BlockClass &&
+                 limitBlock->isBlock() &&
                  limitClone->getPrivate() == js_FloatingFrameIfGenerator(cx, fp),
                  sharedBlock);
 
@ -1215,7 +1215,7 @@ js_UnwindScope(JSContext *cx, jsint stackDepth, JSBool normalUnwind)
|
|||
|
||||
JSStackFrame *fp = cx->fp();
|
||||
for (obj = fp->maybeBlockChain(); obj; obj = obj->getParent()) {
|
||||
JS_ASSERT(obj->getClass() == &js_BlockClass);
|
||||
JS_ASSERT(obj->isStaticBlock());
|
||||
if (OBJ_BLOCK_DEPTH(cx, obj) < stackDepth)
|
||||
break;
|
||||
}
|
||||
|
@ -3693,7 +3693,7 @@ BEGIN_CASE(JSOP_GNAMEDEC)
|
|||
ASSERT_VALID_PROPERTY_CACHE_HIT(0, obj, obj2, entry);
|
||||
if (obj == obj2 && entry->vword.isSlot()) {
|
||||
uint32 slot = entry->vword.toSlot();
|
||||
JS_ASSERT(slot < obj->freeslot);
|
||||
JS_ASSERT(obj->containsSlot(slot));
|
||||
Value &rref = obj->getSlotRef(slot);
|
||||
int32_t tmp;
|
||||
if (JS_LIKELY(rref.isInt32() && CanIncDecWithoutOverflow(tmp = rref.toInt32()))) {
|
||||
|
@ -3933,7 +3933,7 @@ BEGIN_CASE(JSOP_GETXPROP)
|
|||
rval.setObject(entry->vword.toFunObj());
|
||||
} else if (entry->vword.isSlot()) {
|
||||
uint32 slot = entry->vword.toSlot();
|
||||
JS_ASSERT(slot < obj2->freeslot);
|
||||
JS_ASSERT(obj2->containsSlot(slot));
|
||||
rval = obj2->lockedGetSlot(slot);
|
||||
} else {
|
||||
JS_ASSERT(entry->vword.isShape());
|
||||
|
@ -4028,7 +4028,7 @@ BEGIN_CASE(JSOP_CALLPROP)
|
|||
rval.setObject(entry->vword.toFunObj());
|
||||
} else if (entry->vword.isSlot()) {
|
||||
uint32 slot = entry->vword.toSlot();
|
||||
JS_ASSERT(slot < obj2->freeslot);
|
||||
JS_ASSERT(obj2->containsSlot(slot));
|
||||
rval = obj2->lockedGetSlot(slot);
|
||||
} else {
|
||||
JS_ASSERT(entry->vword.isShape());
|
||||
|
@ -4196,7 +4196,7 @@ BEGIN_CASE(JSOP_SETMETHOD)
|
|||
entry->vshape() == rt->protoHazardShape &&
|
||||
shape->hasDefaultSetter()) {
|
||||
slot = shape->slot;
|
||||
JS_ASSERT(slot == obj->freeslot);
|
||||
JS_ASSERT(slot == obj->freeslot());
|
||||
|
||||
/*
|
||||
* Fast path: adding a plain old property that was once at
|
||||
|
@ -4209,8 +4209,6 @@ BEGIN_CASE(JSOP_SETMETHOD)
|
|||
|
||||
if (slot < obj->numSlots()) {
|
||||
JS_ASSERT(obj->getSlot(slot).isUndefined());
|
||||
++obj->freeslot;
|
||||
JS_ASSERT(obj->freeslot != 0);
|
||||
} else {
|
||||
if (!obj->allocSlot(cx, &slot))
|
||||
goto error;
|
||||
|
@ -4683,7 +4681,7 @@ BEGIN_CASE(JSOP_CALLNAME)
|
|||
PUSH_OBJECT(entry->vword.toFunObj());
|
||||
} else if (entry->vword.isSlot()) {
|
||||
uintN slot = entry->vword.toSlot();
|
||||
JS_ASSERT(slot < obj2->freeslot);
|
||||
JS_ASSERT(obj2->containsSlot(slot));
|
||||
PUSH_COPY(obj2->lockedGetSlot(slot));
|
||||
} else {
|
||||
JS_ASSERT(entry->vword.isShape());
|
||||
|
@ -5182,7 +5180,7 @@ BEGIN_CASE(JSOP_CALLGLOBAL)
|
|||
uint32 slot = GET_SLOTNO(regs.pc);
|
||||
slot = script->getGlobalSlot(slot);
|
||||
JSObject *obj = fp->getScopeChain()->getGlobal();
|
||||
JS_ASSERT(slot < obj->freeslot);
|
||||
JS_ASSERT(obj->containsSlot(slot));
|
||||
PUSH_COPY(obj->getSlot(slot));
|
||||
if (op == JSOP_CALLGLOBAL)
|
||||
PUSH_NULL();
|
||||
|
@ -5198,7 +5196,7 @@ BEGIN_CASE(JSOP_FORGLOBAL)
|
|||
uint32 slot = GET_SLOTNO(regs.pc);
|
||||
slot = script->getGlobalSlot(slot);
|
||||
JSObject *obj = fp->getScopeChain()->getGlobal();
|
||||
JS_ASSERT(slot < obj->freeslot);
|
||||
JS_ASSERT(obj->containsSlot(slot));
|
||||
JS_LOCK_OBJ(cx, obj);
|
||||
{
|
||||
if (!obj->methodWriteBarrier(cx, slot, rval)) {
|
||||
|
@ -5217,7 +5215,7 @@ BEGIN_CASE(JSOP_SETGLOBAL)
|
|||
uint32 slot = GET_SLOTNO(regs.pc);
|
||||
slot = script->getGlobalSlot(slot);
|
||||
JSObject *obj = fp->getScopeChain()->getGlobal();
|
||||
JS_ASSERT(slot < obj->freeslot);
|
||||
JS_ASSERT(obj->containsSlot(slot));
|
||||
{
|
||||
JS_LOCK_OBJ(cx, obj);
|
||||
if (!obj->methodWriteBarrier(cx, slot, regs.sp[-1])) {
|
||||
|
@ -5549,7 +5547,6 @@ BEGIN_CASE(JSOP_LAMBDA)
|
|||
JS_ASSERT(lref.isObject());
|
||||
JSObject *obj2 = &lref.toObject();
|
||||
JS_ASSERT(obj2->getClass() == &js_ObjectClass);
|
||||
JS_ASSERT(obj2->freeslot >= JSSLOT_FREE(&js_ObjectClass));
|
||||
#endif
|
||||
|
||||
fun->setMethodAtom(script->getAtom(GET_FULL_INDEX(JSOP_LAMBDA_LENGTH)));
|
||||
|
@ -5864,12 +5861,10 @@ BEGIN_CASE(JSOP_INITMETHOD)
|
|||
/* Fast path. Property cache hit. */
|
||||
uint32 slot = shape->slot;
|
||||
|
||||
JS_ASSERT(slot == obj->freeslot);
|
||||
JS_ASSERT(slot == obj->freeslot());
|
||||
JS_ASSERT(slot >= JSSLOT_FREE(obj->getClass()));
|
||||
if (slot < obj->numSlots()) {
|
||||
JS_ASSERT(obj->getSlot(slot).isUndefined());
|
||||
++obj->freeslot;
|
||||
JS_ASSERT(obj->freeslot != 0);
|
||||
} else {
|
||||
if (!obj->allocSlot(cx, &slot))
|
||||
goto error;
|
||||
|
@ -6483,7 +6478,7 @@ BEGIN_CASE(JSOP_ENTERBLOCK)
|
|||
{
|
||||
JSObject *obj;
|
||||
LOAD_OBJECT(0, obj);
|
||||
JS_ASSERT(!OBJ_IS_CLONED_BLOCK(obj));
|
||||
JS_ASSERT(obj->isStaticBlock());
|
||||
JS_ASSERT(fp->base() + OBJ_BLOCK_DEPTH(cx, obj) == regs.sp);
|
||||
Value *vp = regs.sp + OBJ_BLOCK_COUNT(cx, obj);
|
||||
JS_ASSERT(regs.sp < vp);
|
||||
|
@ -6508,7 +6503,7 @@ BEGIN_CASE(JSOP_ENTERBLOCK)
|
|||
if (clasp == &js_BlockClass &&
|
||||
obj2->getPrivate() == js_FloatingFrameIfGenerator(cx, fp)) {
|
||||
JSObject *youngestProto = obj2->getProto();
|
||||
JS_ASSERT(!OBJ_IS_CLONED_BLOCK(youngestProto));
|
||||
JS_ASSERT(youngestProto->isStaticBlock());
|
||||
JSObject *parent = obj;
|
||||
while ((parent = parent->getParent()) != youngestProto)
|
||||
JS_ASSERT(parent);
|
||||
|
@ -6523,7 +6518,7 @@ BEGIN_CASE(JSOP_LEAVEBLOCKEXPR)
|
|||
BEGIN_CASE(JSOP_LEAVEBLOCK)
|
||||
{
|
||||
#ifdef DEBUG
|
||||
JS_ASSERT(fp->getBlockChain()->getClass() == &js_BlockClass);
|
||||
JS_ASSERT(fp->getBlockChain()->isStaticBlock());
|
||||
uintN blockDepth = OBJ_BLOCK_DEPTH(cx, fp->getBlockChain());
|
||||
|
||||
JS_ASSERT(blockDepth <= StackDepth(script));
|
||||
|
@ -6535,7 +6530,7 @@ BEGIN_CASE(JSOP_LEAVEBLOCK)
|
|||
*/
|
||||
JSObject *obj = fp->getScopeChain();
|
||||
if (obj->getProto() == fp->getBlockChain()) {
|
||||
JS_ASSERT(obj->getClass() == &js_BlockClass);
|
||||
JS_ASSERT(obj->isClonedBlock());
|
||||
if (!js_PutBlockObject(cx, JS_TRUE))
|
||||
goto error;
|
||||
}
|
||||
|
|
|
@ -507,7 +507,7 @@ FinishSharingTitle(JSContext *cx, JSTitle *title)
|
|||
|
||||
JSObject *obj = TITLE_TO_OBJECT(title);
|
||||
if (obj) {
|
||||
uint32 nslots = obj->freeslot;
|
||||
uint32 nslots = obj->freeslot();
|
||||
JS_ASSERT(nslots >= JSSLOT_START(obj->getClass()));
|
||||
for (uint32 i = JSSLOT_START(obj->getClass()); i != nslots; ++i) {
|
||||
Value v = obj->getSlot(i);
|
||||
|
@ -674,7 +674,7 @@ js_GetSlotThreadSafe(JSContext *cx, JSObject *obj, uint32 slot)
|
|||
* and contention-free multi-threaded cases.
|
||||
*/
|
||||
JS_ASSERT(obj->title.ownercx != cx);
|
||||
JS_ASSERT(slot < obj->freeslot);
|
||||
JS_ASSERT(obj->containsSlot(slot));
|
||||
|
||||
/*
|
||||
* Avoid locking if called from the GC. Also avoid locking a sealed
|
||||
|
@ -752,7 +752,7 @@ js_SetSlotThreadSafe(JSContext *cx, JSObject *obj, uint32 slot, jsval v)
|
|||
* and contention-free multi-threaded cases.
|
||||
*/
|
||||
JS_ASSERT(obj->title.ownercx != cx);
|
||||
JS_ASSERT(slot < obj->freeslot);
|
||||
JS_ASSERT(obj->containsSlot(slot));
|
||||
|
||||
/*
|
||||
* Avoid locking if called from the GC. Also avoid locking a sealed
|
||||
|
js/src/jsobj.cpp — 201 changes
@ -2960,8 +2960,7 @@ js_NewBlockObject(JSContext *cx)
|
|||
JSObject *
|
||||
js_CloneBlockObject(JSContext *cx, JSObject *proto, JSStackFrame *fp)
|
||||
{
|
||||
JS_ASSERT(!OBJ_IS_CLONED_BLOCK(proto));
|
||||
JS_ASSERT(proto->getClass() == &js_BlockClass);
|
||||
JS_ASSERT(proto->isStaticBlock());
|
||||
|
||||
JSObject *clone = js_NewGCObject(cx);
|
||||
if (!clone)
|
||||
|
@ -2973,11 +2972,11 @@ js_CloneBlockObject(JSContext *cx, JSObject *proto, JSStackFrame *fp)
|
|||
clone->init(&js_BlockClass, proto, NULL, priv, cx);
|
||||
clone->fslots[JSSLOT_BLOCK_DEPTH] = proto->fslots[JSSLOT_BLOCK_DEPTH];
|
||||
|
||||
clone->setMap(cx->runtime->emptyBlockShape);
|
||||
JS_ASSERT(OBJ_IS_CLONED_BLOCK(clone));
|
||||
|
||||
clone->setMap(proto->map);
|
||||
if (!clone->ensureInstanceReservedSlots(cx, OBJ_BLOCK_COUNT(cx, proto)))
|
||||
return NULL;
|
||||
|
||||
JS_ASSERT(clone->isClonedBlock());
|
||||
return clone;
|
||||
}
|
||||
|
||||
|
@ -2989,17 +2988,8 @@ js_PutBlockObject(JSContext *cx, JSBool normalUnwind)
|
|||
|
||||
JSStackFrame *const fp = cx->fp();
|
||||
JSObject *obj = fp->getScopeChain();
|
||||
JS_ASSERT(obj->getClass() == &js_BlockClass);
|
||||
JS_ASSERT(obj->isClonedBlock());
|
||||
JS_ASSERT(obj->getPrivate() == js_FloatingFrameIfGenerator(cx, cx->fp()));
|
||||
JS_ASSERT(OBJ_IS_CLONED_BLOCK(obj));
|
||||
|
||||
/*
|
||||
* Block objects should never be exposed to scripts. Therefore the clone
|
||||
* must not have "own" properties, rather it always delegates property
|
||||
* accesses to its compiler-created prototype Block object, which is the
|
||||
* object that has shapes mapping all the let bindings.
|
||||
*/
|
||||
JS_ASSERT(obj->nativeEmpty());
|
||||
|
||||
/* Block objects should have all reserved slots allocated early. */
|
||||
uintN count = OBJ_BLOCK_COUNT(cx, obj);
|
||||
|
@ -3040,8 +3030,7 @@ block_getProperty(JSContext *cx, JSObject *obj, jsid id, Value *vp)
|
|||
* with care. So unlike other getters, this one can assert (rather than
|
||||
* check) certain invariants about obj.
|
||||
*/
|
||||
JS_ASSERT(obj->getClass() == &js_BlockClass);
|
||||
JS_ASSERT(OBJ_IS_CLONED_BLOCK(obj));
|
||||
JS_ASSERT(obj->isClonedBlock());
|
||||
uintN index = (uintN) JSID_TO_INT(id);
|
||||
JS_ASSERT(index < OBJ_BLOCK_COUNT(cx, obj));
|
||||
|
||||
|
@ -3054,20 +3043,15 @@ block_getProperty(JSContext *cx, JSObject *obj, jsid id, Value *vp)
|
|||
return true;
|
||||
}
|
||||
|
||||
/* Values are in reserved slots immediately following DEPTH. */
|
||||
uint32 slot = JSSLOT_BLOCK_DEPTH + 1 + index;
|
||||
JS_LOCK_OBJ(cx, obj);
|
||||
JS_ASSERT(slot < obj->numSlots());
|
||||
*vp = obj->getSlot(slot);
|
||||
JS_UNLOCK_OBJ(cx, obj);
|
||||
/* Values are in slots immediately following the class-reserved ones. */
|
||||
JS_ASSERT(obj->getSlot(JSSLOT_FREE(&js_BlockClass) + index) == *vp);
|
||||
return true;
|
||||
}
|
||||
|
||||
static JSBool
|
||||
block_setProperty(JSContext *cx, JSObject *obj, jsid id, Value *vp)
|
||||
{
|
||||
JS_ASSERT(obj->getClass() == &js_BlockClass);
|
||||
JS_ASSERT(OBJ_IS_CLONED_BLOCK(obj));
|
||||
JS_ASSERT(obj->isClonedBlock());
|
||||
uintN index = (uintN) JSID_TO_INT(id);
|
||||
JS_ASSERT(index < OBJ_BLOCK_COUNT(cx, obj));
|
||||
|
||||
|
@ -3080,27 +3064,29 @@ block_setProperty(JSContext *cx, JSObject *obj, jsid id, Value *vp)
|
|||
return true;
|
||||
}
|
||||
|
||||
/* Values are in reserved slots immediately following DEPTH. */
|
||||
uint32 slot = JSSLOT_BLOCK_DEPTH + 1 + index;
|
||||
JS_LOCK_OBJ(cx, obj);
|
||||
JS_ASSERT(slot < obj->numSlots());
|
||||
obj->setSlot(slot, *vp);
|
||||
JS_UNLOCK_OBJ(cx, obj);
|
||||
/*
|
||||
* The value in *vp will be written back to the slot in obj that was
|
||||
* allocated when this let binding was defined.
|
||||
*/
|
||||
return true;
|
||||
}
|
||||
|
||||
JSBool
|
||||
js_DefineBlockVariable(JSContext *cx, JSObject *obj, jsid id, intN index)
|
||||
const Shape *
|
||||
JSObject::defineBlockVariable(JSContext *cx, jsid id, intN index)
|
||||
{
|
||||
JS_ASSERT(obj->getClass() == &js_BlockClass);
|
||||
JS_ASSERT(!OBJ_IS_CLONED_BLOCK(obj));
|
||||
JS_ASSERT(isStaticBlock());
|
||||
|
||||
/* Use JSPROP_ENUMERATE to aid the disassembler. */
|
||||
return js_DefineNativeProperty(cx, obj, id, UndefinedValue(),
|
||||
block_getProperty,
|
||||
block_setProperty,
|
||||
JSPROP_ENUMERATE | JSPROP_PERMANENT | JSPROP_SHARED,
|
||||
Shape::HAS_SHORTID, index, NULL);
|
||||
uint32 slot = JSSLOT_FREE(&js_BlockClass) + index;
|
||||
const Shape *shape = addProperty(cx, id,
|
||||
block_getProperty, block_setProperty,
|
||||
slot, JSPROP_ENUMERATE | JSPROP_PERMANENT,
|
||||
Shape::HAS_SHORTID, index);
|
||||
if (!shape)
|
||||
return NULL;
|
||||
if (slot >= numSlots() && !growSlots(cx, slot + 1))
|
||||
return NULL;
|
||||
return shape;
|
||||
}
|
||||
|
||||
static size_t
|
||||
|
@ -3157,13 +3143,9 @@ js_XDRBlockObject(JSXDRState *xdr, JSObject **objp)
|
|||
JSContext *cx;
|
||||
uint32 parentId;
|
||||
JSObject *obj, *parent;
|
||||
uint16 depth, count, i;
|
||||
uint32 tmp;
|
||||
uintN depth, count;
|
||||
uint32 depthAndCount;
|
||||
const Shape *shape;
|
||||
jsid propid;
|
||||
JSAtom *atom;
|
||||
int16 shortid;
|
||||
JSBool ok;
|
||||
|
||||
cx = xdr->cx;
|
||||
#ifdef __GNUC__
|
||||
|
@ -3178,7 +3160,7 @@ js_XDRBlockObject(JSXDRState *xdr, JSObject **objp)
|
|||
: FindObjectIndex(xdr->script->objects(), parent);
|
||||
depth = (uint16)OBJ_BLOCK_DEPTH(cx, obj);
|
||||
count = (uint16)OBJ_BLOCK_COUNT(cx, obj);
|
||||
tmp = (uint32)(depth << 16) | count;
|
||||
depthAndCount = (uint32)(depth << 16) | count;
|
||||
}
|
||||
#ifdef __GNUC__ /* suppress bogus gcc warnings */
|
||||
else count = 0;
|
||||
|
@ -3208,46 +3190,51 @@ js_XDRBlockObject(JSXDRState *xdr, JSObject **objp)
|
|||
|
||||
AutoObjectRooter tvr(cx, obj);
|
||||
|
||||
if (!JS_XDRUint32(xdr, &tmp))
|
||||
if (!JS_XDRUint32(xdr, &depthAndCount))
|
||||
return false;
|
||||
|
||||
Vector<const Shape *, 8> shapes(cx);
|
||||
shapes.growByUninitialized(count);
|
||||
|
||||
if (xdr->mode == JSXDR_DECODE) {
|
||||
depth = (uint16)(tmp >> 16);
|
||||
count = (uint16)tmp;
|
||||
depth = (uint16)(depthAndCount >> 16);
|
||||
count = (uint16)depthAndCount;
|
||||
obj->setSlot(JSSLOT_BLOCK_DEPTH, Value(Int32Value(depth)));
|
||||
} else {
|
||||
for (Shape::Range r(obj->lastProperty()); !r.empty(); r.popFront()) {
|
||||
shape = &r.front();
|
||||
shapes[shape->shortid] = shape;
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* XDR the block object's properties. We know that there are 'count'
|
||||
* properties to XDR, stored as id/shortid pairs. We do not XDR any
|
||||
* non-native properties, only those that the compiler created.
|
||||
* properties to XDR, stored as id/shortid pairs.
|
||||
*/
|
||||
shape = NULL;
|
||||
ok = JS_TRUE;
|
||||
for (i = 0; i < count; i++) {
|
||||
if (xdr->mode == JSXDR_ENCODE) {
|
||||
/* Find a property to XDR. */
|
||||
do {
|
||||
/* If shape is NULL, this is the first property. */
|
||||
shape = shape ? shape->previous() : obj->lastProperty();
|
||||
} while (!shape->hasShortID());
|
||||
for (uintN i = 0; i < count; i++) {
|
||||
JSAtom *atom;
|
||||
uint16 shortid;
|
||||
|
||||
if (xdr->mode == JSXDR_ENCODE) {
|
||||
shape = shapes[i];
|
||||
JS_ASSERT(shape->getter() == block_getProperty);
|
||||
propid = shape->id;
|
||||
|
||||
jsid propid = shape->id;
|
||||
JS_ASSERT(JSID_IS_ATOM(propid));
|
||||
atom = JSID_TO_ATOM(propid);
|
||||
shortid = shape->shortid;
|
||||
JS_ASSERT(shortid >= 0);
|
||||
|
||||
shortid = uint16(shape->shortid);
|
||||
JS_ASSERT(shortid == i);
|
||||
}
|
||||
|
||||
/* XDR the real id, then the shortid. */
|
||||
if (!js_XDRAtom(xdr, &atom) ||
|
||||
!JS_XDRUint16(xdr, (uint16 *)&shortid)) {
|
||||
!JS_XDRUint16(xdr, &shortid)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
if (xdr->mode == JSXDR_DECODE) {
|
||||
if (!js_DefineBlockVariable(cx, obj, ATOM_TO_JSID(atom), shortid))
|
||||
if (!obj->defineBlockVariable(cx, ATOM_TO_JSID(atom), shortid))
|
||||
return false;
|
||||
}
|
||||
}
|
||||
|
@ -3532,8 +3519,8 @@ JSObject::growSlots(JSContext *cx, size_t nslots)
|
|||
if (nslots <= JS_INITIAL_NSLOTS)
|
||||
return true;
|
||||
|
||||
/* Don't let nslots (or JSObject::freeslot) get close to overflowing. */
|
||||
if (nslots >= JS_NSLOTS_LIMIT) {
|
||||
/* Don't let nslots get close to wrapping around uint32. */
|
||||
if (nslots >= NSLOTS_LIMIT) {
|
||||
JS_ReportOutOfMemory(cx);
|
||||
return false;
|
||||
}
|
||||
|
@ -3611,17 +3598,9 @@ JSObject::ensureInstanceReservedSlots(JSContext *cx, size_t nreserved)
|
|||
{
|
||||
JS_ASSERT_IF(isNative(),
|
||||
isBlock() || isCall() || (isFunction() && getFunctionPrivate()->isBound()));
|
||||
JS_ASSERT_IF(isBlock(), nativeEmpty());
|
||||
|
||||
uintN nslots = JSSLOT_FREE(clasp) + nreserved;
|
||||
if (nslots > numSlots() && !allocSlots(cx, nslots))
|
||||
return false;
|
||||
|
||||
JS_ASSERT(freeslot >= JSSLOT_START(clasp));
|
||||
JS_ASSERT(freeslot <= JSSLOT_FREE(clasp));
|
||||
if (freeslot < nslots)
|
||||
freeslot = nslots;
|
||||
return true;
|
||||
return nslots <= numSlots() || allocSlots(cx, nslots);
|
||||
}
|
||||
|
||||
static JSObject *
|
||||
|
@ -3887,12 +3866,22 @@ js_ConstructObject(JSContext *cx, Class *clasp, JSObject *proto, JSObject *paren
|
|||
bool
|
||||
JSObject::allocSlot(JSContext *cx, uint32 *slotp)
|
||||
{
|
||||
JS_ASSERT(freeslot >= JSSLOT_FREE(clasp));
|
||||
uint32 slot = freeslot();
|
||||
JS_ASSERT(slot >= JSSLOT_FREE(clasp));
|
||||
|
||||
/*
|
||||
* If this object is in dictionary mode and it has a property table, try to
|
||||
* pull a free slot from the property table's slot-number freelist.
|
||||
*/
|
||||
if (inDictionaryMode() && lastProp->table) {
|
||||
uint32 &last = lastProp->table->freeslot;
|
||||
if (last != SHAPE_INVALID_SLOT) {
|
||||
JS_ASSERT(last < freeslot);
|
||||
#ifdef DEBUG
|
||||
JS_ASSERT(last < slot);
|
||||
uint32 next = getSlot(last).toPrivateUint32();
|
||||
JS_ASSERT_IF(next != SHAPE_INVALID_SLOT, next < slot);
|
||||
#endif
|
||||
|
||||
*slotp = last;
|
||||
|
||||
Value &vref = getSlotRef(last);
|
||||
|
@ -3902,29 +3891,36 @@ JSObject::allocSlot(JSContext *cx, uint32 *slotp)
|
|||
}
|
||||
}
|
||||
|
||||
if (freeslot >= numSlots() && !growSlots(cx, freeslot + 1))
|
||||
if (slot >= numSlots() && !growSlots(cx, slot + 1))
|
||||
return false;
|
||||
|
||||
/* JSObject::growSlots or JSObject::freeSlot should set the free slots to void. */
|
||||
JS_ASSERT(getSlot(freeslot).isUndefined());
|
||||
*slotp = freeslot++;
|
||||
JS_ASSERT(freeslot != 0);
|
||||
JS_ASSERT(getSlot(slot).isUndefined());
|
||||
*slotp = slot;
|
||||
return true;
|
||||
}
|
||||
|
||||
void
|
||||
JSObject::freeSlot(JSContext *cx, uint32 slot)
|
||||
{
|
||||
JS_ASSERT(freeslot > JSSLOT_FREE(clasp));
|
||||
uint32 limit = freeslot();
|
||||
JS_ASSERT(slot < limit);
|
||||
|
||||
Value &vref = getSlotRef(slot);
|
||||
if (freeslot == slot + 1) {
|
||||
freeslot = slot;
|
||||
} else {
|
||||
if (inDictionaryMode() && lastProp->table) {
|
||||
uint32 &last = lastProp->table->freeslot;
|
||||
if (inDictionaryMode() && lastProp->table) {
|
||||
uint32 &last = lastProp->table->freeslot;
|
||||
|
||||
JS_ASSERT_IF(last != SHAPE_INVALID_SLOT, last < freeslot);
|
||||
/* Can't afford to check the whole freelist, but let's check the head. */
|
||||
JS_ASSERT_IF(last != SHAPE_INVALID_SLOT, last < limit && last != slot);
|
||||
|
||||
/*
|
||||
* Freeing a slot other than the last one mapped by this object's
|
||||
* shape: push the slot onto the dictionary table's freelist. We want
|
||||
* to let the last slot be freed by shrinking the dslots vector; see
|
||||
* js_TraceObject.
|
||||
*/
|
||||
if (slot + 1 < limit) {
|
||||
JS_ASSERT_IF(last != SHAPE_INVALID_SLOT, last < freeslot());
|
||||
vref.setPrivateUint32(last);
|
||||
last = slot;
|
||||
return;
|
||||
|
@ -4104,7 +4100,7 @@ js_DefineProperty(JSContext *cx, JSObject *obj, jsid id, const Value *value,
|
|||
|
||||
/*
|
||||
* Backward compatibility requires allowing addProperty hooks to mutate the
|
||||
* nominal initial value of a slot-full property, while GC safety wants that
|
||||
* nominal initial value of a slotful property, while GC safety wants that
|
||||
* value to be stored before the call-out through the hook. Optimize to do
|
||||
* both while saving cycles for classes that stub their addProperty hook.
|
||||
*/
|
||||
|
@ -4508,11 +4504,12 @@ js_FindPropertyHelper(JSContext *cx, jsid id, JSBool cacheResult,
|
|||
JS_ASSERT(pobj->getClass() == clasp);
|
||||
if (clasp == &js_BlockClass) {
|
||||
/*
|
||||
* A block instance on the scope chain is immutable and
|
||||
* the compile-time prototype provides all its properties.
|
||||
* A block instance on the scope chain is immutable and it
|
||||
* shares its shapes with its compile-time prototype.
|
||||
*/
|
||||
JS_ASSERT(pobj == obj->getProto());
|
||||
JS_ASSERT(protoIndex == 1);
|
||||
JS_ASSERT(pobj == obj);
|
||||
JS_ASSERT(pobj->isClonedBlock());
|
||||
JS_ASSERT(protoIndex == 0);
|
||||
} else {
|
||||
/* Call and DeclEnvClass objects have no prototypes. */
|
||||
JS_ASSERT(!obj->getProto());
|
||||
|
@ -4957,7 +4954,7 @@ js_SetPropertyHelper(JSContext *cx, JSObject *obj, jsid id, uintN defineHow,
|
|||
prop = NULL;
|
||||
} else {
|
||||
/* We should never add properties to lexical blocks. */
|
||||
JS_ASSERT(obj->getClass() != &js_BlockClass);
|
||||
JS_ASSERT(!obj->isBlock());
|
||||
|
||||
if (!obj->getParent() &&
|
||||
(defineHow & JSDNP_UNQUALIFIED) &&
|
||||
|
@ -5924,7 +5921,7 @@ js_TraceObject(JSTracer *trc, JSObject *obj)
|
|||
* The !obj->nativeEmpty() guard above is due to the bug described by
|
||||
* the FIXME comment below.
|
||||
*/
|
||||
size_t slots = obj->freeslot;
|
||||
size_t slots = obj->freeslot();
|
||||
if (obj->numSlots() != slots)
|
||||
obj->shrinkSlots(cx, slots);
|
||||
}
|
||||
|
@ -5962,8 +5959,8 @@ js_TraceObject(JSTracer *trc, JSObject *obj)
|
|||
* the general problem occurs in other built-in class implementations).
|
||||
*/
|
||||
uint32 nslots = obj->numSlots();
|
||||
if (!obj->nativeEmpty() && obj->freeslot < nslots)
|
||||
nslots = obj->freeslot;
|
||||
if (!obj->nativeEmpty() && obj->freeslot() < nslots)
|
||||
nslots = obj->freeslot();
|
||||
JS_ASSERT(nslots >= JSSLOT_START(clasp));
|
||||
|
||||
for (uint32 i = JSSLOT_START(clasp); i != nslots; ++i) {
|
||||
|
@ -5990,7 +5987,6 @@ js_ClearNative(JSContext *cx, JSObject *obj)
|
|||
uint32 n = obj->numSlots();
|
||||
for (uint32 i = freeslot; i < n; ++i)
|
||||
obj->setSlot(i, UndefinedValue());
|
||||
obj->freeslot = freeslot;
|
||||
}
|
||||
JS_UNLOCK_OBJ(cx, obj);
|
||||
}
|
||||
|
@ -6032,9 +6028,6 @@ js_SetReservedSlot(JSContext *cx, JSObject *obj, uint32 index, const Value &v)
|
|||
}
|
||||
}
|
||||
|
||||
if (slot >= obj->freeslot)
|
||||
obj->freeslot = slot + 1;
|
||||
|
||||
obj->setSlot(slot, v);
|
||||
GC_POKE(cx, JS_NULL);
|
||||
JS_UNLOCK_OBJ(cx, obj);
|
||||
|
@ -6339,7 +6332,7 @@ js_DumpObject(JSObject *obj)
|
|||
|
||||
fprintf(stderr, "slots:\n");
|
||||
reservedEnd = i + JSCLASS_RESERVED_SLOTS(clasp);
|
||||
slots = obj->freeslot;
|
||||
slots = obj->freeslot();
|
||||
for (; i < slots; i++) {
|
||||
fprintf(stderr, " %3d ", i);
|
||||
if (i < reservedEnd)
|
||||
|
|
|
@@ -181,8 +181,10 @@ struct JSObjectMap {
     static JS_FRIEND_DATA(const JSObjectMap) sharedNonNative;
 
     uint32 shape;       /* shape identifier */
+    uint32 freeslot;    /* first free object slot */
 
-    explicit JSObjectMap(uint32 shape) : shape(shape) {}
+    explicit JSObjectMap(uint32 shape) : shape(shape), freeslot(0) {}
+    JSObjectMap(uint32 shape, uint32 freeslot) : shape(shape), freeslot(freeslot) {}
 
     enum { INVALID_SHAPE = 0x8fffffff, SHAPELESS = 0xffffffff };
 
@@ -325,13 +327,16 @@ struct JSObject {
         OWN_SHAPE = 0x80
     };
 
+    /*
+     * Impose a sane upper bound, originally checked only for dense arrays, on
+     * number of slots in an object.
+     */
     enum {
-        JS_NSLOTS_BITS = 24,
-        JS_NSLOTS_LIMIT = JS_BIT(JS_NSLOTS_BITS)
+        NSLOTS_BITS = 29,
+        NSLOTS_LIMIT = JS_BIT(NSLOTS_BITS)
     };
 
-    uint32      flags: 32-JS_NSLOTS_BITS,   /* flags */
-                freeslot: JS_NSLOTS_BITS;   /* next free slot in abstract slot space */
+    uint32      flags;                      /* flags */
     uint32      objShape;                   /* copy of lastProp->shape, or override if different */
 
     JSObject    *proto;                     /* object's prototype */
@@ -371,9 +376,6 @@ struct JSObject {
 
     inline void trace(JSTracer *trc);
 
-    static size_t flagsOffset();
-    uint32 flagsAndFreeslot();
-
     uint32 shape() const {
         JS_ASSERT(objShape != JSObjectMap::INVALID_SHAPE);
         return objShape;
@@ -571,7 +573,9 @@ struct JSObject {
 
     inline bool ensureClassReservedSlots(JSContext *cx);
 
-    bool containsSlot(uint32 slot) const { return slot < freeslot; }
+    uint32 freeslot() const { return map->freeslot; }
+
+    bool containsSlot(uint32 slot) const { return slot < freeslot(); }
 
     js::Value& getSlotRef(uintN slot) {
         return (slot < JS_INITIAL_NSLOTS)
@ -1061,6 +1065,8 @@ struct JSObject {
|
|||
|
||||
inline JSObject *getThrowTypeError() const;
|
||||
|
||||
const js::Shape *defineBlockVariable(JSContext *cx, jsid id, intN index);
|
||||
|
||||
void swap(JSObject *obj);
|
||||
|
||||
inline bool canHaveMethodBarrier() const;
|
||||
|
@ -1080,6 +1086,8 @@ struct JSObject {
|
|||
inline bool isObject() const;
|
||||
inline bool isWith() const;
|
||||
inline bool isBlock() const;
|
||||
inline bool isStaticBlock() const;
|
||||
inline bool isClonedBlock() const;
|
||||
inline bool isCall() const;
|
||||
inline bool isRegExp() const;
|
||||
inline bool isXML() const;
|
||||
|
@ -1115,7 +1123,7 @@ JS_STATIC_ASSERT(sizeof(JSObject) % JS_GCTHING_ALIGN == 0);
|
|||
#define MAX_DSLOTS_LENGTH (~size_t(0) / sizeof(js::Value) - 1)
|
||||
#define MAX_DSLOTS_LENGTH32 (~uint32(0) / sizeof(js::Value) - 1)
|
||||
|
||||
#define OBJ_CHECK_SLOT(obj,slot) (JS_ASSERT(slot < (obj)->freeslot))
|
||||
#define OBJ_CHECK_SLOT(obj,slot) JS_ASSERT((obj)->containsSlot(slot))
|
||||
|
||||
#ifdef JS_THREADSAFE
|
||||
|
||||
|
@ -1187,23 +1195,26 @@ inline bool JSObject::isBlock() const { return getClass() == &js_BlockClass; }
|
|||
*/
|
||||
static const uint32 JSSLOT_BLOCK_DEPTH = JSSLOT_PRIVATE + 1;
|
||||
|
||||
static inline bool
|
||||
OBJ_IS_CLONED_BLOCK(JSObject *obj)
|
||||
inline bool
|
||||
JSObject::isStaticBlock() const
|
||||
{
|
||||
return obj->getProto() != NULL;
|
||||
return isBlock() && !getProto();
|
||||
}
|
||||
|
||||
inline bool
|
||||
JSObject::isClonedBlock() const
|
||||
{
|
||||
return isBlock() && !!getProto();
|
||||
}
|
||||
|
||||
static const uint32 JSSLOT_WITH_THIS = JSSLOT_PRIVATE + 2;
|
||||
|
||||
extern JSBool
|
||||
js_DefineBlockVariable(JSContext *cx, JSObject *obj, jsid id, intN index);
|
||||
|
||||
#define OBJ_BLOCK_COUNT(cx,obj) \
|
||||
((OBJ_IS_CLONED_BLOCK(obj) ? obj->getProto() : obj)->propertyCount())
|
||||
(obj)->propertyCount()
|
||||
#define OBJ_BLOCK_DEPTH(cx,obj) \
|
||||
obj->getSlot(JSSLOT_BLOCK_DEPTH).toInt32()
|
||||
(obj)->getSlot(JSSLOT_BLOCK_DEPTH).toInt32()
|
||||
#define OBJ_SET_BLOCK_DEPTH(cx,obj,depth) \
|
||||
obj->setSlot(JSSLOT_BLOCK_DEPTH, Value(Int32Value(depth)))
|
||||
(obj)->setSlot(JSSLOT_BLOCK_DEPTH, Value(Int32Value(depth)))
|
||||
|
||||
/*
|
||||
* To make sure this slot is well-defined, always call js_NewWithObject to
|
||||
|
|
|
@ -576,7 +576,6 @@ JSObject::initCommon(js::Class *aclasp, JSObject *proto, JSObject *parent,
|
|||
|
||||
clasp = aclasp;
|
||||
flags = 0;
|
||||
freeslot = JSSLOT_START(aclasp);
|
||||
|
||||
#ifdef DEBUG
|
||||
/*
|
||||
|
@ -706,40 +705,6 @@ js_IsCallable(const js::Value &v)
|
|||
return v.isObject() && v.toObject().isCallable();
|
||||
}
|
||||
|
||||
inline size_t
|
||||
JSObject::flagsOffset()
|
||||
{
|
||||
static size_t offset = 0;
|
||||
if (offset)
|
||||
return offset;
|
||||
|
||||
/*
|
||||
* We can't address a bitfield, so instead we create a struct, set only
|
||||
* the field we care about, then search for it.
|
||||
*/
|
||||
JSObject fakeObj;
|
||||
memset(&fakeObj, 0, sizeof(fakeObj));
|
||||
fakeObj.flags = 1;
|
||||
for (unsigned testOffset = 0; testOffset < sizeof(fakeObj); testOffset += sizeof(uint32)) {
|
||||
uint32 *ptr = reinterpret_cast<uint32 *>(reinterpret_cast<char *>(&fakeObj) + testOffset);
|
||||
if (*ptr) {
|
||||
JS_ASSERT(*ptr == 1);
|
||||
offset = testOffset;
|
||||
return offset;
|
||||
}
|
||||
}
|
||||
JS_NOT_REACHED("memory weirdness");
|
||||
return 0;
|
||||
}
|
||||
|
||||
inline uint32
|
||||
JSObject::flagsAndFreeslot()
|
||||
{
|
||||
size_t offset = flagsOffset();
|
||||
char *ptr = offset + (char*) this;
|
||||
return *(uint32*)ptr;
|
||||
}
|
||||
|
||||
namespace js {
|
||||
|
||||
class AutoPropDescArrayRooter : private AutoGCRooter
|
||||
|
@ -819,7 +784,6 @@ InitScopeForObject(JSContext* cx, JSObject* obj, js::Class *clasp, JSObject* pro
|
|||
goto bad;
|
||||
if (freeslot > JS_INITIAL_NSLOTS && !obj->allocSlots(cx, freeslot))
|
||||
goto bad;
|
||||
obj->freeslot = freeslot;
|
||||
}
|
||||
|
||||
obj->setMap(empty);
|
||||
|
|
|
@ -777,7 +777,7 @@ Compiler::compileScript(JSContext *cx, JSObject *scopeChain, JSStackFrame *calle
|
|||
if (!js_GetClassPrototype(cx, scopeChain, JSProto_Function, &tobj))
|
||||
return NULL;
|
||||
|
||||
globalScope.globalFreeSlot = globalObj->freeslot;
|
||||
globalScope.globalFreeSlot = globalObj->freeslot();
|
||||
}
|
||||
|
||||
/* Null script early in case of error, to reduce our code footprint. */
|
||||
|
@ -947,7 +947,7 @@ Compiler::compileScript(JSContext *cx, JSObject *scopeChain, JSStackFrame *calle
|
|||
}
|
||||
|
||||
if (globalScope.defs.length()) {
|
||||
JS_ASSERT(globalObj->freeslot == globalScope.globalFreeSlot);
|
||||
JS_ASSERT(globalObj->freeslot() == globalScope.globalFreeSlot);
|
||||
JS_ASSERT(!cg.compilingForEval());
|
||||
for (size_t i = 0; i < globalScope.defs.length(); i++) {
|
||||
GlobalScope::GlobalDef &def = globalScope.defs[i];
|
||||
|
@ -3326,23 +3326,21 @@ BindLet(JSContext *cx, BindData *data, JSAtom *atom, JSTreeContext *tc)
|
|||
pn->pn_dflags |= PND_LET | PND_BOUND;
|
||||
|
||||
/*
|
||||
* Define the let binding's property before storing pn in reserved slot at
|
||||
* reserved slot index (NB: not slot number) n.
|
||||
* Define the let binding's property before storing pn in the the binding's
|
||||
* slot indexed by n off the class-reserved slot base.
|
||||
*/
|
||||
if (!js_DefineBlockVariable(cx, blockObj, ATOM_TO_JSID(atom), n))
|
||||
const Shape *shape = blockObj->defineBlockVariable(cx, ATOM_TO_JSID(atom), n);
|
||||
if (!shape)
|
||||
return false;
|
||||
|
||||
/*
|
||||
* Store pn temporarily in what would be reserved slots in a cloned block
|
||||
* object (once the prototype's final population is known, after all 'let'
|
||||
* bindings for this block have been parsed). We will free these reserved
|
||||
* slots in jsemit.cpp:EmitEnterBlock.
|
||||
* Store pn temporarily in what would be shape-mapped slots in a cloned
|
||||
* block object (once the prototype's final population is known, after all
|
||||
* 'let' bindings for this block have been parsed). We free these slots in
|
||||
* jsemit.cpp:EmitEnterBlock so they don't tie up unused space in the so-
|
||||
* called "static" prototype Block.
|
||||
*/
|
||||
uintN slot = JSSLOT_FREE(&js_BlockClass) + n;
|
||||
if (slot >= blockObj->numSlots() && !blockObj->growSlots(cx, slot + 1))
|
||||
return false;
|
||||
blockObj->freeslot = slot + 1;
|
||||
blockObj->setSlot(slot, PrivateValue(pn));
|
||||
blockObj->setSlot(shape->slot, PrivateValue(pn));
|
||||
return true;
|
||||
}
|
||||
|
||||
|
@ -3353,7 +3351,7 @@ PopStatement(JSTreeContext *tc)
|
|||
|
||||
if (stmt->flags & SIF_SCOPE) {
|
||||
JSObject *obj = stmt->blockObj;
|
||||
JS_ASSERT(!OBJ_IS_CLONED_BLOCK(obj));
|
||||
JS_ASSERT(!obj->isClonedBlock());
|
||||
|
||||
for (Shape::Range r = obj->lastProperty()->all(); !r.empty(); r.popFront()) {
|
||||
JSAtom *atom = JSID_TO_ATOM(r.front().id);
|
||||
|
@ -4136,12 +4134,9 @@ CheckDestructuring(JSContext *cx, BindData *data,
|
|||
data->binder == BindLet &&
|
||||
OBJ_BLOCK_COUNT(cx, tc->blockChain) == 0) {
|
||||
ok = !!js_DefineNativeProperty(cx, tc->blockChain,
|
||||
ATOM_TO_JSID(cx->runtime->
|
||||
atomState.emptyAtom),
|
||||
ATOM_TO_JSID(cx->runtime->atomState.emptyAtom),
|
||||
UndefinedValue(), NULL, NULL,
|
||||
JSPROP_ENUMERATE |
|
||||
JSPROP_PERMANENT |
|
||||
JSPROP_SHARED,
|
||||
JSPROP_ENUMERATE | JSPROP_PERMANENT,
|
||||
Shape::HAS_SHORTID, 0, NULL);
|
||||
if (!ok)
|
||||
goto out;
|
||||
|
|
|
@ -133,7 +133,7 @@ JS_ALWAYS_INLINE bool
|
|||
PropertyCache::testForInit(JSRuntime *rt, jsbytecode *pc, JSObject *obj,
|
||||
const js::Shape **shapep, PropertyCacheEntry **entryp)
|
||||
{
|
||||
JS_ASSERT(obj->freeslot >= JSSLOT_FREE(obj->getClass()));
|
||||
JS_ASSERT(obj->freeslot() >= JSSLOT_FREE(obj->getClass()));
|
||||
JS_ASSERT(!obj->sealed());
|
||||
uint32 kshape = obj->shape();
|
||||
PropertyCacheEntry *entry = &table[hash(pc, kshape)];
|
||||
|
|
|
@ -151,7 +151,7 @@ PropertyTree::insertChild(JSContext *cx, Shape *parent, Shape *child)
|
|||
JS_ASSERT(!JSID_IS_VOID(parent->id));
|
||||
JS_ASSERT(!JSID_IS_VOID(child->id));
|
||||
|
||||
child->parent = parent;
|
||||
child->setParent(parent);
|
||||
|
||||
KidsPointer *kidp = &parent->kids;
|
||||
if (kidp->isNull()) {
|
||||
|
@ -427,8 +427,7 @@ PropertyTree::getChild(JSContext *cx, Shape *parent, const Shape &child)
|
|||
return NULL;
|
||||
|
||||
new (shape) Shape(child.id, child.rawGetter, child.rawSetter, child.slot, child.attrs,
|
||||
child.flags, child.shortid);
|
||||
shape->shape = js_GenerateShape(cx, true);
|
||||
child.flags, child.shortid, js_GenerateShape(cx, true));
|
||||
|
||||
if (!insertChild(cx, parent, shape))
|
||||
return NULL;
|
||||
|
|
|
@ -102,28 +102,26 @@ JSObject::ensureClassReservedSlotsForEmptyObject(JSContext *cx)
|
|||
|
||||
/*
|
||||
* Subtle rule: objects that call JSObject::ensureInstanceReservedSlots
|
||||
* either must:
|
||||
* must either:
|
||||
*
|
||||
* (a) never escape anywhere an ad-hoc property could be set on them;
|
||||
* (a) never escape anywhere an ad-hoc property could be set on them; or
|
||||
*
|
||||
* (b) have at least JSSLOT_FREE(this->clasp) >= JS_INITIAL_NSLOTS.
|
||||
*
|
||||
* Note that (b) depends on fine-tuning of JS_INITIAL_NSLOTS (3).
|
||||
* (b) protect their instance-reserved slots with shapes, at least a custom
|
||||
* empty shape with the right freeslot member.
|
||||
*
|
||||
* Block objects are the only objects that fall into category (a). While
|
||||
* Call objects cannot escape, they can grow ad-hoc properties via eval
|
||||
* of a var declaration, but they have slots mapped by compiler-created
|
||||
* shapes, and thus no problem predicting first ad-hoc property slot.
|
||||
* of a var declaration, or due to a function statement being evaluated,
|
||||
* but they have slots mapped by compiler-created shapes, and thus (b) no
|
||||
* problem predicting first ad-hoc property slot. Bound Function objects
|
||||
* have a custom empty shape.
|
||||
*
|
||||
* (Note that Block and Call objects are the only native classes that are
|
||||
* allowed to call ensureInstanceReservedSlots.)
|
||||
* (Note that Block, Call, and bound Function objects are the only native
|
||||
* class objects that are allowed to call ensureInstanceReservedSlots.)
|
||||
*/
|
||||
uint32 nfixed = JSSLOT_FREE(getClass());
|
||||
if (nfixed > freeslot) {
|
||||
if (nfixed > numSlots() && !allocSlots(cx, nfixed))
|
||||
return false;
|
||||
freeslot = nfixed;
|
||||
}
|
||||
if (nfixed > numSlots() && !allocSlots(cx, nfixed))
|
||||
return false;
|
||||
|
||||
return true;
|
||||
}
|
||||
|
@ -469,16 +467,18 @@ JSObject::getChildProperty(JSContext *cx, Shape *parent, Shape &child)
|
|||
child.slot = SHAPE_INVALID_SLOT;
|
||||
} else {
|
||||
/*
|
||||
* We may have set slot from a nearly-matching shape, above.
|
||||
* If so, we're overwriting that nearly-matching shape, so we
|
||||
* can reuse its slot -- we don't need to allocate a new one.
|
||||
* Similarly, we use a specific slot if provided by the caller.
|
||||
* We may have set slot from a nearly-matching shape, above. If so,
|
||||
* we're overwriting that nearly-matching shape, so we can reuse
|
||||
* its slot -- we don't need to allocate a new one. Similarly, we
|
||||
* use a specific slot if provided by the caller.
|
||||
*/
|
||||
if (child.slot == SHAPE_INVALID_SLOT && !allocSlot(cx, &child.slot))
|
||||
return NULL;
|
||||
}
|
||||
}
|
||||
|
||||
Shape *shape;
|
||||
|
||||
if (inDictionaryMode()) {
|
||||
JS_ASSERT(parent == lastProp);
|
||||
if (parent->frozen()) {
|
||||
|
@ -487,22 +487,20 @@ JSObject::getChildProperty(JSContext *cx, Shape *parent, Shape &child)
|
|||
return NULL;
|
||||
JS_ASSERT(!parent->frozen());
|
||||
}
|
||||
if (Shape::newDictionaryShape(cx, child, &lastProp)) {
|
||||
updateFlags(lastProp);
|
||||
updateShape(cx);
|
||||
return lastProp;
|
||||
shape = Shape::newDictionaryShape(cx, child, &lastProp);
|
||||
if (!shape)
|
||||
return NULL;
|
||||
} else {
|
||||
shape = JS_PROPERTY_TREE(cx).getChild(cx, parent, child);
|
||||
if (shape) {
|
||||
JS_ASSERT(shape->parent == parent);
|
||||
JS_ASSERT_IF(parent != lastProp, parent == lastProp->parent);
|
||||
setLastProperty(shape);
|
||||
}
|
||||
return NULL;
|
||||
}
|
||||
|
||||
Shape *shape = JS_PROPERTY_TREE(cx).getChild(cx, parent, child);
|
||||
if (shape) {
|
||||
JS_ASSERT(shape->parent == parent);
|
||||
JS_ASSERT_IF(parent != lastProp, parent == lastProp->parent);
|
||||
setLastProperty(shape);
|
||||
updateFlags(shape);
|
||||
updateShape(cx);
|
||||
}
|
||||
updateFlags(shape);
|
||||
updateShape(cx);
|
||||
return shape;
|
||||
}
|
||||
|
||||
|
@ -529,9 +527,8 @@ Shape::newDictionaryShape(JSContext *cx, const Shape &child, Shape **listp)
|
|||
return NULL;
|
||||
|
||||
new (dprop) Shape(child.id, child.rawGetter, child.rawSetter, child.slot, child.attrs,
|
||||
(child.flags & ~FROZEN) | IN_DICTIONARY,
|
||||
child.shortid);
|
||||
dprop->shape = js_GenerateShape(cx, false);
|
||||
(child.flags & ~FROZEN) | IN_DICTIONARY, child.shortid,
|
||||
js_GenerateShape(cx, false), child.freeslot);
|
||||
|
||||
dprop->listp = NULL;
|
||||
dprop->insertIntoDictionary(listp);
|
||||
|
@ -794,6 +791,16 @@ JSObject::putProperty(JSContext *cx, jsid id,
|
|||
shape->removeFromDictionary(this);
|
||||
}
|
||||
|
||||
#ifdef DEBUG
|
||||
if (shape == oldLastProp) {
|
||||
JS_ASSERT(lastProp->freeslot <= shape->freeslot);
|
||||
if (shape->hasSlot())
|
||||
JS_ASSERT(shape->slot < shape->freeslot);
|
||||
if (lastProp->freeslot < numSlots())
|
||||
getSlotRef(lastProp->freeslot).setUndefined();
|
||||
}
|
||||
#endif
|
||||
|
||||
/*
|
||||
* If we fail later on trying to find or create a new shape, we will
|
||||
* restore *spp from |overwriting|. Note that we don't bother to keep
|
||||
|
@ -848,7 +855,7 @@ JSObject::changeProperty(JSContext *cx, const Shape *shape, uintN attrs, uintN m
|
|||
|
||||
attrs |= shape->attrs & mask;
|
||||
|
||||
/* Allow only shared (slot-less) => unshared (slot-full) transition. */
|
||||
/* Allow only shared (slotless) => unshared (slotful) transition. */
|
||||
JS_ASSERT(!((attrs ^ shape->attrs) & JSPROP_SHARED) ||
|
||||
!(attrs & JSPROP_SHARED));
|
||||
|
||||
|
@ -863,6 +870,7 @@ JSObject::changeProperty(JSContext *cx, const Shape *shape, uintN attrs, uintN m
|
|||
return shape;
|
||||
|
||||
Shape child(shape->id, getter, setter, shape->slot, attrs, shape->flags, shape->shortid);
|
||||
|
||||
if (inDictionaryMode()) {
|
||||
shape->removeFromDictionary(this);
|
||||
newShape = Shape::newDictionaryShape(cx, child, &lastProp);
|
||||
|
@ -940,7 +948,8 @@ JSObject::removeProperty(JSContext *cx, jsid id)
|
|||
}
|
||||
|
||||
/* First, if shape is unshared and not cleared, free its slot number. */
|
||||
if (containsSlot(shape->slot)) {
|
||||
bool hadSlot = !shape->isAlias() && containsSlot(shape->slot);
|
||||
if (hadSlot) {
|
||||
freeSlot(cx, shape->slot);
|
||||
JS_ATOMIC_INCREMENT(&cx->runtime->propertyRemovals);
|
||||
}
|
||||
|
@ -983,9 +992,29 @@ JSObject::removeProperty(JSContext *cx, jsid id)
|
|||
*/
|
||||
if (shape != lastProp)
|
||||
setOwnShape(lastProp->shape);
|
||||
shape->setTable(NULL);
|
||||
|
||||
Shape *oldLastProp = lastProp;
|
||||
shape->removeFromDictionary(this);
|
||||
lastProp->setTable(table);
|
||||
if (table) {
|
||||
if (shape == oldLastProp) {
|
||||
JS_ASSERT(shape->table == table);
|
||||
JS_ASSERT(shape->parent == lastProp);
|
||||
JS_ASSERT(shape->freeslot >= lastProp->freeslot);
|
||||
JS_ASSERT_IF(hadSlot, shape->slot + 1 <= shape->freeslot);
|
||||
|
||||
/*
|
||||
* If the dictionary table's freelist is non-empty, we must
|
||||
* preserve lastProp->freeslot. We can't reduce freeslot even
|
||||
* by one or we might lose non-decreasing freeslot order.
|
||||
*/
|
||||
if (table->freeslot != SHAPE_INVALID_SLOT)
|
||||
lastProp->freeslot = shape->freeslot;
|
||||
}
|
||||
|
||||
/* Hand off table from old to new lastProp. */
|
||||
oldLastProp->setTable(NULL);
|
||||
lastProp->setTable(table);
|
||||
}
|
||||
} else {
|
||||
/*
|
||||
* Non-dictionary-mode property tables are shared immutables, so all we
|
||||
|
|
|
@@ -208,6 +208,8 @@
 
 #define SHAPE_INVALID_SLOT 0xffffffff
 
+JS_STATIC_ASSERT(uint32(SHAPE_INVALID_SLOT + 1) == uint32(0));
+
 namespace js {
 
 /*
@@ -358,7 +360,58 @@ struct Shape : public JSObjectMap
 
     bool maybeHash(JSContext *cx);
 
-    void setTable(js::PropertyTable *t) const { table = t; }
+    void setTable(js::PropertyTable *t) const {
+        JS_ASSERT_IF(t && t->freeslot != SHAPE_INVALID_SLOT, t->freeslot < freeslot);
+        table = t;
+    }
+
+    /*
+     * Setter for parent. The challenge is to maintain JSObjectMap::freeslot in
+     * the face of arbitrary slot order.
+     *
+     * By induction, an empty shape has a freeslot member correctly computed as
+     * JSCLASS_FREE(clasp) -- see EmptyShape's constructor in jsscopeinlines.h.
+     * This is the basis case, where p is null.
+     *
+     * Any child shape, whether in a shape tree or in a dictionary list, must
+     * have a freeslot either one greater than its slot value (if the child's
+     * slot is SHAPE_INVALID_SLOT, this will yield 0; the static assertion just
+     * after the SHAPE_INVALID_SLOT definition enforces this), or equal to its
+     * parent p's freeslot, whichever is greater. This is the inductive step.
+     *
+     * If we maintained shape paths such that parent slot was always one less
+     * than child slot, possibly with an exception for SHAPE_INVALID_SLOT slot
+     * values where we would use another way of computing freeslot based on the
+     * PropertyTable (as JSC does), then we would not need to store freeslot in
+     * Shape (to be precise, in its base struct, JSobjectMap).
+     *
+     * But we currently scramble slots along shape paths due to resolve-based
+     * creation of shapes mapping reserved slots, and we do not have the needed
+     * PropertyTable machinery to use as an alternative when parent slot is not
+     * one less than child slot. This machinery is neither simple nor free, as
+     * it must involve creating a table for any slot-less transition and then
+     * pinning the table to its shape.
+     *
+     * Use of 'delete' can scramble slots along the shape lineage too, although
+     * it always switches the target object to dictionary mode, so the cost of
+     * a pinned table is less onerous.
+     *
+     * Note that allocating a uint32 freeslot member in JSObjectMap takes no
+     * net extra space on 64-bit targets (it packs with shape). And on 32-bit
+     * targets, adding freeslot to JSObjectMap takes no gross extra space,
+     * because Shape rounds up to an even number of 32-bit words (required for
+     * GC-thing and js::Value allocation in any event) on 32-bit targets.
+     *
+     * So in terms of space, we can afford to maintain both freeslot and slot,
+     * but it might be better if we eliminated freeslot using slot combined
+     * with an auxiliary mechanism based on table.
+     */
+    void setParent(js::Shape *p) {
+        if (p)
+            freeslot = JS_MAX(p->freeslot, slot + 1);
+        JS_ASSERT(freeslot < JSObject::NSLOTS_LIMIT);
+        parent = p;
+    }
 
     void insertFree(js::Shape **freep) {
         id = JSID_VOID;
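To make the inductive rule in the comment above concrete, here is a small standalone sketch (illustrative only, not part of the patch; the basis-case value is simplified to 0 rather than JSSLOT_FREE(clasp)) showing how freeslot stays monotonic even when slots appear out of order along a lineage:

```cpp
#include <algorithm>
#include <cassert>
#include <cstdint>

static const uint32_t INVALID_SLOT = 0xffffffff;  // stands in for SHAPE_INVALID_SLOT

struct MiniShape {
    uint32_t slot;       // slot mapped by this shape, or INVALID_SLOT
    uint32_t freeslot;   // first free slot beyond this shape's lineage
    MiniShape *parent;

    MiniShape(uint32_t slot, MiniShape *p) : slot(slot), freeslot(0), parent(nullptr) {
        setParent(p);
    }
    void setParent(MiniShape *p) {
        // slot + 1 wraps to 0 for INVALID_SLOT, so a slotless child simply
        // inherits its parent's freeslot (the static assert in the patch
        // guarantees this wraparound).
        if (p)
            freeslot = std::max(p->freeslot, slot + 1);
        parent = p;
    }
};

int main() {
    MiniShape empty(INVALID_SLOT, nullptr);  // basis case: freeslot stays 0 here
    MiniShape a(3, &empty);                  // maps slot 3        -> freeslot 4
    MiniShape b(INVALID_SLOT, &a);           // slotless            -> freeslot stays 4
    MiniShape c(1, &b);                      // out-of-order slot 1 -> freeslot still 4
    assert(a.freeslot == 4 && b.freeslot == 4 && c.freeslot == 4);
    return 0;
}
```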
@ -436,8 +489,8 @@ struct Shape : public JSObjectMap
|
|||
FROZEN = 0x10
|
||||
};
|
||||
|
||||
Shape(jsid id, js::PropertyOp getter, js::PropertyOp setter, uint32 slot,
|
||||
uintN attrs, uintN flags, intN shortid);
|
||||
Shape(jsid id, js::PropertyOp getter, js::PropertyOp setter, uint32 slot, uintN attrs,
|
||||
uintN flags, intN shortid, uint32 shape = INVALID_SHAPE, uint32 freeslot = 0);
|
||||
|
||||
/* Used by EmptyShape (see jsscopeinlines.h). */
|
||||
Shape(JSContext *cx, Class *aclasp);
|
||||
|
@ -706,7 +759,7 @@ Shape::insertIntoDictionary(js::Shape **dictp)
|
|||
JS_ASSERT_IF(*dictp, (*dictp)->listp == dictp);
|
||||
JS_ASSERT_IF(*dictp, !JSID_IS_VOID((*dictp)->id));
|
||||
|
||||
parent = *dictp;
|
||||
setParent(*dictp);
|
||||
if (parent)
|
||||
parent->listp = &parent;
|
||||
listp = dictp;
|
||||
|
|
|
@ -144,12 +144,13 @@ JSObject::trace(JSTracer *trc)
|
|||
namespace js {
|
||||
|
||||
inline
|
||||
Shape::Shape(jsid id, js::PropertyOp getter, js::PropertyOp setter,
|
||||
uint32 slot, uintN attrs, uintN flags, intN shortid)
|
||||
: JSObjectMap(0), table(NULL),
|
||||
id(id), rawGetter(getter), rawSetter(setter), slot(slot), attrs(uint8(attrs)),
|
||||
Shape::Shape(jsid id, js::PropertyOp getter, js::PropertyOp setter, uint32 slot, uintN attrs,
|
||||
uintN flags, intN shortid, uint32 shape, uint32 freeslot)
|
||||
: JSObjectMap(shape, freeslot),
|
||||
table(NULL), id(id), rawGetter(getter), rawSetter(setter), slot(slot), attrs(uint8(attrs)),
|
||||
flags(uint8(flags)), shortid(int16(shortid)), parent(NULL)
|
||||
{
|
||||
JS_ASSERT_IF(freeslot != SHAPE_INVALID_SLOT, freeslot < JSObject::NSLOTS_LIMIT);
|
||||
JS_ASSERT_IF(getter && (attrs & JSPROP_GETTER), getterObj->isCallable());
|
||||
JS_ASSERT_IF(setter && (attrs & JSPROP_SETTER), setterObj->isCallable());
|
||||
kids.setNull();
|
||||
|
@ -157,8 +158,8 @@ Shape::Shape(jsid id, js::PropertyOp getter, js::PropertyOp setter,
|
|||
|
||||
inline
|
||||
Shape::Shape(JSContext *cx, Class *aclasp)
|
||||
: JSObjectMap(js_GenerateShape(cx, false)), table(NULL),
|
||||
id(JSID_EMPTY), clasp(aclasp), rawSetter(NULL), slot(JSSLOT_FREE(aclasp)), attrs(0),
|
||||
: JSObjectMap(js_GenerateShape(cx, false), JSSLOT_FREE(aclasp)), table(NULL),
|
||||
id(JSID_EMPTY), clasp(aclasp), rawSetter(NULL), slot(SHAPE_INVALID_SLOT), attrs(0),
|
||||
flags(SHARED_EMPTY), shortid(0), parent(NULL)
|
||||
{
|
||||
kids.setNull();
|
||||
|
|
|
@ -102,7 +102,7 @@ class UpvarCookie
|
|||
bool isFree() const { return value == FREE_VALUE; }
|
||||
uint32 asInteger() const { return value; }
|
||||
/* isFree check should be performed before using these accessors. */
|
||||
uint16 level() const { JS_ASSERT(!isFree()); return value >> 16; }
|
||||
uint16 level() const { JS_ASSERT(!isFree()); return uint16(value >> 16); }
|
||||
uint16 slot() const { JS_ASSERT(!isFree()); return uint16(value); }
|
||||
|
||||
void set(const UpvarCookie &other) { set(other.level(), other.slot()); }
|
||||
|
|
|
@ -14534,7 +14534,7 @@ TraceRecorder::traverseScopeChain(JSObject *obj, LIns *obj_ins, JSObject *target
|
|||
}
|
||||
}
|
||||
|
||||
JS_ASSERT(obj->getClass() != &js_BlockClass);
|
||||
JS_ASSERT(!obj->isBlock());
|
||||
|
||||
if (obj == targetObj)
|
||||
break;
|
||||
|
@ -14562,7 +14562,7 @@ TraceRecorder::record_JSOP_BINDNAME()
|
|||
|
||||
// In global code, fp->scopeChain can only contain blocks whose values
|
||||
// are still on the stack. We never use BINDNAME to refer to these.
|
||||
while (obj->getClass() == &js_BlockClass) {
|
||||
while (obj->isBlock()) {
|
||||
// The block's values are still on the stack.
|
||||
#ifdef DEBUG
|
||||
// NB: fp2 can't be a generator frame, because !fp->hasFunction.
|
||||
|
|
|
@ -703,7 +703,7 @@ template <class T, size_t N, class AP>
|
|||
inline bool
|
||||
Vector<T,N,AP>::insert(T *p, const T &val)
|
||||
{
|
||||
JS_ASSERT(begin() <= p && p < end());
|
||||
JS_ASSERT(begin() <= p && p <= end());
|
||||
size_t pos = p - begin();
|
||||
JS_ASSERT(pos <= length());
|
||||
size_t oldLength = length();
|
||||
|
|
|
@ -205,7 +205,7 @@ JS_XDRFindClassById(JSXDRState *xdr, uint32 id);
|
|||
* before deserialization of bytecode. If the saved version does not match
|
||||
* the current version, abort deserialization and invalidate the file.
|
||||
*/
|
||||
#define JSXDR_BYTECODE_VERSION (0xb973c0de - 67)
|
||||
#define JSXDR_BYTECODE_VERSION (0xb973c0de - 68)
|
||||
|
||||
/*
|
||||
* Library-private functions.
|
||||
|
|
|
@ -1269,15 +1269,13 @@ mjit::Compiler::jsop_setelem()
|
|||
/*
|
||||
* Check if the object has a prototype with indexed properties,
|
||||
* in which case it might have a setter for this element. For dense
|
||||
* arrays we only need to check Array.prototype and Object.prototype.
|
||||
* arrays we need to check only Array.prototype and Object.prototype.
|
||||
* Indexed properties are indicated by the JSObject::INDEXED flag.
|
||||
*/
|
||||
|
||||
/*
|
||||
* Test for indexed properties in Array.prototype. flags is a one byte
|
||||
* quantity, but will be aligned on 4 bytes.
|
||||
*/
|
||||
/* Test for indexed properties in Array.prototype. */
|
||||
stubcc.masm.loadPtr(Address(baseReg, offsetof(JSObject, proto)), T1);
|
||||
stubcc.masm.loadPtr(Address(T1, JSObject::flagsOffset()), T1);
|
||||
stubcc.masm.loadPtr(Address(T1, offsetof(JSObject, flags)), T1);
|
||||
stubcc.masm.and32(Imm32(JSObject::INDEXED), T1);
|
||||
Jump extendedArray = stubcc.masm.branchTest32(Assembler::NonZero, T1, T1);
|
||||
extendedArray.linkTo(syncTarget, &stubcc.masm);
|
||||
|
@ -1285,7 +1283,7 @@ mjit::Compiler::jsop_setelem()
|
|||
/* Test for indexed properties in Object.prototype. */
|
||||
stubcc.masm.loadPtr(Address(baseReg, offsetof(JSObject, proto)), T1);
|
||||
stubcc.masm.loadPtr(Address(T1, offsetof(JSObject, proto)), T1);
|
||||
stubcc.masm.loadPtr(Address(T1, JSObject::flagsOffset()), T1);
|
||||
stubcc.masm.loadPtr(Address(T1, offsetof(JSObject, flags)), T1);
|
||||
stubcc.masm.and32(Imm32(JSObject::INDEXED), T1);
|
||||
Jump extendedObject = stubcc.masm.branchTest32(Assembler::NonZero, T1, T1);
|
||||
extendedObject.linkTo(syncTarget, &stubcc.masm);
|
||||
|
|
|
@ -326,8 +326,7 @@ class SetPropCompiler : public PICStubCompiler
}
}

bool generateStub(uint32 initialShape, uint32 initialFlagsAndFreeslot,
const Shape *shape, bool adding)
bool generateStub(uint32 initialShape, const Shape *shape, bool adding)
{
/* Exits to the slow path. */
Vector<Jump, 8> slowExits(f.cx);

@ -372,19 +371,6 @@ class SetPropCompiler : public PICStubCompiler
return false;
#endif

Address flagsAndFreeslot(pic.objReg, JSObject::flagsOffset());

/*
* We need to always check the flags match as some object flags can
* vary between objects of the same shape (DELEGATE, SYSTEM).
* It would be nice if these bits did not vary, so that just the
* shape check is sufficient.
*/
Jump flagsMismatch = masm.branch32(Assembler::NotEqual, flagsAndFreeslot,
Imm32(initialFlagsAndFreeslot));
if (!slowExits.append(flagsMismatch))
return false;

/* Emit shape guards for the object's prototype chain. */
size_t chainLength = 0;
JSObject *proto = obj->getProto();

@ -432,7 +418,7 @@ class SetPropCompiler : public PICStubCompiler
return false;

/* Check capacity. */
Address capacity(pic.shapeReg, -sizeof(Value));
Address capacity(pic.shapeReg, -ptrdiff_t(sizeof(Value)));
masm.load32(masm.payloadOf(capacity), pic.shapeReg);
Jump overCapacity = masm.branch32(Assembler::LessThanOrEqual, pic.shapeReg,
Imm32(shape->slot));

@ -452,8 +438,15 @@ class SetPropCompiler : public PICStubCompiler
masm.storePtr(ImmPtr(shape), Address(pic.objReg, offsetof(JSObject, lastProp)));
masm.store32(Imm32(newShape), Address(pic.objReg, offsetof(JSObject, objShape)));

/* Write both the object's flags and new freeslot. */
masm.store32(Imm32(obj->flagsAndFreeslot()), flagsAndFreeslot);
/* If this is a method shape, update the object's flags. */
if (shape->isMethod()) {
Address flags(pic.objReg, offsetof(JSObject, flags));

/* Use shapeReg to load, bitwise-or, and store flags. */
masm.load32(flags, pic.shapeReg);
masm.or32(Imm32(JSObject::METHOD_BARRIER), pic.shapeReg);
masm.store32(pic.shapeReg, flags);
}
} else if (shape->hasDefaultSetter()) {
Address address(pic.objReg, offsetof(JSObject, fslots) + shape->slot * sizeof(Value));
if (shape->slot >= JS_INITIAL_NSLOTS) {
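The new masm sequence in the adding case is, in effect, a conditional flag update. A rough self-contained paraphrase of what the generated stub does, using hypothetical miniature types (ShapeSketch/ObjSketch and the bit value are assumptions of the sketch, not the real layout):

    #include <cstdint>

    struct ShapeSketch { bool method; bool isMethod() const { return method; } };
    struct ObjSketch {
        static const uint32_t METHOD_BARRIER = 1u << 1;  // placeholder bit value
        uint32_t flags;
    };

    // Paraphrase of the stub code above: after lastProp and objShape are
    // written, raise the method barrier only for freshly added method shapes.
    static void maybeRaiseMethodBarrier(ObjSketch &obj, const ShapeSketch &shape) {
        if (shape.isMethod())
            obj.flags |= ObjSketch::METHOD_BARRIER;
    }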
@ -615,7 +608,6 @@ class SetPropCompiler : public PICStubCompiler
return disable("index");

uint32 initialShape = obj->shape();
uint32 initialFlagsAndFreeslot = obj->flagsAndFreeslot();

if (!obj->ensureClassReservedSlots(f.cx))
return false;

@ -663,7 +655,7 @@ class SetPropCompiler : public PICStubCompiler
if (obj->numSlots() != slots)
return disable("insufficient slot capacity");

return generateStub(initialShape, initialFlagsAndFreeslot, shape, true);
return generateStub(initialShape, shape, true);
}

AutoPropertyDropper dropper(f.cx, holder, prop);

@ -694,7 +686,7 @@ class SetPropCompiler : public PICStubCompiler
return patchInline(shape);
}

return generateStub(obj->shape(), 0, shape, false);
return generateStub(obj->shape(), shape, false);
}
};
@ -798,9 +790,10 @@ class GetPropCompiler : public PICStubCompiler
Address clasp(pic.objReg, offsetof(JSObject, clasp));
Jump notArgs = masm.branchPtr(Assembler::NotEqual, clasp, ImmPtr(&js_SlowArrayClass));

masm.load32(Address(pic.objReg, offsetof(JSObject, fslots) +
JSObject::JSSLOT_ARGS_LENGTH * sizeof(Value)),
pic.objReg);
masm.load32(Address(pic.objReg,
offsetof(JSObject, fslots)
+ JSObject::JSSLOT_ARGS_LENGTH * sizeof(Value)),
pic.objReg);
masm.move(pic.objReg, pic.shapeReg);
masm.and32(Imm32(1), pic.shapeReg);
Jump overridden = masm.branchTest32(Assembler::NonZero, pic.shapeReg, pic.shapeReg);

@ -843,9 +836,10 @@ class GetPropCompiler : public PICStubCompiler
ImmPtr(&js_SlowArrayClass));

isDense.linkTo(masm.label(), &masm);
masm.load32(Address(pic.objReg, offsetof(JSObject, fslots) +
JSObject::JSSLOT_ARRAY_LENGTH * sizeof(Value)),
pic.objReg);
masm.load32(Address(pic.objReg,
offsetof(JSObject, fslots)
+ JSObject::JSSLOT_ARRAY_LENGTH * sizeof(Value)),
pic.objReg);
Jump oob = masm.branch32(Assembler::Above, pic.objReg, Imm32(JSVAL_INT_MAX));
masm.move(ImmType(JSVAL_TYPE_INT32), pic.shapeReg);
Jump done = masm.jump();
@ -2232,7 +2226,7 @@ ic::CallProp(VMFrame &f, uint32 index)
rval.setObject(entry->vword.toFunObj());
} else if (entry->vword.isSlot()) {
uint32 slot = entry->vword.toSlot();
JS_ASSERT(slot < obj2->freeslot);
JS_ASSERT(obj2->containsSlot(slot));
rval = obj2->lockedGetSlot(slot);
} else {
JS_ASSERT(entry->vword.isShape());
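The repeated assertion rewrites in these stubs (slot < obj->freeslot becoming obj->containsSlot(slot), and freeslot becoming freeslot()) track the commit's central change: the object no longer stores its own freeslot field, which is instead derived from the last property's shape. A self-contained sketch of that relationship, with hypothetical miniature types (the real Shape/JSObject layout differs):

    #include <cstdint>

    struct ShapeSketch {
        uint32_t slot;      // slot occupied by this property
        uint32_t freeslot;  // first slot past all properties added so far
    };

    struct ObjectSketch {
        const ShapeSketch *lastProp;

        // freeslot is now derived, monotonic with lastProp, rather than a
        // separately maintained counter that every add path had to bump.
        uint32_t freeslot() const { return lastProp->freeslot; }
        bool containsSlot(uint32_t slot) const { return slot < freeslot(); }
    };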
@ -197,7 +197,7 @@ stubs::SetName(VMFrame &f, JSAtom *origAtom)
entry->vshape() == cx->runtime->protoHazardShape &&
shape->hasDefaultSetter()) {
slot = shape->slot;
JS_ASSERT(slot == obj->freeslot);
JS_ASSERT(slot == obj->freeslot());

/*
* Fast path: adding a plain old property that was once at

@ -210,8 +210,6 @@ stubs::SetName(VMFrame &f, JSAtom *origAtom)

if (slot < obj->numSlots()) {
JS_ASSERT(obj->getSlot(slot).isUndefined());
++obj->freeslot;
JS_ASSERT(obj->freeslot != 0);
} else {
if (!obj->allocSlot(cx, &slot))
THROW();
@ -318,7 +316,7 @@ NameOp(VMFrame &f, JSObject *obj, bool callname = false)
f.regs.sp[-1].setObject(entry->vword.toFunObj());
} else if (entry->vword.isSlot()) {
uintN slot = entry->vword.toSlot();
JS_ASSERT(slot < obj2->freeslot);
JS_ASSERT(obj2->containsSlot(slot));
f.regs.sp++;
f.regs.sp[-1] = obj2->lockedGetSlot(slot);
} else {

@ -1730,7 +1728,7 @@ NameIncDec(VMFrame &f, JSObject *obj, JSAtom *origAtom)
if (!atom) {
if (obj == obj2 && entry->vword.isSlot()) {
uint32 slot = entry->vword.toSlot();
JS_ASSERT(slot < obj->freeslot);
JS_ASSERT(obj->containsSlot(slot));
Value &rref = obj->getSlotRef(slot);
int32_t tmp;
if (JS_LIKELY(rref.isInt32() && CanIncDecWithoutOverflow(tmp = rref.toInt32()))) {

@ -1949,7 +1947,7 @@ InlineGetProp(VMFrame &f)
rval.setObject(entry->vword.toFunObj());
} else if (entry->vword.isSlot()) {
uint32 slot = entry->vword.toSlot();
JS_ASSERT(slot < obj2->freeslot);
JS_ASSERT(obj2->containsSlot(slot));
rval = obj2->lockedGetSlot(slot);
} else {
JS_ASSERT(entry->vword.isShape());

@ -2027,7 +2025,7 @@ stubs::CallProp(VMFrame &f, JSAtom *origAtom)
rval.setObject(entry->vword.toFunObj());
} else if (entry->vword.isSlot()) {
uint32 slot = entry->vword.toSlot();
JS_ASSERT(slot < obj2->freeslot);
JS_ASSERT(obj2->containsSlot(slot));
rval = obj2->lockedGetSlot(slot);
} else {
JS_ASSERT(entry->vword.isShape());

@ -2181,12 +2179,10 @@ InitPropOrMethod(VMFrame &f, JSAtom *atom, JSOp op)
/* Fast path. Property cache hit. */
uint32 slot = shape->slot;

JS_ASSERT(slot == obj->freeslot);
JS_ASSERT(slot == obj->freeslot());
JS_ASSERT(slot >= JSSLOT_FREE(obj->getClass()));
if (slot < obj->numSlots()) {
JS_ASSERT(obj->getSlot(slot).isUndefined());
++obj->freeslot;
JS_ASSERT(obj->freeslot != 0);
} else {
if (!obj->allocSlot(cx, &slot))
THROW();
@ -2386,7 +2382,7 @@ stubs::EnterBlock(VMFrame &f, JSObject *obj)
JSFrameRegs &regs = f.regs;
JSStackFrame *fp = f.fp();

JS_ASSERT(!OBJ_IS_CLONED_BLOCK(obj));
JS_ASSERT(obj->isStaticBlock());
JS_ASSERT(fp->base() + OBJ_BLOCK_DEPTH(cx, obj) == regs.sp);
Value *vp = regs.sp + OBJ_BLOCK_COUNT(cx, obj);
JS_ASSERT(regs.sp < vp);

@ -2412,7 +2408,7 @@ stubs::EnterBlock(VMFrame &f, JSObject *obj)
if (clasp == &js_BlockClass &&
obj2->getPrivate() == js_FloatingFrameIfGenerator(cx, fp)) {
JSObject *youngestProto = obj2->getProto();
JS_ASSERT(!OBJ_IS_CLONED_BLOCK(youngestProto));
JS_ASSERT(youngestProto->isStaticBlock());
JSObject *parent = obj;
while ((parent = parent->getParent()) != youngestProto)
JS_ASSERT(parent);
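The EnterBlock assertions swap the negative OBJ_IS_CLONED_BLOCK test for a positive isStaticBlock() predicate. A plausible reading, assuming the usual arrangement in which a cloned (runtime) block object keeps its static (compile-time) block as its prototype; this is a sketch of the distinction, not the actual implementation:

    // Hypothetical paraphrase using identifiers from the diff (isBlock,
    // getProto); a static block has no prototype, while a cloned block's
    // prototype is the static block it was cloned from.
    static bool sketchIsStaticBlock(const JSObject *obj) {
        return obj->isBlock() && obj->getProto() == NULL;
    }
    static bool sketchIsClonedBlock(const JSObject *obj) {
        return obj->isBlock() && obj->getProto() != NULL;
    }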