Bug 659577 - Don't alias stack variables (r=bhackett)

--HG--
rename : js/src/jit-test/tests/basic/testBug659577.js => js/src/jit-test/tests/basic/testBug659577-1.js
Luke Wagner 2012-02-23 13:59:10 -08:00
Parent 73c4e60e88
Commit e89e33891b
61 changed files with 1674 additions and 2352 deletions

View file

@ -843,7 +843,7 @@ EmitAliasedVarOp(JSContext *cx, JSOp op, ScopeCoordinate sc, BytecodeEmitter *bc
maybeBlockIndex = bce->objectList.indexOf(bce->sc->blockChain); maybeBlockIndex = bce->objectList.indexOf(bce->sc->blockChain);
bool decomposed = js_CodeSpec[op].format & JOF_DECOMPOSE; bool decomposed = js_CodeSpec[op].format & JOF_DECOMPOSE;
unsigned n = 2 * sizeof(uint16_t) + sizeof(uint32_t) + sizeof(uint16_t) + (decomposed ? 1 : 0); unsigned n = 2 * sizeof(uint16_t) + sizeof(uint32_t) + (decomposed ? 1 : 0);
JS_ASSERT(int(n) + 1 /* op */ == js_CodeSpec[op].length); JS_ASSERT(int(n) + 1 /* op */ == js_CodeSpec[op].length);
ptrdiff_t off = EmitN(cx, bce, op, n); ptrdiff_t off = EmitN(cx, bce, op, n);
@ -853,11 +853,9 @@ EmitAliasedVarOp(JSContext *cx, JSOp op, ScopeCoordinate sc, BytecodeEmitter *bc
jsbytecode *pc = bce->code(off); jsbytecode *pc = bce->code(off);
SET_UINT16(pc, sc.hops); SET_UINT16(pc, sc.hops);
pc += sizeof(uint16_t); pc += sizeof(uint16_t);
SET_UINT16(pc, sc.binding); SET_UINT16(pc, sc.slot);
pc += sizeof(uint16_t); pc += sizeof(uint16_t);
SET_UINT32_INDEX(pc, maybeBlockIndex); SET_UINT32_INDEX(pc, maybeBlockIndex);
pc += sizeof(uint32_t);
SET_UINT16(pc, sc.frameBinding);
return true; return true;
} }
@ -888,15 +886,13 @@ EmitAliasedVarOp(JSContext *cx, JSOp op, ParseNode *pn, BytecodeEmitter *bce)
if (JOF_OPTYPE(pn->getOp()) == JOF_QARG) { if (JOF_OPTYPE(pn->getOp()) == JOF_QARG) {
JS_ASSERT(bce->sc->funIsHeavyweight()); JS_ASSERT(bce->sc->funIsHeavyweight());
sc.hops = ClonedBlockDepth(bce); sc.hops = ClonedBlockDepth(bce);
sc.binding = bce->sc->bindings.argToBinding(pn->pn_cookie.slot()); sc.slot = bce->sc->bindings.argToSlot(pn->pn_cookie.slot());
sc.frameBinding = sc.binding;
} else { } else {
JS_ASSERT(JOF_OPTYPE(pn->getOp()) == JOF_LOCAL || pn->isKind(PNK_FUNCTION)); JS_ASSERT(JOF_OPTYPE(pn->getOp()) == JOF_LOCAL || pn->isKind(PNK_FUNCTION));
unsigned local = pn->pn_cookie.slot(); unsigned local = pn->pn_cookie.slot();
sc.frameBinding = bce->sc->bindings.localToBinding(local);
if (local < bce->sc->bindings.numVars()) { if (local < bce->sc->bindings.numVars()) {
sc.hops = ClonedBlockDepth(bce); sc.hops = ClonedBlockDepth(bce);
sc.binding = sc.frameBinding; sc.slot = bce->sc->bindings.localToSlot(local);
} else { } else {
unsigned depth = local - bce->sc->bindings.numVars(); unsigned depth = local - bce->sc->bindings.numVars();
unsigned hops = 0; unsigned hops = 0;
@ -907,7 +903,7 @@ EmitAliasedVarOp(JSContext *cx, JSOp op, ParseNode *pn, BytecodeEmitter *bce)
b = b->enclosingBlock(); b = b->enclosingBlock();
} }
sc.hops = hops; sc.hops = hops;
sc.binding = depth - b->stackDepth(); sc.slot = depth - b->stackDepth();
} }
} }
@ -2637,12 +2633,11 @@ frontend::EmitFunctionScript(JSContext *cx, BytecodeEmitter *bce, ParseNode *bod
if (bce->sc->bindingsAccessedDynamically()) { if (bce->sc->bindingsAccessedDynamically()) {
ScopeCoordinate sc; ScopeCoordinate sc;
sc.hops = 0; sc.hops = 0;
sc.binding = bce->sc->bindings.localToBinding(bce->sc->argumentsLocalSlot()); sc.slot = bce->sc->bindings.localToSlot(bce->sc->argumentsLocal());
sc.frameBinding = sc.binding;
if (!EmitAliasedVarOp(cx, JSOP_SETALIASEDVAR, sc, bce)) if (!EmitAliasedVarOp(cx, JSOP_SETALIASEDVAR, sc, bce))
return false; return false;
} else { } else {
if (!EmitUnaliasedVarOp(cx, JSOP_SETLOCAL, bce->sc->argumentsLocalSlot(), bce)) if (!EmitUnaliasedVarOp(cx, JSOP_SETLOCAL, bce->sc->argumentsLocal(), bce))
return false; return false;
} }
if (Emit1(cx, bce, JSOP_POP) < 0) if (Emit1(cx, bce, JSOP_POP) < 0)
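
For readers following the encoding change above: after this patch an aliased-variable op carries only a (hops, slot) scope coordinate plus a 32-bit block-object index, with decomposed ops adding one trailing byte, per the length computation in EmitAliasedVarOp. A minimal decoding sketch, not taken from the patch itself; the helper name is illustrative and the GET_* macros are assumed to mirror the SET_* macros used above:

static void
ReadAliasedVarOperands(jsbytecode *pc, uint16_t *hops, uint16_t *slot, uint32_t *maybeBlockIndex)
{
    jsbytecode *p = pc + 1;                 /* skip the opcode byte itself */
    *hops = GET_UINT16(p);                  /* how many scopes up the chain */
    p += sizeof(uint16_t);
    *slot = GET_UINT16(p);                  /* slot within that scope object */
    p += sizeof(uint16_t);
    *maybeBlockIndex = GET_UINT32_INDEX(p); /* index of the static block object, if any */
}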

View file

@ -53,7 +53,8 @@ SharedContext::needStrictChecks() {
} }
inline unsigned inline unsigned
SharedContext::argumentsLocalSlot() const { SharedContext::argumentsLocal() const
{
PropertyName *arguments = context->runtime->atomState.argumentsAtom; PropertyName *arguments = context->runtime->atomState.argumentsAtom;
unsigned slot; unsigned slot;
DebugOnly<BindingKind> kind = bindings.lookup(context, arguments, &slot); DebugOnly<BindingKind> kind = bindings.lookup(context, arguments, &slot);

View file

@ -187,7 +187,7 @@ struct SharedContext {
#undef INFUNC #undef INFUNC
unsigned argumentsLocalSlot() const; unsigned argumentsLocal() const;
bool inFunction() const { return !!fun_; } bool inFunction() const { return !!fun_; }

View file

@ -338,6 +338,7 @@ class EncapsulatedValue
bool isDouble() const { return value.isDouble(); } bool isDouble() const { return value.isDouble(); }
bool isString() const { return value.isString(); } bool isString() const { return value.isString(); }
bool isObject() const { return value.isObject(); } bool isObject() const { return value.isObject(); }
bool isMagic() const { return value.isMagic(); }
bool isMagic(JSWhyMagic why) const { return value.isMagic(why); } bool isMagic(JSWhyMagic why) const { return value.isMagic(why); }
bool isGCThing() const { return value.isGCThing(); } bool isGCThing() const { return value.isGCThing(); }
bool isMarkable() const { return value.isMarkable(); } bool isMarkable() const { return value.isMarkable(); }
@ -463,6 +464,14 @@ Valueify(const EncapsulatedValue *array)
return (const Value *)array; return (const Value *)array;
} }
static inline HeapValue *
HeapValueify(Value *v)
{
JS_STATIC_ASSERT(sizeof(HeapValue) == sizeof(Value));
JS_STATIC_ASSERT(sizeof(HeapSlot) == sizeof(Value));
return (HeapValue *)v;
}
class HeapSlotArray class HeapSlotArray
{ {
HeapSlot *array; HeapSlot *array;

View file

@ -129,6 +129,12 @@ MarkValue(JSTracer *trc, EncapsulatedValue *v, const char *name);
void void
MarkValueRange(JSTracer *trc, size_t len, EncapsulatedValue *vec, const char *name); MarkValueRange(JSTracer *trc, size_t len, EncapsulatedValue *vec, const char *name);
inline void
MarkValueRange(JSTracer *trc, HeapValue *begin, HeapValue *end, const char *name)
{
return MarkValueRange(trc, end - begin, begin, name);
}
void void
MarkValueRoot(JSTracer *trc, Value *v, const char *name); MarkValueRoot(JSTracer *trc, Value *v, const char *name);

View file

@ -0,0 +1,11 @@
function f() {
let (x, y, z) {
eval('x = 1; y = 2; z = 3');
for (var i = 0; i < 10000; ++i) {
assertEq(x, 1);
assertEq(y, 2);
assertEq(z, 3);
}
}
}
f();

View file

@ -0,0 +1,15 @@
gczeal(4);
evaluate("\
Date.formatFunctions = {count:0};\
Date.prototype.dateFormat = function(format) {\
var funcName = 'format' + Date.formatFunctions.count++;\
var code = 'Date.prototype.' + funcName + ' = function(){return ';\
var ch = '';\
for (var i = 0; i < format.length; ++i) {\
ch = format.charAt(i);\
eval(code.substring(0, code.length - 3) + ';}');\
}\
};\
var date = new Date('1/1/2007 1:11:11');\
var shortFormat = date.dateFormat('Y-m-d');\
");

View file

@ -0,0 +1,6 @@
// |jit-test| error:TypeError
(function({
l
}) {
eval();
})()

View file

@ -0,0 +1,10 @@
function g(x,y) {
return x + y;
}
function f(x) {
return g.apply(null, arguments);
}
for (var i = 0; i < 100; ++i)
assertEq(f(i, 1), i+1);

View file

@ -0,0 +1,6 @@
// |jit-test| debug
try {
function f() {}
(1 for (x in []))
} catch (e) {}
gc()

View file

@ -0,0 +1,16 @@
var g = newGlobal('new-compartment');
var dbg = new Debugger(g);
var hits = 0;
dbg.onDebuggerStatement = function(frame) {
++hits;
frame.older.eval("escaped = function() { return y }");
}
g.escaped = undefined;
g.eval("function h() { debugger }");
g.eval("(function () { var y = 42; h(); yield })().next();");
assertEq(g.eval("escaped()"), 42);
gc();
assertEq(g.eval("escaped()"), 42);

View file

@ -3,7 +3,7 @@ setDebug(true);
x = "notset"; x = "notset";
function main() { function main() {
/* The JSOP_STOP in main. */ /* The JSOP_STOP in main. */
a = { valueOf: function () { trap(main, 97, "success()"); } }; a = { valueOf: function () { trap(main, 95, "success()"); } };
b = ""; b = "";
eval(); eval();
a + b; a + b;

View file

@ -225,7 +225,7 @@ MSG_DEF(JSMSG_BAD_XML_CHARACTER, 171, 0, JSEXN_SYNTAXERR, "illegal XML char
MSG_DEF(JSMSG_BAD_DEFAULT_XML_NAMESPACE,172,0,JSEXN_SYNTAXERR, "invalid default XML namespace") MSG_DEF(JSMSG_BAD_DEFAULT_XML_NAMESPACE,172,0,JSEXN_SYNTAXERR, "invalid default XML namespace")
MSG_DEF(JSMSG_BAD_XML_NAME_SYNTAX, 173, 0, JSEXN_SYNTAXERR, "invalid XML name") MSG_DEF(JSMSG_BAD_XML_NAME_SYNTAX, 173, 0, JSEXN_SYNTAXERR, "invalid XML name")
MSG_DEF(JSMSG_BRACKET_AFTER_ATTR_EXPR,174, 0, JSEXN_SYNTAXERR, "missing ] after attribute expression") MSG_DEF(JSMSG_BRACKET_AFTER_ATTR_EXPR,174, 0, JSEXN_SYNTAXERR, "missing ] after attribute expression")
MSG_DEF(JSMSG_NESTING_GENERATOR, 175, 1, JSEXN_TYPEERR, "already executing generator {0}") MSG_DEF(JSMSG_NESTING_GENERATOR, 175, 0, JSEXN_TYPEERR, "already executing generator")
MSG_DEF(JSMSG_CURLY_IN_XML_EXPR, 176, 0, JSEXN_SYNTAXERR, "missing } in XML expression") MSG_DEF(JSMSG_CURLY_IN_XML_EXPR, 176, 0, JSEXN_SYNTAXERR, "missing } in XML expression")
MSG_DEF(JSMSG_BAD_XML_NAMESPACE, 177, 1, JSEXN_TYPEERR, "invalid XML namespace {0}") MSG_DEF(JSMSG_BAD_XML_NAMESPACE, 177, 1, JSEXN_TYPEERR, "invalid XML namespace {0}")
MSG_DEF(JSMSG_BAD_XML_ATTR_NAME, 178, 1, JSEXN_TYPEERR, "invalid XML attribute name {0}") MSG_DEF(JSMSG_BAD_XML_ATTR_NAME, 178, 1, JSEXN_TYPEERR, "invalid XML attribute name {0}")

View file

@ -323,14 +323,6 @@ ScriptAnalysis::analyzeBytecode(JSContext *cx)
case JSOP_SETALIASEDVAR: { case JSOP_SETALIASEDVAR: {
JS_ASSERT(!isInlineable); JS_ASSERT(!isInlineable);
usesScopeChain_ = true; usesScopeChain_ = true;
/* XXX: this can be removed after bug 659577. */
ScopeCoordinate sc(pc);
if (script->bindings.bindingIsLocal(sc.frameBinding) &&
script->bindings.bindingToLocal(sc.frameBinding) >= script->nfixed)
{
localsAliasStack_ = true;
}
break; break;
} }
@ -1925,15 +1917,15 @@ ScriptAnalysis::needsArgsObj(NeedsArgsObjState &state, SSAUseChain *use)
if (op == JSOP_POP || op == JSOP_POPN) if (op == JSOP_POP || op == JSOP_POPN)
return false; return false;
#ifdef JS_METHODJIT
/* SplatApplyArgs can read fp->canonicalActualArg(i) directly. */ /* SplatApplyArgs can read fp->canonicalActualArg(i) directly. */
if (state.canOptimizeApply && op == JSOP_FUNAPPLY && GET_ARGC(pc) == 2 && use->u.which == 0) { if (state.canOptimizeApply && op == JSOP_FUNAPPLY && GET_ARGC(pc) == 2 && use->u.which == 0) {
#ifdef JS_METHODJIT
JS_ASSERT(mjit::IsLowerableFunCallOrApply(pc)); JS_ASSERT(mjit::IsLowerableFunCallOrApply(pc));
#endif
state.haveOptimizedApply = true; state.haveOptimizedApply = true;
state.canOptimizeApply = false; state.canOptimizeApply = false;
return false; return false;
} }
#endif
/* arguments[i] can read fp->canonicalActualArg(i) directly. */ /* arguments[i] can read fp->canonicalActualArg(i) directly. */
if (!state.haveOptimizedApply && op == JSOP_GETELEM && use->u.which == 1) { if (!state.haveOptimizedApply && op == JSOP_GETELEM && use->u.which == 1) {
@ -1973,8 +1965,11 @@ ScriptAnalysis::needsArgsObj(JSContext *cx)
* soundly perform this analysis in their presence. Also, debuggers may * soundly perform this analysis in their presence. Also, debuggers may
* want to see 'arguments', so assume every arguments object escapes. * want to see 'arguments', so assume every arguments object escapes.
*/ */
if (script->bindingsAccessedDynamically || localsAliasStack() || cx->compartment->debugMode()) if (script->bindingsAccessedDynamically || script->numClosedArgs() > 0 ||
localsAliasStack() || cx->compartment->debugMode())
{
return true; return true;
}
unsigned pcOff = script->argumentsBytecode() - script->code; unsigned pcOff = script->argumentsBytecode() - script->code;

View file

@ -363,13 +363,14 @@ static inline uint32_t GetBytecodeSlot(JSScript *script, jsbytecode *pc)
case JSOP_CALLALIASEDVAR: case JSOP_CALLALIASEDVAR:
case JSOP_SETALIASEDVAR: case JSOP_SETALIASEDVAR:
{ {
ScopeCoordinate sc = ScopeCoordinate(pc); ScopeCoordinate sc(pc);
return script->bindings.bindingIsArg(sc.frameBinding) if (StaticBlockObject *block = ScopeCoordinateBlockChain(script, pc))
? ArgSlot(script->bindings.bindingToArg(sc.frameBinding)) return LocalSlot(script, block->slotToFrameLocal(script, sc.slot));
: LocalSlot(script, script->bindings.bindingToLocal(sc.frameBinding)); if (script->bindings.slotIsArg(sc.slot))
return ArgSlot(script->bindings.slotToArg(sc.slot));
return LocalSlot(script, script->bindings.slotToLocal(sc.slot));
} }
case JSOP_THIS: case JSOP_THIS:
return ThisSlot(); return ThisSlot();

View file

@ -97,7 +97,7 @@ ExhaustiveTest(const char funcode[])
for (size_t i = 0; i <= ArgCount; i++) { for (size_t i = 0; i <= ArgCount; i++) {
for (size_t j = 0; j <= ArgCount - i; j++) { for (size_t j = 0; j <= ArgCount - i; j++) {
ClearElements(elems); ClearElements(elems);
CHECK(argsobj.getElements(i, j, elems)); CHECK(argsobj.maybeGetElements(i, j, elems));
for (size_t k = 0; k < j; k++) for (size_t k = 0; k < j; k++)
CHECK_SAME(elems[k], INT_TO_JSVAL(i + k)); CHECK_SAME(elems[k], INT_TO_JSVAL(i + k));
for (size_t k = j; k < MAX_ELEMS - 1; k++) for (size_t k = j; k < MAX_ELEMS - 1; k++)

View file

@ -399,7 +399,7 @@ GetElement(JSContext *cx, JSObject *obj, IndexType index, JSBool *hole, Value *v
return JS_TRUE; return JS_TRUE;
} }
if (obj->isArguments()) { if (obj->isArguments()) {
if (obj->asArguments().getElement(uint32_t(index), vp)) { if (obj->asArguments().maybeGetElement(uint32_t(index), vp)) {
*hole = JS_FALSE; *hole = JS_FALSE;
return true; return true;
} }
@ -438,7 +438,7 @@ GetElements(JSContext *cx, HandleObject aobj, uint32_t length, Value *vp)
if (aobj->isArguments()) { if (aobj->isArguments()) {
ArgumentsObject &argsobj = aobj->asArguments(); ArgumentsObject &argsobj = aobj->asArguments();
if (!argsobj.hasOverriddenLength()) { if (!argsobj.hasOverriddenLength()) {
if (argsobj.getElements(0, length, vp)) if (argsobj.maybeGetElements(0, length, vp))
return true; return true;
} }
} }

View file

@ -994,6 +994,7 @@ JSContext::JSContext(JSRuntime *rt)
functionCallback(NULL), functionCallback(NULL),
#endif #endif
enumerators(NULL), enumerators(NULL),
innermostGenerator_(NULL),
#ifdef DEBUG #ifdef DEBUG
stackIterAssertionEnabled(true), stackIterAssertionEnabled(true),
#endif #endif
@ -1079,26 +1080,24 @@ JSContext::wrapPendingException()
setPendingException(v); setPendingException(v);
} }
JSGenerator *
JSContext::generatorFor(StackFrame *fp) const void
JSContext::enterGenerator(JSGenerator *gen)
{ {
JS_ASSERT(stack.containsSlow(fp)); JS_ASSERT(!gen->prevGenerator);
JS_ASSERT(fp->isGeneratorFrame()); gen->prevGenerator = innermostGenerator_;
JS_ASSERT(!fp->isFloatingGenerator()); innermostGenerator_ = gen;
JS_ASSERT(!genStack.empty());
if (JS_LIKELY(fp == genStack.back()->liveFrame()))
return genStack.back();
/* General case; should only be needed for debug APIs. */
for (size_t i = 0; i < genStack.length(); ++i) {
if (genStack[i]->liveFrame() == fp)
return genStack[i];
}
JS_NOT_REACHED("no matching generator");
return NULL;
} }
void
JSContext::leaveGenerator(JSGenerator *gen)
{
JS_ASSERT(innermostGenerator_ == gen);
innermostGenerator_ = innermostGenerator_->prevGenerator;
gen->prevGenerator = NULL;
}
bool bool
JSContext::runningWithTrustedPrincipals() const JSContext::runningWithTrustedPrincipals() const
{ {
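
The jscntxt.cpp change above replaces the context's Vector of active generators with an intrusive stack threaded through JSGenerator::prevGenerator, which is why the ensureGeneratorStackSpace OOM pre-check disappears further down (jscntxtinlines.h and jsiter.cpp). A standalone sketch of the pattern, with simplified names chosen for illustration only:

#include <cstddef>

struct Generator {
    Generator *prevGenerator;             // generator that was innermost before this one

    Generator() : prevGenerator(NULL) {}
};

struct Context {
    Generator *innermostGenerator_;       // NULL when no generator is executing

    Context() : innermostGenerator_(NULL) {}

    void enterGenerator(Generator *gen) { // infallible: no allocation, unlike Vector::append
        // assert(!gen->prevGenerator);
        gen->prevGenerator = innermostGenerator_;
        innermostGenerator_ = gen;
    }

    void leaveGenerator(Generator *gen) { // must be called in LIFO order
        // assert(innermostGenerator_ == gen);
        innermostGenerator_ = gen->prevGenerator;
        gen->prevGenerator = NULL;
    }
};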

View file

@ -1242,29 +1242,12 @@ struct JSContext : js::ContextFriendFields
JSObject *enumerators; JSObject *enumerators;
private: private:
/* /* Innermost-executing generator or null if no generator is executing. */
* To go from a live generator frame (on the stack) to its generator object JSGenerator *innermostGenerator_;
* (see comment js_FloatingFrameIfGenerator), we maintain a stack of active
* generators, pushing and popping when entering and leaving generator
* frames, respectively.
*/
js::Vector<JSGenerator *, 2, js::SystemAllocPolicy> genStack;
public: public:
/* Return the generator object for the given generator frame. */ JSGenerator *innermostGenerator() const { return innermostGenerator_; }
JSGenerator *generatorFor(js::StackFrame *fp) const; void enterGenerator(JSGenerator *gen);
void leaveGenerator(JSGenerator *gen);
/* Early OOM-check. */
inline bool ensureGeneratorStackSpace();
bool enterGenerator(JSGenerator *gen) {
return genStack.append(gen);
}
void leaveGenerator(JSGenerator *gen) {
JS_ASSERT(genStack.back() == gen);
genStack.popBack();
}
inline void* malloc_(size_t bytes) { inline void* malloc_(size_t bytes) {
return runtime->malloc_(bytes, this); return runtime->malloc_(bytes, this);
@ -1296,9 +1279,6 @@ struct JSContext : js::ContextFriendFields
void purge(); void purge();
/* For DEBUG. */
inline void assertValidStackDepth(unsigned depth);
bool isExceptionPending() { bool isExceptionPending() {
return throwing; return throwing;
} }

View file

@ -541,14 +541,6 @@ JSContext::setCompileOptions(unsigned newcopts)
maybeOverrideVersion(newVersion); maybeOverrideVersion(newVersion);
} }
inline void
JSContext::assertValidStackDepth(unsigned depth)
{
#ifdef DEBUG
JS_ASSERT(0 <= regs().sp - fp()->base());
JS_ASSERT(depth <= uintptr_t(regs().sp - fp()->base()));
#endif
}
inline js::LifoAlloc & inline js::LifoAlloc &
JSContext::typeLifoAlloc() JSContext::typeLifoAlloc()
@ -556,15 +548,6 @@ JSContext::typeLifoAlloc()
return compartment->typeLifoAlloc; return compartment->typeLifoAlloc;
} }
inline bool
JSContext::ensureGeneratorStackSpace()
{
bool ok = genStack.reserve(genStack.length() + 1);
if (!ok)
js_ReportOutOfMemory(this);
return ok;
}
inline void inline void
JSContext::setPendingException(js::Value v) { JSContext::setPendingException(js::Value v) {
JS_ASSERT(!IsPoisonedValue(v)); JS_ASSERT(!IsPoisonedValue(v));

View file

@ -565,11 +565,8 @@ JS_GetFrameCallObject(JSContext *cx, JSStackFrame *fpArg)
*/ */
while (o) { while (o) {
ScopeObject &scope = o->asDebugScope().scope(); ScopeObject &scope = o->asDebugScope().scope();
if (scope.isCall()) { if (scope.isCall())
JS_ASSERT_IF(cx->compartment->debugMode() && fp->isNonEvalFunctionFrame(),
fp == scope.asCall().maybeStackFrame());
return o; return o;
}
o = o->enclosingScope(); o = o->enclosingScope();
} }
return NULL; return NULL;
@ -805,10 +802,10 @@ GetPropertyDesc(JSContext *cx, JSObject *obj_, Shape *shape, JSPropertyDesc *pd)
| (!shape->writable() ? JSPD_READONLY : 0) | (!shape->writable() ? JSPD_READONLY : 0)
| (!shape->configurable() ? JSPD_PERMANENT : 0); | (!shape->configurable() ? JSPD_PERMANENT : 0);
pd->spare = 0; pd->spare = 0;
if (shape->getter() == CallObject::getArgOp) { if (shape->setter() == CallObject::setArgOp) {
pd->slot = shape->shortid(); pd->slot = shape->shortid();
pd->flags |= JSPD_ARGUMENT; pd->flags |= JSPD_ARGUMENT;
} else if (shape->getter() == CallObject::getVarOp) { } else if (shape->setter() == CallObject::setVarOp) {
pd->slot = shape->shortid(); pd->slot = shape->shortid();
pd->flags |= JSPD_VARIABLE; pd->flags |= JSPD_VARIABLE;
} else { } else {

View file

@ -127,7 +127,7 @@ fun_getProperty(JSContext *cx, HandleObject obj_, HandleId id, Value *vp)
* innermost function as uninlineable to expand its frame and allow us * innermost function as uninlineable to expand its frame and allow us
* to recover its callee object. * to recover its callee object.
*/ */
JSInlinedSite *inlined; InlinedSite *inlined;
jsbytecode *prevpc = fp->prev()->pcQuadratic(cx->stack, fp, &inlined); jsbytecode *prevpc = fp->prev()->pcQuadratic(cx->stack, fp, &inlined);
if (inlined) { if (inlined) {
mjit::JITChunk *chunk = fp->prev()->jit()->chunk(prevpc); mjit::JITChunk *chunk = fp->prev()->jit()->chunk(prevpc);
@ -696,7 +696,7 @@ js_fun_apply(JSContext *cx, unsigned argc, Value *vp)
args.thisv() = vp[2]; args.thisv() = vp[2];
/* Steps 7-8. */ /* Steps 7-8. */
cx->fp()->forEachCanonicalActualArg(CopyTo(args.array())); cx->fp()->forEachUnaliasedActual(CopyTo(args.array()));
} else { } else {
/* Step 3. */ /* Step 3. */
if (!vp[3].isObject()) { if (!vp[3].isObject()) {

View file

@ -244,9 +244,6 @@ js_ValueToCallableObject(JSContext *cx, js::Value *vp, unsigned flags);
extern void extern void
js_ReportIsNotFunction(JSContext *cx, const js::Value *vp, unsigned flags); js_ReportIsNotFunction(JSContext *cx, const js::Value *vp, unsigned flags);
extern void
js_PutCallObject(js::StackFrame *fp, js::CallObject &callobj);
namespace js { namespace js {
/* /*
@ -278,9 +275,6 @@ JSFunction::toExtended() const
return static_cast<const js::FunctionExtended *>(this); return static_cast<const js::FunctionExtended *>(this);
} }
extern void
js_PutArgsObject(js::StackFrame *fp);
inline bool inline bool
js_IsNamedLambda(JSFunction *fun) { return (fun->flags & JSFUN_LAMBDA) && fun->atom; } js_IsNamedLambda(JSFunction *fun) { return (fun->flags & JSFUN_LAMBDA) && fun->atom; }

View file

@ -3241,7 +3241,7 @@ SweepPhase(JSRuntime *rt, JSGCInvocationKind gckind, bool *startBackgroundSweep)
/* Finalize unreachable (key,value) pairs in all weak maps. */ /* Finalize unreachable (key,value) pairs in all weak maps. */
WeakMapBase::sweepAll(&rt->gcMarker); WeakMapBase::sweepAll(&rt->gcMarker);
rt->debugScopes->sweep(); rt->debugScopes->sweep(rt);
SweepAtomState(rt); SweepAtomState(rt);

View file

@ -5192,8 +5192,8 @@ NestingPrologue(JSContext *cx, StackFrame *fp)
} }
nesting->activeCall = &fp->callObj(); nesting->activeCall = &fp->callObj();
nesting->argArray = fp->formalArgs(); nesting->argArray = Valueify(nesting->activeCall->argArray());
nesting->varArray = fp->slots(); nesting->varArray = Valueify(nesting->activeCall->varArray());
} }
/* Maintain stack frame count for the function. */ /* Maintain stack frame count for the function. */

View file

@ -23,6 +23,9 @@ struct TypeInferenceSizes;
} }
namespace js { namespace js {
class CallObject;
namespace types { namespace types {
/* Type set entry for either a JSObject with singleton type or a non-singleton TypeObject. */ /* Type set entry for either a JSObject with singleton type or a non-singleton TypeObject. */
@ -961,7 +964,7 @@ struct TypeScriptNesting
JSScript *next; JSScript *next;
/* If this is an outer function, the most recent activation. */ /* If this is an outer function, the most recent activation. */
JSObject *activeCall; CallObject *activeCall;
/* /*
* If this is an outer function, pointers to the most recent activation's * If this is an outer function, pointers to the most recent activation's

View file

@ -223,6 +223,7 @@ js::RunScript(JSContext *cx, JSScript *script, StackFrame *fp)
JS_ASSERT(fp == cx->fp()); JS_ASSERT(fp == cx->fp());
JS_ASSERT(fp->script() == script); JS_ASSERT(fp->script() == script);
JS_ASSERT_IF(!fp->isGeneratorFrame(), cx->regs().pc == script->code); JS_ASSERT_IF(!fp->isGeneratorFrame(), cx->regs().pc == script->code);
JS_ASSERT_IF(fp->isEvalFrame(), script->isActiveEval);
#ifdef JS_METHODJIT_SPEW #ifdef JS_METHODJIT_SPEW
JMCheckLogging(); JMCheckLogging();
#endif #endif
@ -317,16 +318,11 @@ js::InvokeKernel(JSContext *cx, CallArgs args, MaybeConstruct construct)
if (!cx->stack.pushInvokeFrame(cx, args, initial, &ifg)) if (!cx->stack.pushInvokeFrame(cx, args, initial, &ifg))
return false; return false;
/* Now that the new frame is rooted, maybe create a call object. */
StackFrame *fp = ifg.fp();
if (!fp->functionPrologue(cx))
return false;
/* Run function until JSOP_STOP, JSOP_RETURN or error. */ /* Run function until JSOP_STOP, JSOP_RETURN or error. */
JSBool ok = RunScript(cx, fun->script(), fp); JSBool ok = RunScript(cx, fun->script(), ifg.fp());
/* Propagate the return value out. */ /* Propagate the return value out. */
args.rval() = fp->returnValue(); args.rval() = ifg.fp()->returnValue();
JS_ASSERT_IF(ok && construct, !args.rval().isPrimitive()); JS_ASSERT_IF(ok && construct, !args.rval().isPrimitive());
return ok; return ok;
} }
@ -453,29 +449,15 @@ js::ExecuteKernel(JSContext *cx, JSScript *script_, JSObject &scopeChain, const
if (!script->ensureRanAnalysis(cx, &scopeChain)) if (!script->ensureRanAnalysis(cx, &scopeChain))
return false; return false;
TypeScript::SetThis(cx, script, efg.fp()->thisValue());
/* Give strict mode eval its own fresh lexical environment. */
StackFrame *fp = efg.fp();
if (fp->isStrictEvalFrame() && !CallObject::createForStrictEval(cx, fp))
return false;
Probes::startExecution(cx, script); Probes::startExecution(cx, script);
bool ok = RunScript(cx, script, efg.fp());
TypeScript::SetThis(cx, script, fp->thisValue());
bool ok = RunScript(cx, script, fp);
if (fp->isStrictEvalFrame()) {
if (cx->compartment->debugMode())
cx->runtime->debugScopes->onPopStrictEvalScope(fp);
js_PutCallObject(fp, fp->callObj());
}
Probes::stopExecution(cx, script); Probes::stopExecution(cx, script);
/* Propagate the return value out. */ /* Propagate the return value out. */
if (result) if (result)
*result = fp->returnValue(); *result = efg.fp()->returnValue();
return ok; return ok;
} }
@ -698,7 +680,7 @@ EnterWith(JSContext *cx, int stackIndex)
StackFrame *fp = cx->fp(); StackFrame *fp = cx->fp();
Value *sp = cx->regs().sp; Value *sp = cx->regs().sp;
JS_ASSERT(stackIndex < 0); JS_ASSERT(stackIndex < 0);
JS_ASSERT(fp->base() <= sp + stackIndex); JS_ASSERT(int(cx->regs().stackDepth()) + stackIndex >= 0);
RootedObject obj(cx); RootedObject obj(cx);
if (sp[-1].isObject()) { if (sp[-1].isObject()) {
@ -706,17 +688,17 @@ EnterWith(JSContext *cx, int stackIndex)
} else { } else {
obj = js_ValueToNonNullObject(cx, sp[-1]); obj = js_ValueToNonNullObject(cx, sp[-1]);
if (!obj) if (!obj)
return JS_FALSE; return false;
sp[-1].setObject(*obj); sp[-1].setObject(*obj);
} }
JSObject *withobj = WithObject::create(cx, obj, fp->scopeChain(), WithObject *withobj = WithObject::create(cx, obj, fp->scopeChain(),
sp + stackIndex - fp->base()); cx->regs().stackDepth() + stackIndex);
if (!withobj) if (!withobj)
return JS_FALSE; return false;
fp->setScopeChain(*withobj); fp->pushOnScopeChain(*withobj);
return JS_TRUE; return true;
} }
/* Unwind block and scope chains to match the given depth. */ /* Unwind block and scope chains to match the given depth. */
@ -724,7 +706,7 @@ void
js::UnwindScope(JSContext *cx, uint32_t stackDepth) js::UnwindScope(JSContext *cx, uint32_t stackDepth)
{ {
StackFrame *fp = cx->fp(); StackFrame *fp = cx->fp();
JS_ASSERT(fp->base() + stackDepth <= cx->regs().sp); JS_ASSERT(stackDepth <= cx->regs().stackDepth());
for (ScopeIter si(fp); !si.done(); si = si.enclosing()) { for (ScopeIter si(fp); !si.done(); si = si.enclosing()) {
switch (si.type()) { switch (si.type()) {
@ -753,7 +735,7 @@ js::UnwindForUncatchableException(JSContext *cx, const FrameRegs &regs)
for (TryNoteIter tni(regs); !tni.done(); ++tni) { for (TryNoteIter tni(regs); !tni.done(); ++tni) {
JSTryNote *tn = *tni; JSTryNote *tn = *tni;
if (tn->kind == JSTRY_ITER) { if (tn->kind == JSTRY_ITER) {
Value *sp = regs.fp()->base() + tn->stackDepth; Value *sp = regs.spForStackDepth(tn->stackDepth);
UnwindIteratorForUncatchableException(cx, &sp[-1].toObject()); UnwindIteratorForUncatchableException(cx, &sp[-1].toObject());
} }
} }
@ -813,7 +795,7 @@ TryNoteIter::settle()
* depth exceeding the current one and this condition is what we use to * depth exceeding the current one and this condition is what we use to
* filter them out. * filter them out.
*/ */
if (tn->stackDepth <= regs.sp - regs.fp()->base()) if (tn->stackDepth <= regs.stackDepth())
break; break;
} }
} }
@ -850,36 +832,6 @@ DoIncDec(JSContext *cx, JSScript *script, jsbytecode *pc, const Value &v, Value
return true; return true;
} }
static inline void
CheckLocalAccess(StackFrame *fp, unsigned index, bool aliased = false)
{
#ifdef DEBUG
if (index < fp->numFixed()) {
JS_ASSERT(fp->script()->varIsAliased(index) == aliased);
} else {
unsigned depth = index - fp->numFixed();
for (StaticBlockObject *b = fp->maybeBlockChain(); b; b = b->enclosingBlock()) {
if (b->containsVarAtDepth(depth)) {
JS_ASSERT(b->isAliased(depth - b->stackDepth()) == aliased);
return;
}
}
/*
* Unfortunately, strange uses of JSOP_GETLOCAL (e.g., comprehensions
* and group assignment) access slots above script->nfixed and not in
* any block so we cannot use JS_NOT_REACHED here.
*/
}
#endif
}
static inline void
CheckArgAccess(StackFrame *fp, unsigned index)
{
JS_ASSERT(fp->script()->formalLivesInArgumentsObject(index) ==
fp->script()->argsObjAliasesFormals());
}
#define PUSH_COPY(v) do { *regs.sp++ = v; assertSameCompartment(cx, regs.sp[-1]); } while (0) #define PUSH_COPY(v) do { *regs.sp++ = v; assertSameCompartment(cx, regs.sp[-1]); } while (0)
#define PUSH_COPY_SKIP_CHECK(v) *regs.sp++ = v #define PUSH_COPY_SKIP_CHECK(v) *regs.sp++ = v
#define PUSH_NULL() regs.sp++->setNull() #define PUSH_NULL() regs.sp++->setNull()
@ -1222,7 +1174,6 @@ js::Interpret(JSContext *cx, StackFrame *entryFrame, InterpMode interpMode)
#define RESTORE_INTERP_VARS() \ #define RESTORE_INTERP_VARS() \
JS_BEGIN_MACRO \ JS_BEGIN_MACRO \
SET_SCRIPT(regs.fp()->script()); \ SET_SCRIPT(regs.fp()->script()); \
argv = regs.fp()->maybeFormalArgs(); \
atoms = FrameAtomBase(cx, regs.fp()); \ atoms = FrameAtomBase(cx, regs.fp()); \
JS_ASSERT(&cx->regs() == &regs); \ JS_ASSERT(&cx->regs() == &regs); \
JS_END_MACRO JS_END_MACRO
@ -1285,7 +1236,6 @@ js::Interpret(JSContext *cx, StackFrame *entryFrame, InterpMode interpMode)
JSRuntime *const rt = cx->runtime; JSRuntime *const rt = cx->runtime;
Rooted<JSScript*> script(cx); Rooted<JSScript*> script(cx);
SET_SCRIPT(regs.fp()->script()); SET_SCRIPT(regs.fp()->script());
Value *argv = regs.fp()->maybeFormalArgs();
CHECK_INTERRUPT_HANDLER(); CHECK_INTERRUPT_HANDLER();
/* /*
@ -1320,8 +1270,8 @@ js::Interpret(JSContext *cx, StackFrame *entryFrame, InterpMode interpMode)
#if JS_HAS_GENERATORS #if JS_HAS_GENERATORS
if (JS_UNLIKELY(regs.fp()->isGeneratorFrame())) { if (JS_UNLIKELY(regs.fp()->isGeneratorFrame())) {
JS_ASSERT((size_t) (regs.pc - script->code) <= script->length); JS_ASSERT(size_t(regs.pc - script->code) <= script->length);
JS_ASSERT((size_t) (regs.sp - regs.fp()->base()) <= StackDepth(script)); JS_ASSERT(regs.stackDepth() <= script->nslots);
/* /*
* To support generator_throw and to catch ignored exceptions, * To support generator_throw and to catch ignored exceptions,
@ -1338,8 +1288,7 @@ js::Interpret(JSContext *cx, StackFrame *entryFrame, InterpMode interpMode)
/* Don't call the script prologue if executing between Method and Trace JIT. */ /* Don't call the script prologue if executing between Method and Trace JIT. */
if (interpMode == JSINTERP_NORMAL) { if (interpMode == JSINTERP_NORMAL) {
StackFrame *fp = regs.fp(); StackFrame *fp = regs.fp();
JS_ASSERT_IF(!fp->isGeneratorFrame(), regs.pc == script->code); if (!fp->isGeneratorFrame() && !fp->prologue(cx, UseNewTypeAtEntry(cx, fp)))
if (!ScriptPrologueOrGeneratorResume(cx, fp, UseNewTypeAtEntry(cx, fp)))
goto error; goto error;
if (cx->compartment->debugMode()) { if (cx->compartment->debugMode()) {
JSTrapStatus status = ScriptDebugPrologue(cx, fp); JSTrapStatus status = ScriptDebugPrologue(cx, fp);
@ -1574,25 +1523,12 @@ BEGIN_CASE(JSOP_POP)
END_CASE(JSOP_POP) END_CASE(JSOP_POP)
BEGIN_CASE(JSOP_POPN) BEGIN_CASE(JSOP_POPN)
{ JS_ASSERT(GET_UINT16(regs.pc) <= regs.stackDepth());
regs.sp -= GET_UINT16(regs.pc); regs.sp -= GET_UINT16(regs.pc);
#ifdef DEBUG #ifdef DEBUG
JS_ASSERT(regs.fp()->base() <= regs.sp); if (StaticBlockObject *block = regs.fp()->maybeBlockChain())
StaticBlockObject *block = regs.fp()->maybeBlockChain(); JS_ASSERT(regs.stackDepth() >= block->stackDepth() + block->slotCount());
JS_ASSERT_IF(block,
block->stackDepth() + block->slotCount()
<= (size_t) (regs.sp - regs.fp()->base()));
for (JSObject *obj = regs.fp()->scopeChain(); obj; obj = obj->enclosingScope()) {
if (!obj->isBlock() || !obj->isWith())
continue;
if (obj->getPrivate() != js_FloatingFrameIfGenerator(cx, regs.fp()))
break;
JS_ASSERT(regs.fp()->base() + obj->asBlock().stackDepth()
+ (obj->isBlock() ? obj->asBlock().slotCount() : 1)
<= regs.sp);
}
#endif #endif
}
END_CASE(JSOP_POPN) END_CASE(JSOP_POPN)
BEGIN_CASE(JSOP_SETRVAL) BEGIN_CASE(JSOP_SETRVAL)
@ -1639,14 +1575,13 @@ BEGIN_CASE(JSOP_STOP)
if (entryFrame != regs.fp()) if (entryFrame != regs.fp())
inline_return: inline_return:
{ {
AssertValidFunctionScopeChainAtExit(regs.fp());
if (cx->compartment->debugMode()) if (cx->compartment->debugMode())
interpReturnOK = ScriptDebugEpilogue(cx, regs.fp(), interpReturnOK); interpReturnOK = ScriptDebugEpilogue(cx, regs.fp(), interpReturnOK);
interpReturnOK = ScriptEpilogue(cx, regs.fp(), interpReturnOK); if (!regs.fp()->isYielding())
regs.fp()->epilogue(cx);
/* The JIT inlines ScriptEpilogue. */ /* The JIT inlines the epilogue. */
#ifdef JS_METHODJIT #ifdef JS_METHODJIT
jit_return: jit_return:
#endif #endif
@ -1679,7 +1614,7 @@ BEGIN_CASE(JSOP_STOP)
regs.pc += JSOP_CALL_LENGTH; regs.pc += JSOP_CALL_LENGTH;
goto error; goto error;
} else { } else {
JS_ASSERT(regs.sp == regs.fp()->base()); JS_ASSERT(regs.stackDepth() == 0);
} }
interpReturnOK = true; interpReturnOK = true;
goto exit; goto exit;
@ -1796,7 +1731,7 @@ END_CASE(JSOP_IN)
BEGIN_CASE(JSOP_ITER) BEGIN_CASE(JSOP_ITER)
{ {
JS_ASSERT(regs.sp > regs.fp()->base()); JS_ASSERT(regs.stackDepth() >= 1);
uint8_t flags = GET_UINT8(regs.pc); uint8_t flags = GET_UINT8(regs.pc);
if (!ValueToIterator(cx, flags, &regs.sp[-1])) if (!ValueToIterator(cx, flags, &regs.sp[-1]))
goto error; goto error;
@ -1807,7 +1742,7 @@ END_CASE(JSOP_ITER)
BEGIN_CASE(JSOP_MOREITER) BEGIN_CASE(JSOP_MOREITER)
{ {
JS_ASSERT(regs.sp - 1 >= regs.fp()->base()); JS_ASSERT(regs.stackDepth() >= 1);
JS_ASSERT(regs.sp[-1].isObject()); JS_ASSERT(regs.sp[-1].isObject());
PUSH_NULL(); PUSH_NULL();
bool cond; bool cond;
@ -1820,8 +1755,8 @@ END_CASE(JSOP_MOREITER)
BEGIN_CASE(JSOP_ITERNEXT) BEGIN_CASE(JSOP_ITERNEXT)
{ {
JS_ASSERT(regs.stackDepth() >= unsigned(GET_INT8(regs.pc)));
Value *itervp = regs.sp - GET_INT8(regs.pc); Value *itervp = regs.sp - GET_INT8(regs.pc);
JS_ASSERT(itervp >= regs.fp()->base());
JS_ASSERT(itervp->isObject()); JS_ASSERT(itervp->isObject());
PUSH_NULL(); PUSH_NULL();
if (!IteratorNext(cx, &itervp->toObject(), &regs.sp[-1])) if (!IteratorNext(cx, &itervp->toObject(), &regs.sp[-1]))
@ -1831,7 +1766,7 @@ END_CASE(JSOP_ITERNEXT)
BEGIN_CASE(JSOP_ENDITER) BEGIN_CASE(JSOP_ENDITER)
{ {
JS_ASSERT(regs.sp - 1 >= regs.fp()->base()); JS_ASSERT(regs.stackDepth() >= 1);
bool ok = CloseIterator(cx, &regs.sp[-1].toObject()); bool ok = CloseIterator(cx, &regs.sp[-1].toObject());
regs.sp--; regs.sp--;
if (!ok) if (!ok)
@ -1841,7 +1776,7 @@ END_CASE(JSOP_ENDITER)
BEGIN_CASE(JSOP_DUP) BEGIN_CASE(JSOP_DUP)
{ {
JS_ASSERT(regs.sp > regs.fp()->base()); JS_ASSERT(regs.stackDepth() >= 1);
const Value &rref = regs.sp[-1]; const Value &rref = regs.sp[-1];
PUSH_COPY(rref); PUSH_COPY(rref);
} }
@ -1849,7 +1784,7 @@ END_CASE(JSOP_DUP)
BEGIN_CASE(JSOP_DUP2) BEGIN_CASE(JSOP_DUP2)
{ {
JS_ASSERT(regs.sp - 2 >= regs.fp()->base()); JS_ASSERT(regs.stackDepth() >= 2);
const Value &lref = regs.sp[-2]; const Value &lref = regs.sp[-2];
const Value &rref = regs.sp[-1]; const Value &rref = regs.sp[-1];
PUSH_COPY(lref); PUSH_COPY(lref);
@ -1859,7 +1794,7 @@ END_CASE(JSOP_DUP2)
BEGIN_CASE(JSOP_SWAP) BEGIN_CASE(JSOP_SWAP)
{ {
JS_ASSERT(regs.sp - 2 >= regs.fp()->base()); JS_ASSERT(regs.stackDepth() >= 2);
Value &lref = regs.sp[-2]; Value &lref = regs.sp[-2];
Value &rref = regs.sp[-1]; Value &rref = regs.sp[-1];
lref.swap(rref); lref.swap(rref);
@ -1869,7 +1804,7 @@ END_CASE(JSOP_SWAP)
BEGIN_CASE(JSOP_PICK) BEGIN_CASE(JSOP_PICK)
{ {
unsigned i = GET_UINT8(regs.pc); unsigned i = GET_UINT8(regs.pc);
JS_ASSERT(regs.sp - (i + 1) >= regs.fp()->base()); JS_ASSERT(regs.stackDepth() >= i + 1);
Value lval = regs.sp[-int(i + 1)]; Value lval = regs.sp[-int(i + 1)];
memmove(regs.sp - (i + 1), regs.sp - i, sizeof(Value) * i); memmove(regs.sp - (i + 1), regs.sp - i, sizeof(Value) * i);
regs.sp[-1] = lval; regs.sp[-1] = lval;
@ -2355,10 +2290,17 @@ BEGIN_CASE(JSOP_INCARG)
BEGIN_CASE(JSOP_ARGINC) BEGIN_CASE(JSOP_ARGINC)
{ {
unsigned i = GET_ARGNO(regs.pc); unsigned i = GET_ARGNO(regs.pc);
CheckArgAccess(regs.fp(), i); if (script->argsObjAliasesFormals()) {
Value &arg = regs.fp()->formalArg(i); const Value &arg = regs.fp()->argsObj().arg(i);
Value v;
if (!DoIncDec(cx, script, regs.pc, arg, &v, &regs.sp[0]))
goto error;
regs.fp()->argsObj().setArg(i, v);
} else {
Value &arg = regs.fp()->unaliasedFormal(i);
if (!DoIncDec(cx, script, regs.pc, arg, &arg, &regs.sp[0])) if (!DoIncDec(cx, script, regs.pc, arg, &arg, &regs.sp[0]))
goto error; goto error;
}
regs.sp++; regs.sp++;
} }
END_CASE(JSOP_ARGINC); END_CASE(JSOP_ARGINC);
@ -2369,8 +2311,7 @@ BEGIN_CASE(JSOP_INCLOCAL)
BEGIN_CASE(JSOP_LOCALINC) BEGIN_CASE(JSOP_LOCALINC)
{ {
unsigned i = GET_SLOTNO(regs.pc); unsigned i = GET_SLOTNO(regs.pc);
CheckLocalAccess(regs.fp(), i); Value &local = regs.fp()->unaliasedLocal(i);
Value &local = regs.fp()->localSlot(i);
if (!DoIncDec(cx, script, regs.pc, local, &local, &regs.sp[0])) if (!DoIncDec(cx, script, regs.pc, local, &local, &regs.sp[0]))
goto error; goto error;
regs.sp++; regs.sp++;
@ -2481,13 +2422,12 @@ BEGIN_CASE(JSOP_NEW)
BEGIN_CASE(JSOP_CALL) BEGIN_CASE(JSOP_CALL)
BEGIN_CASE(JSOP_FUNCALL) BEGIN_CASE(JSOP_FUNCALL)
{ {
JS_ASSERT(regs.stackDepth() >= 2 + GET_ARGC(regs.pc));
CallArgs args = CallArgsFromSp(GET_ARGC(regs.pc), regs.sp); CallArgs args = CallArgsFromSp(GET_ARGC(regs.pc), regs.sp);
JS_ASSERT(args.base() >= regs.fp()->base());
bool construct = (*regs.pc == JSOP_NEW); bool construct = (*regs.pc == JSOP_NEW);
RootedFunction &fun = rootFunction0; RootedFunction &fun = rootFunction0;
/* Don't bother trying to fast-path calls to scripted non-constructors. */ /* Don't bother trying to fast-path calls to scripted non-constructors. */
if (!IsFunctionObject(args.calleev(), fun.address()) || !fun->isInterpretedConstructor()) { if (!IsFunctionObject(args.calleev(), fun.address()) || !fun->isInterpretedConstructor()) {
if (construct) { if (construct) {
@ -2521,10 +2461,6 @@ BEGIN_CASE(JSOP_FUNCALL)
goto error; goto error;
RESTORE_INTERP_VARS(); RESTORE_INTERP_VARS();
if (!regs.fp()->functionPrologue(cx))
goto error;
RESET_USE_METHODJIT(); RESET_USE_METHODJIT();
bool newType = cx->typeInferenceEnabled() && UseNewType(cx, script, regs.pc); bool newType = cx->typeInferenceEnabled() && UseNewType(cx, script, regs.pc);
@ -2547,7 +2483,7 @@ BEGIN_CASE(JSOP_FUNCALL)
} }
#endif #endif
if (!ScriptPrologue(cx, regs.fp(), newType)) if (!regs.fp()->prologue(cx, newType))
goto error; goto error;
if (cx->compartment->debugMode()) { if (cx->compartment->debugMode()) {
@ -2804,7 +2740,7 @@ END_CASE(JSOP_ACTUALSFILLED)
BEGIN_CASE(JSOP_ARGUMENTS) BEGIN_CASE(JSOP_ARGUMENTS)
JS_ASSERT(!regs.fp()->fun()->hasRest()); JS_ASSERT(!regs.fp()->fun()->hasRest());
if (script->needsArgsObj()) { if (script->needsArgsObj()) {
ArgumentsObject *obj = ArgumentsObject::create(cx, regs.fp()); ArgumentsObject *obj = ArgumentsObject::createExpected(cx, regs.fp());
if (!obj) if (!obj)
goto error; goto error;
PUSH_COPY(ObjectValue(*obj)); PUSH_COPY(ObjectValue(*obj));
@ -2843,16 +2779,20 @@ BEGIN_CASE(JSOP_GETARG)
BEGIN_CASE(JSOP_CALLARG) BEGIN_CASE(JSOP_CALLARG)
{ {
unsigned i = GET_ARGNO(regs.pc); unsigned i = GET_ARGNO(regs.pc);
CheckArgAccess(regs.fp(), i); if (script->argsObjAliasesFormals())
PUSH_COPY(regs.fp()->formalArg(i)); PUSH_COPY(regs.fp()->argsObj().arg(i));
else
PUSH_COPY(regs.fp()->unaliasedFormal(i));
} }
END_CASE(JSOP_GETARG) END_CASE(JSOP_GETARG)
BEGIN_CASE(JSOP_SETARG) BEGIN_CASE(JSOP_SETARG)
{ {
unsigned i = GET_ARGNO(regs.pc); unsigned i = GET_ARGNO(regs.pc);
CheckArgAccess(regs.fp(), i); if (script->argsObjAliasesFormals())
regs.fp()->formalArg(i) = regs.sp[-1]; regs.fp()->argsObj().setArg(i, regs.sp[-1]);
else
regs.fp()->unaliasedFormal(i) = regs.sp[-1];
} }
END_CASE(JSOP_SETARG) END_CASE(JSOP_SETARG)
@ -2860,8 +2800,7 @@ BEGIN_CASE(JSOP_GETLOCAL)
BEGIN_CASE(JSOP_CALLLOCAL) BEGIN_CASE(JSOP_CALLLOCAL)
{ {
unsigned i = GET_SLOTNO(regs.pc); unsigned i = GET_SLOTNO(regs.pc);
CheckLocalAccess(regs.fp(), i); PUSH_COPY_SKIP_CHECK(regs.fp()->unaliasedLocal(i));
PUSH_COPY_SKIP_CHECK(regs.fp()->localSlot(i));
/* /*
* Skip the same-compartment assertion if the local will be immediately * Skip the same-compartment assertion if the local will be immediately
@ -2877,8 +2816,7 @@ END_CASE(JSOP_GETLOCAL)
BEGIN_CASE(JSOP_SETLOCAL) BEGIN_CASE(JSOP_SETLOCAL)
{ {
unsigned i = GET_SLOTNO(regs.pc); unsigned i = GET_SLOTNO(regs.pc);
CheckLocalAccess(regs.fp(), i); regs.fp()->unaliasedLocal(i) = regs.sp[-1];
regs.fp()->localSlot(i) = regs.sp[-1];
} }
END_CASE(JSOP_SETLOCAL) END_CASE(JSOP_SETLOCAL)
@ -3025,7 +2963,7 @@ END_CASE(JSOP_LAMBDA)
BEGIN_CASE(JSOP_CALLEE) BEGIN_CASE(JSOP_CALLEE)
JS_ASSERT(regs.fp()->isNonEvalFunctionFrame()); JS_ASSERT(regs.fp()->isNonEvalFunctionFrame());
PUSH_COPY(argv[-2]); PUSH_COPY(regs.fp()->calleev());
END_CASE(JSOP_CALLEE) END_CASE(JSOP_CALLEE)
BEGIN_CASE(JSOP_GETTER) BEGIN_CASE(JSOP_GETTER)
@ -3057,7 +2995,7 @@ BEGIN_CASE(JSOP_SETTER)
case JSOP_INITPROP: case JSOP_INITPROP:
{ {
JS_ASSERT(regs.sp - regs.fp()->base() >= 2); JS_ASSERT(regs.stackDepth() >= 2);
rval = regs.sp[-1]; rval = regs.sp[-1];
i = -1; i = -1;
PropertyName *name; PropertyName *name;
@ -3067,8 +3005,7 @@ BEGIN_CASE(JSOP_SETTER)
} }
default: default:
JS_ASSERT(op2 == JSOP_INITELEM); JS_ASSERT(op2 == JSOP_INITELEM);
JS_ASSERT(regs.stackDepth() >= 3);
JS_ASSERT(regs.sp - regs.fp()->base() >= 3);
rval = regs.sp[-1]; rval = regs.sp[-1];
id = JSID_VOID; id = JSID_VOID;
i = -2; i = -2;
@ -3186,7 +3123,7 @@ END_CASE(JSOP_NEWOBJECT)
BEGIN_CASE(JSOP_ENDINIT) BEGIN_CASE(JSOP_ENDINIT)
{ {
/* FIXME remove JSOP_ENDINIT bug 588522 */ /* FIXME remove JSOP_ENDINIT bug 588522 */
JS_ASSERT(regs.sp - regs.fp()->base() >= 1); JS_ASSERT(regs.stackDepth() >= 1);
JS_ASSERT(regs.sp[-1].isObject()); JS_ASSERT(regs.sp[-1].isObject());
} }
END_CASE(JSOP_ENDINIT) END_CASE(JSOP_ENDINIT)
@ -3194,7 +3131,7 @@ END_CASE(JSOP_ENDINIT)
BEGIN_CASE(JSOP_INITPROP) BEGIN_CASE(JSOP_INITPROP)
{ {
/* Load the property's initial value into rval. */ /* Load the property's initial value into rval. */
JS_ASSERT(regs.sp - regs.fp()->base() >= 2); JS_ASSERT(regs.stackDepth() >= 2);
Value rval = regs.sp[-1]; Value rval = regs.sp[-1];
/* Load the object being initialized into lval/obj. */ /* Load the object being initialized into lval/obj. */
@ -3222,7 +3159,7 @@ END_CASE(JSOP_INITPROP);
BEGIN_CASE(JSOP_INITELEM) BEGIN_CASE(JSOP_INITELEM)
{ {
/* Pop the element's value into rval. */ /* Pop the element's value into rval. */
JS_ASSERT(regs.sp - regs.fp()->base() >= 3); JS_ASSERT(regs.stackDepth() >= 3);
const Value &rref = regs.sp[-1]; const Value &rref = regs.sp[-1];
RootedObject &obj = rootObject0; RootedObject &obj = rootObject0;
@ -3708,24 +3645,17 @@ BEGIN_CASE(JSOP_ENTERLET1)
{ {
StaticBlockObject &blockObj = script->getObject(GET_UINT32_INDEX(regs.pc))->asStaticBlock(); StaticBlockObject &blockObj = script->getObject(GET_UINT32_INDEX(regs.pc))->asStaticBlock();
if (op == JSOP_ENTERBLOCK) {
JS_ASSERT(regs.stackDepth() == blockObj.stackDepth());
JS_ASSERT(regs.stackDepth() + blockObj.slotCount() <= script->nslots);
Value *vp = regs.sp + blockObj.slotCount();
SetValueRangeToUndefined(regs.sp, vp);
regs.sp = vp;
}
/* Clone block iff there are any closed-over variables. */ /* Clone block iff there are any closed-over variables. */
if (!regs.fp()->pushBlock(cx, blockObj)) if (!regs.fp()->pushBlock(cx, blockObj))
goto error; goto error;
if (op == JSOP_ENTERBLOCK) {
JS_ASSERT(regs.fp()->base() + blockObj.stackDepth() == regs.sp);
Value *vp = regs.sp + blockObj.slotCount();
JS_ASSERT(regs.sp < vp);
JS_ASSERT(vp <= regs.fp()->slots() + script->nslots);
SetValueRangeToUndefined(regs.sp, vp);
regs.sp = vp;
} else if (op == JSOP_ENTERLET0) {
JS_ASSERT(regs.fp()->base() + blockObj.stackDepth() + blockObj.slotCount()
== regs.sp);
} else if (op == JSOP_ENTERLET1) {
JS_ASSERT(regs.fp()->base() + blockObj.stackDepth() + blockObj.slotCount()
== regs.sp - 1);
}
} }
END_CASE(JSOP_ENTERBLOCK) END_CASE(JSOP_ENTERBLOCK)
@ -3740,12 +3670,12 @@ BEGIN_CASE(JSOP_LEAVEBLOCKEXPR)
if (op == JSOP_LEAVEBLOCK) { if (op == JSOP_LEAVEBLOCK) {
/* Pop the block's slots. */ /* Pop the block's slots. */
regs.sp -= GET_UINT16(regs.pc); regs.sp -= GET_UINT16(regs.pc);
JS_ASSERT(regs.fp()->base() + blockDepth == regs.sp); JS_ASSERT(regs.stackDepth() == blockDepth);
} else if (op == JSOP_LEAVEBLOCKEXPR) { } else if (op == JSOP_LEAVEBLOCKEXPR) {
/* Pop the block's slots maintaining the topmost expr. */ /* Pop the block's slots maintaining the topmost expr. */
Value *vp = &regs.sp[-1]; Value *vp = &regs.sp[-1];
regs.sp -= GET_UINT16(regs.pc); regs.sp -= GET_UINT16(regs.pc);
JS_ASSERT(regs.fp()->base() + blockDepth == regs.sp - 1); JS_ASSERT(regs.stackDepth() == blockDepth + 1);
regs.sp[-1] = *vp; regs.sp[-1] = *vp;
} else { } else {
/* Another op will pop; nothing to do here. */ /* Another op will pop; nothing to do here. */
@ -3759,11 +3689,13 @@ END_CASE(JSOP_LEAVEBLOCK)
BEGIN_CASE(JSOP_GENERATOR) BEGIN_CASE(JSOP_GENERATOR)
{ {
JS_ASSERT(!cx->isExceptionPending()); JS_ASSERT(!cx->isExceptionPending());
regs.fp()->initGeneratorFrame();
regs.pc += JSOP_GENERATOR_LENGTH; regs.pc += JSOP_GENERATOR_LENGTH;
JSObject *obj = js_NewGenerator(cx); JSObject *obj = js_NewGenerator(cx);
if (!obj) if (!obj)
goto error; goto error;
regs.fp()->setReturnValue(ObjectValue(*obj)); regs.fp()->setReturnValue(ObjectValue(*obj));
regs.fp()->setYielding();
interpReturnOK = true; interpReturnOK = true;
if (entryFrame != regs.fp()) if (entryFrame != regs.fp())
goto inline_return; goto inline_return;
@ -3773,9 +3705,9 @@ BEGIN_CASE(JSOP_GENERATOR)
BEGIN_CASE(JSOP_YIELD) BEGIN_CASE(JSOP_YIELD)
JS_ASSERT(!cx->isExceptionPending()); JS_ASSERT(!cx->isExceptionPending());
JS_ASSERT(regs.fp()->isNonEvalFunctionFrame()); JS_ASSERT(regs.fp()->isNonEvalFunctionFrame());
if (cx->generatorFor(regs.fp())->state == JSGEN_CLOSING) { if (cx->innermostGenerator()->state == JSGEN_CLOSING) {
js_ReportValueError(cx, JSMSG_BAD_GENERATOR_YIELD, js_ReportValueError(cx, JSMSG_BAD_GENERATOR_YIELD, JSDVG_SEARCH_STACK,
JSDVG_SEARCH_STACK, argv[-2], NULL); ObjectValue(regs.fp()->callee()), NULL);
goto error; goto error;
} }
regs.fp()->setReturnValue(regs.sp[-1]); regs.fp()->setReturnValue(regs.sp[-1]);
@ -3789,9 +3721,8 @@ BEGIN_CASE(JSOP_ARRAYPUSH)
uint32_t slot = GET_UINT16(regs.pc); uint32_t slot = GET_UINT16(regs.pc);
JS_ASSERT(script->nfixed <= slot); JS_ASSERT(script->nfixed <= slot);
JS_ASSERT(slot < script->nslots); JS_ASSERT(slot < script->nslots);
CheckLocalAccess(regs.fp(), slot);
RootedObject &obj = rootObject0; RootedObject &obj = rootObject0;
obj = &regs.fp()->slots()[slot].toObject(); obj = &regs.fp()->unaliasedLocal(slot).toObject();
if (!js_NewbornArrayPush(cx, obj, regs.sp[-1])) if (!js_NewbornArrayPush(cx, obj, regs.sp[-1]))
goto error; goto error;
regs.sp--; regs.sp--;
@ -3907,7 +3838,7 @@ END_CASE(JSOP_ARRAYPUSH)
* the for-in loop. * the for-in loop.
*/ */
regs.pc = (script)->main() + tn->start + tn->length; regs.pc = (script)->main() + tn->start + tn->length;
regs.sp = regs.fp()->base() + tn->stackDepth; regs.sp = regs.spForStackDepth(tn->stackDepth);
switch (tn->kind) { switch (tn->kind) {
case JSTRY_CATCH: case JSTRY_CATCH:
@ -3977,19 +3908,10 @@ END_CASE(JSOP_ARRAYPUSH)
exit: exit:
if (cx->compartment->debugMode()) if (cx->compartment->debugMode())
interpReturnOK = ScriptDebugEpilogue(cx, regs.fp(), interpReturnOK); interpReturnOK = ScriptDebugEpilogue(cx, regs.fp(), interpReturnOK);
interpReturnOK = ScriptEpilogueOrGeneratorYield(cx, regs.fp(), interpReturnOK); if (!regs.fp()->isYielding())
regs.fp()->epilogue(cx);
regs.fp()->setFinishedInInterpreter(); regs.fp()->setFinishedInInterpreter();
#ifdef DEBUG
JS_ASSERT(entryFrame == regs.fp());
if (regs.fp()->isFunctionFrame())
AssertValidFunctionScopeChainAtExit(regs.fp());
else if (regs.fp()->isEvalFrame())
AssertValidEvalFrameScopeChainAtExit(regs.fp());
else if (!regs.fp()->isGeneratorFrame())
JS_ASSERT(!regs.fp()->scopeChain()->isScope());
#endif
#ifdef JS_METHODJIT #ifdef JS_METHODJIT
/* /*
* This path is used when it's guaranteed the method can be finished * This path is used when it's guaranteed the method can be finished

View file

@ -18,31 +18,6 @@
namespace js { namespace js {
/*
* ScriptPrologue/ScriptEpilogue must be called in pairs. ScriptPrologue
* must be called before the script executes. ScriptEpilogue must be called
* after the script returns or exits via exception.
*/
inline bool
ScriptPrologue(JSContext *cx, StackFrame *fp, JSScript *script);
inline bool
ScriptEpilogue(JSContext *cx, StackFrame *fp, bool ok);
/*
* It is not valid to call ScriptPrologue when a generator is resumed or to
* call ScriptEpilogue when a generator yields. However, the debugger still
* needs LIFO notification of generator start/stop. This pair of functions does
* the right thing based on the state of 'fp'.
*/
inline bool
ScriptPrologueOrGeneratorResume(JSContext *cx, StackFrame *fp);
inline bool
ScriptEpilogueOrGeneratorYield(JSContext *cx, StackFrame *fp, bool ok);
/* Implemented in jsdbgapi: */ /* Implemented in jsdbgapi: */
/* /*
@ -274,9 +249,6 @@ UnwindForUncatchableException(JSContext *cx, const FrameRegs &regs);
extern bool extern bool
OnUnknownMethod(JSContext *cx, HandleObject obj, Value idval, Value *vp); OnUnknownMethod(JSContext *cx, HandleObject obj, Value idval, Value *vp);
inline void
AssertValidFunctionScopeChainAtExit(StackFrame *fp);
class TryNoteIter class TryNoteIter
{ {
const FrameRegs &regs; const FrameRegs &regs;
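
The ScriptPrologue/ScriptEpilogue pair removed above (and its inline bodies removed from jsinterpinlines.h below) is replaced by StackFrame::prologue/epilogue, which the interpreter now calls directly; the generator-aware wrappers go away because resumed generator frames skip the prologue and yielding frames defer the epilogue. A minimal sketch of the new pairing as used in jsinterp.cpp, with Execute standing in for the opcode loop (a hypothetical name, not part of the patch):

// Sketch only: how the interpreter pairs the new per-frame hooks.
static bool
RunFrame(JSContext *cx, StackFrame *fp, bool newType)
{
    // A resumed generator frame already ran its prologue on first entry.
    if (!fp->isGeneratorFrame() && !fp->prologue(cx, newType))
        return false;

    bool ok = Execute(cx, fp);            // hypothetical stand-in for the opcode loop

    // A yielding frame will be resumed later, so its epilogue is deferred.
    if (!fp->isYielding())
        fp->epilogue(cx);
    return ok;
}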

View file

@ -421,70 +421,6 @@ DefVarOrConstOperation(JSContext *cx, HandleObject varobj, PropertyName *dn, uns
return true; return true;
} }
inline bool
FunctionNeedsPrologue(JSContext *cx, JSFunction *fun)
{
/* Heavyweight functions need call objects created. */
if (fun->isHeavyweight())
return true;
/* Outer and inner functions need to preserve nesting invariants. */
if (cx->typeInferenceEnabled() && fun->script()->nesting())
return true;
return false;
}
inline bool
ScriptPrologue(JSContext *cx, StackFrame *fp, bool newType)
{
JS_ASSERT_IF(fp->isNonEvalFunctionFrame() && fp->fun()->isHeavyweight(), fp->hasCallObj());
if (fp->isConstructing()) {
JSObject *obj = js_CreateThisForFunction(cx, RootedObject(cx, &fp->callee()), newType);
if (!obj)
return false;
fp->functionThis().setObject(*obj);
}
Probes::enterJSFun(cx, fp->maybeFun(), fp->script());
return true;
}
inline bool
ScriptEpilogue(JSContext *cx, StackFrame *fp, bool ok)
{
Probes::exitJSFun(cx, fp->maybeFun(), fp->script());
/*
* If inline-constructing, replace primitive rval with the new object
* passed in via |this|, and instrument this constructor invocation.
*/
if (fp->isConstructing() && ok) {
if (fp->returnValue().isPrimitive())
fp->setReturnValue(ObjectValue(fp->constructorThis()));
}
return ok;
}
inline bool
ScriptPrologueOrGeneratorResume(JSContext *cx, StackFrame *fp, bool newType)
{
if (!fp->isGeneratorFrame())
return ScriptPrologue(cx, fp, newType);
return true;
}
inline bool
ScriptEpilogueOrGeneratorYield(JSContext *cx, StackFrame *fp, bool ok)
{
if (!fp->isYielding())
return ScriptEpilogue(cx, fp, ok);
return ok;
}
inline void inline void
InterpreterFrames::enableInterruptsIfRunning(JSScript *script) InterpreterFrames::enableInterruptsIfRunning(JSScript *script)
{ {
@ -492,49 +428,6 @@ InterpreterFrames::enableInterruptsIfRunning(JSScript *script)
enabler.enableInterrupts(); enabler.enableInterrupts();
} }
inline void
AssertValidEvalFrameScopeChainAtExit(StackFrame *fp)
{
#ifdef DEBUG
JS_ASSERT(fp->isEvalFrame());
JS_ASSERT(!fp->hasBlockChain());
JSObject &scope = *fp->scopeChain();
if (fp->isStrictEvalFrame())
JS_ASSERT(scope.asCall().maybeStackFrame() == fp);
else if (fp->isDebuggerFrame())
JS_ASSERT(!scope.isScope());
else if (fp->isDirectEvalFrame())
JS_ASSERT(scope == *fp->prev()->scopeChain());
else
JS_ASSERT(scope.isGlobal());
#endif
}
inline void
AssertValidFunctionScopeChainAtExit(StackFrame *fp)
{
#ifdef DEBUG
JS_ASSERT(fp->isFunctionFrame());
if (fp->isGeneratorFrame() || fp->isYielding())
return;
if (fp->isEvalFrame()) {
AssertValidEvalFrameScopeChainAtExit(fp);
return;
}
JS_ASSERT(!fp->hasBlockChain());
JSObject &scope = *fp->scopeChain();
if (fp->fun()->isHeavyweight() && fp->hasCallObj())
JS_ASSERT(scope.asCall().maybeStackFrame() == fp);
else if (scope.isCall() || scope.isBlock())
JS_ASSERT(scope.asScope().maybeStackFrame() != fp);
#endif
}
static JS_ALWAYS_INLINE bool static JS_ALWAYS_INLINE bool
AddOperation(JSContext *cx, const Value &lhs, const Value &rhs, Value *res) AddOperation(JSContext *cx, const Value &lhs, const Value &rhs, Value *res)
{ {
@ -722,7 +615,7 @@ GetObjectElementOperation(JSContext *cx, JSOp op, HandleObject obj, const Value
break; break;
} }
} else if (obj->isArguments()) { } else if (obj->isArguments()) {
if (obj->asArguments().getElement(index, res)) if (obj->asArguments().maybeGetElement(index, res))
break; break;
} }
if (!obj->getElement(cx, index, res)) if (!obj->getElement(cx, index, res))
@ -888,7 +781,7 @@ GuardFunApplySpeculation(JSContext *cx, FrameRegs &regs)
if (!IsNativeFunction(args.calleev(), js_fun_apply)) { if (!IsNativeFunction(args.calleev(), js_fun_apply)) {
if (!JSScript::applySpeculationFailed(cx, regs.fp()->script())) if (!JSScript::applySpeculationFailed(cx, regs.fp()->script()))
return false; return false;
args[1] = ObjectValue(regs.fp()->argsObj()); regs.sp[-1] = ObjectValue(regs.fp()->argsObj());
} }
} }
return true; return true;

View file

@ -1324,32 +1324,23 @@ generator_finalize(FreeOp *fop, JSObject *obj)
JS_ASSERT(gen->state == JSGEN_NEWBORN || JS_ASSERT(gen->state == JSGEN_NEWBORN ||
gen->state == JSGEN_CLOSED || gen->state == JSGEN_CLOSED ||
gen->state == JSGEN_OPEN); gen->state == JSGEN_OPEN);
JS_POISON(gen->fp, JS_FREE_PATTERN, sizeof(StackFrame));
JS_POISON(gen, JS_FREE_PATTERN, sizeof(JSGenerator));
fop->free_(gen); fop->free_(gen);
} }
static void static void
MarkGenerator(JSTracer *trc, JSGenerator *gen) MarkGenerator(JSTracer *trc, JSGenerator *gen)
{ {
StackFrame *fp = gen->floatingFrame(); MarkValueRange(trc,
HeapValueify(gen->fp->generatorArgsSnapshotBegin()),
/* HeapValueify(gen->fp->generatorArgsSnapshotEnd()),
* MarkGenerator should only be called when regs is based on the floating frame. "Generator Floating Args");
* See calls to RebaseRegsFromTo. gen->fp->mark(trc);
*/ MarkValueRange(trc,
JS_ASSERT(size_t(gen->regs.sp - fp->slots()) <= fp->numSlots()); HeapValueify(gen->fp->generatorSlotsSnapshotBegin()),
HeapValueify(gen->regs.sp),
/* "Generator Floating Stack");
* Currently, generators are not mjitted. Still, (overflow) args can be
* pushed by the mjit and need to be conservatively marked. Technically, the
* formal args and generator slots are safe for exact marking, but since the
* plan is to eventually mjit generators, it makes sense to future-proof
* this code and save someone an hour later.
*/
MarkValueRange(trc, (HeapValue *)fp->formalArgsEnd() - gen->floatingStack,
gen->floatingStack, "Generator Floating Args");
fp->mark(trc);
MarkValueRange(trc, gen->regs.sp - fp->slots(),
(HeapValue *)fp->slots(), "Generator Floating Stack");
} }
static void static void
@ -1367,14 +1358,7 @@ generator_trace(JSTracer *trc, JSObject *obj)
if (!gen) if (!gen)
return; return;
/* if (gen->state == JSGEN_NEWBORN || gen->state == JSGEN_OPEN)
* Do not mark if the generator is running; the contents may be trash and
* will be replaced when the generator stops.
*/
if (gen->state == JSGEN_RUNNING || gen->state == JSGEN_CLOSING)
return;
JS_ASSERT(gen->liveFrame() == gen->floatingFrame());
MarkGenerator(trc, gen); MarkGenerator(trc, gen);
} }
@ -1415,9 +1399,8 @@ JSObject *
js_NewGenerator(JSContext *cx) js_NewGenerator(JSContext *cx)
{ {
FrameRegs &stackRegs = cx->regs(); FrameRegs &stackRegs = cx->regs();
JS_ASSERT(stackRegs.stackDepth() == 0);
StackFrame *stackfp = stackRegs.fp(); StackFrame *stackfp = stackRegs.fp();
JS_ASSERT(stackfp->base() == cx->regs().sp);
JS_ASSERT(stackfp->actualArgs() <= stackfp->formalArgs());
Rooted<GlobalObject*> global(cx, &stackfp->global()); Rooted<GlobalObject*> global(cx, &stackfp->global());
JSObject *proto = global->getOrCreateGeneratorPrototype(cx); JSObject *proto = global->getOrCreateGeneratorPrototype(cx);
@ -1428,15 +1411,15 @@ js_NewGenerator(JSContext *cx)
return NULL; return NULL;
/* Load and compute stack slot counts. */ /* Load and compute stack slot counts. */
Value *stackvp = stackfp->actualArgs() - 2; Value *stackvp = stackfp->generatorArgsSnapshotBegin();
unsigned vplen = stackfp->formalArgsEnd() - stackvp; unsigned vplen = stackfp->generatorArgsSnapshotEnd() - stackvp;
/* Compute JSGenerator size. */ /* Compute JSGenerator size. */
unsigned nbytes = sizeof(JSGenerator) + unsigned nbytes = sizeof(JSGenerator) +
(-1 + /* one Value included in JSGenerator */ (-1 + /* one Value included in JSGenerator */
vplen + vplen +
VALUES_PER_STACK_FRAME + VALUES_PER_STACK_FRAME +
stackfp->numSlots()) * sizeof(HeapValue); stackfp->script()->nslots) * sizeof(HeapValue);
JS_ASSERT(nbytes % sizeof(Value) == 0); JS_ASSERT(nbytes % sizeof(Value) == 0);
JS_STATIC_ASSERT(sizeof(StackFrame) % sizeof(HeapValue) == 0); JS_STATIC_ASSERT(sizeof(StackFrame) % sizeof(HeapValue) == 0);
@ -1447,35 +1430,25 @@ js_NewGenerator(JSContext *cx)
SetValueRangeToUndefined((Value *)gen, nbytes / sizeof(Value)); SetValueRangeToUndefined((Value *)gen, nbytes / sizeof(Value));
/* Cut up floatingStack space. */ /* Cut up floatingStack space. */
HeapValue *genvp = gen->floatingStack; HeapValue *genvp = gen->stackSnapshot;
StackFrame *genfp = reinterpret_cast<StackFrame *>(genvp + vplen); StackFrame *genfp = reinterpret_cast<StackFrame *>(genvp + vplen);
/* Initialize JSGenerator. */ /* Initialize JSGenerator. */
gen->obj.init(obj); gen->obj.init(obj);
gen->state = JSGEN_NEWBORN; gen->state = JSGEN_NEWBORN;
gen->enumerators = NULL; gen->enumerators = NULL;
gen->floating = genfp; gen->fp = genfp;
gen->prevGenerator = NULL;
/* Copy from the stack to the generator's floating frame. */ /* Copy from the stack to the generator's floating frame. */
gen->regs.rebaseFromTo(stackRegs, *genfp); gen->regs.rebaseFromTo(stackRegs, *genfp);
genfp->stealFrameAndSlots<HeapValue, Value, StackFrame::DoPostBarrier>( genfp->copyFrameAndValues<HeapValue, Value, StackFrame::DoPostBarrier>(
cx, genfp, genvp, stackfp, stackvp, stackRegs.sp); cx, genfp, genvp, stackfp, stackvp, stackRegs.sp);
genfp->initFloatingGenerator();
stackfp->setYielding(); /* XXX: to be removed */
obj->setPrivate(gen); obj->setPrivate(gen);
return obj; return obj;
} }
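
The allocation above packs everything the generator needs into one trailing buffer: the vp snapshot (callee, this, actuals), the copied StackFrame, and the expression-stack slots. A minimal standalone sketch of that size arithmetic, using stand-in types (the real sizes come from HeapValue, StackFrame, and the script's nslots):

#include <cstddef>
#include <cstdint>

// Stand-ins for SpiderMonkey internals; sizes are illustrative only.
struct ValueSketch      { uint64_t bits; };
struct StackFrameSketch { uint8_t opaque[64]; };

// Layout implied by js_NewGenerator:
//   [JSGenerator header | vplen values | StackFrame | nslots values]
// The "-1" accounts for the one value already embedded in the header
// (the stackSnapshot[1] member).
size_t generatorBytes(size_t headerBytes, size_t vplen, size_t nslots)
{
    const size_t valuesPerFrame = sizeof(StackFrameSketch) / sizeof(ValueSketch);
    return headerBytes + (vplen - 1 + valuesPerFrame + nslots) * sizeof(ValueSketch);
}
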
JSGenerator *
js_FloatingFrameToGenerator(StackFrame *fp)
{
JS_ASSERT(fp->isGeneratorFrame() && fp->isFloatingGenerator());
char *floatingStackp = (char *)(fp->actualArgs() - 2);
char *p = floatingStackp - offsetof(JSGenerator, floatingStack);
return reinterpret_cast<JSGenerator *>(p);
}
typedef enum JSGeneratorOp { typedef enum JSGeneratorOp {
JSGENOP_NEXT, JSGENOP_NEXT,
JSGENOP_SEND, JSGENOP_SEND,
@ -1492,16 +1465,10 @@ SendToGenerator(JSContext *cx, JSGeneratorOp op, JSObject *obj,
JSGenerator *gen, const Value &arg) JSGenerator *gen, const Value &arg)
{ {
if (gen->state == JSGEN_RUNNING || gen->state == JSGEN_CLOSING) { if (gen->state == JSGEN_RUNNING || gen->state == JSGEN_CLOSING) {
js_ReportValueError(cx, JSMSG_NESTING_GENERATOR, JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_NESTING_GENERATOR);
JSDVG_SEARCH_STACK, ObjectOrNullValue(obj),
JS_GetFunctionId(gen->floatingFrame()->fun()));
return JS_FALSE; return JS_FALSE;
} }
/* Check for OOM errors here, where we can fail easily. */
if (!cx->ensureGeneratorStackSpace())
return JS_FALSE;
/* /*
* Write barrier is needed since the generator stack can be updated, * Write barrier is needed since the generator stack can be updated,
* and it's not barriered in any other way. We need to do it before * and it's not barriered in any other way. We need to do it before
@ -1541,8 +1508,6 @@ SendToGenerator(JSContext *cx, JSGeneratorOp op, JSObject *obj,
break; break;
} }
StackFrame *genfp = gen->floatingFrame();
JSBool ok; JSBool ok;
{ {
GeneratorFrameGuard gfg; GeneratorFrameGuard gfg;
@ -1553,7 +1518,6 @@ SendToGenerator(JSContext *cx, JSGeneratorOp op, JSObject *obj,
StackFrame *fp = gfg.fp(); StackFrame *fp = gfg.fp();
gen->regs = cx->regs(); gen->regs = cx->regs();
JS_ASSERT(gen->liveFrame() == fp);
cx->enterGenerator(gen); /* OOM check above. */ cx->enterGenerator(gen); /* OOM check above. */
JSObject *enumerators = cx->enumerators; JSObject *enumerators = cx->enumerators;
@ -1566,18 +1530,18 @@ SendToGenerator(JSContext *cx, JSGeneratorOp op, JSObject *obj,
cx->leaveGenerator(gen); cx->leaveGenerator(gen);
} }
if (gen->floatingFrame()->isYielding()) { if (gen->fp->isYielding()) {
/* Yield cannot fail, throw or be called on closing. */ /* Yield cannot fail, throw or be called on closing. */
JS_ASSERT(ok); JS_ASSERT(ok);
JS_ASSERT(!cx->isExceptionPending()); JS_ASSERT(!cx->isExceptionPending());
JS_ASSERT(gen->state == JSGEN_RUNNING); JS_ASSERT(gen->state == JSGEN_RUNNING);
JS_ASSERT(op != JSGENOP_CLOSE); JS_ASSERT(op != JSGENOP_CLOSE);
genfp->clearYielding(); gen->fp->clearYielding();
gen->state = JSGEN_OPEN; gen->state = JSGEN_OPEN;
return JS_TRUE; return JS_TRUE;
} }
genfp->clearReturnValue(); gen->fp->clearReturnValue();
gen->state = JSGEN_CLOSED; gen->state = JSGEN_CLOSED;
if (ok) { if (ok) {
/* Returned, explicitly or by falling off the end. */ /* Returned, explicitly or by falling off the end. */
@ -1669,7 +1633,7 @@ generator_op(JSContext *cx, Native native, JSGeneratorOp op, Value *vp, unsigned
if (!SendToGenerator(cx, op, obj, gen, undef ? args[0] : UndefinedValue())) if (!SendToGenerator(cx, op, obj, gen, undef ? args[0] : UndefinedValue()))
return false; return false;
args.rval() = gen->floatingFrame()->returnValue(); args.rval() = gen->fp->returnValue();
return true; return true;
} }


@ -262,65 +262,28 @@ ForOf(JSContext *cx, const Value &iterable, Op op)
/* /*
* Generator state codes. * Generator state codes.
*/ */
typedef enum JSGeneratorState { enum JSGeneratorState
{
JSGEN_NEWBORN, /* not yet started */ JSGEN_NEWBORN, /* not yet started */
JSGEN_OPEN, /* started by a .next() or .send(undefined) call */ JSGEN_OPEN, /* started by a .next() or .send(undefined) call */
JSGEN_RUNNING, /* currently executing via .next(), etc., call */ JSGEN_RUNNING, /* currently executing via .next(), etc., call */
JSGEN_CLOSING, /* close method is doing asynchronous return */ JSGEN_CLOSING, /* close method is doing asynchronous return */
JSGEN_CLOSED /* closed, cannot be started or closed again */ JSGEN_CLOSED /* closed, cannot be started or closed again */
} JSGeneratorState; };
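
The five states above are the whole generator lifecycle this file manages; SendToGenerator rejects reentry while RUNNING or CLOSING and moves a resumed generator back to OPEN only on a successful yield. A rough, hedged sketch of those transitions (illustrative control flow only, not the exact code path):

enum GeneratorStateSketch { NEWBORN, OPEN, RUNNING, CLOSING, CLOSED };

// Illustrative only; the real transitions are spread across SendToGenerator.
bool resumeSketch(GeneratorStateSketch &state, bool isClose, bool yielded, bool bodyOk)
{
    if (state == RUNNING || state == CLOSING)
        return false;                       // JSMSG_NESTING_GENERATOR error path
    state = isClose ? CLOSING : RUNNING;    // generator body runs here
    if (yielded) {                          // yield cannot fail, throw, or close
        state = OPEN;
        return true;
    }
    state = CLOSED;                         // returned, fell off the end, or threw
    return bodyOk;
}
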
struct JSGenerator { struct JSGenerator
{
js::HeapPtrObject obj; js::HeapPtrObject obj;
JSGeneratorState state; JSGeneratorState state;
js::FrameRegs regs; js::FrameRegs regs;
JSObject *enumerators; JSObject *enumerators;
js::StackFrame *floating; JSGenerator *prevGenerator;
js::HeapValue floatingStack[1]; js::StackFrame *fp;
js::HeapValue stackSnapshot[1];
js::StackFrame *floatingFrame() {
return floating;
}
js::StackFrame *liveFrame() {
JS_ASSERT((state == JSGEN_RUNNING || state == JSGEN_CLOSING) ==
(regs.fp() != floatingFrame()));
return regs.fp();
}
}; };
extern JSObject * extern JSObject *
js_NewGenerator(JSContext *cx); js_NewGenerator(JSContext *cx);
/*
* Generator stack frames do not have stable pointers since they get copied to
* and from the generator object and the stack (see SendToGenerator). This is a
* problem for Block and With objects, which need to store a pointer to the
* enclosing stack frame. The solution is for Block and With objects to store
* a pointer to the "floating" stack frame stored in the generator object,
* since it is stable, and maintain, in the generator object, a pointer to the
* "live" stack frame (either a copy on the stack or the floating frame). Thus,
* Block and With objects must "normalize" to and from the floating/live frames
* in the case of generators using the following functions.
*/
inline js::StackFrame *
js_FloatingFrameIfGenerator(JSContext *cx, js::StackFrame *fp)
{
if (JS_UNLIKELY(fp->isGeneratorFrame()))
return cx->generatorFor(fp)->floatingFrame();
return fp;
}
/* Given a floating frame, given the JSGenerator containing it. */
extern JSGenerator *
js_FloatingFrameToGenerator(js::StackFrame *fp);
inline js::StackFrame *
js_LiveFrameIfGenerator(js::StackFrame *fp)
{
return fp->isGeneratorFrame() ? js_FloatingFrameToGenerator(fp)->liveFrame() : fp;
}
#endif #endif
extern JSObject * extern JSObject *


@ -2849,6 +2849,7 @@ js::NewObjectWithType(JSContext *cx, HandleTypeObject type, JSObject *parent, gc
JS_ASSERT(type->proto->hasNewType(type)); JS_ASSERT(type->proto->hasNewType(type));
JS_ASSERT(parent); JS_ASSERT(parent);
JS_ASSERT(kind <= gc::FINALIZE_OBJECT_LAST);
if (CanBeFinalizedInBackground(kind, &ObjectClass)) if (CanBeFinalizedInBackground(kind, &ObjectClass))
kind = GetBackgroundAllocKind(kind); kind = GetBackgroundAllocKind(kind);
@ -3871,14 +3872,6 @@ JSObject::growSlots(JSContext *cx, uint32_t oldCount, uint32_t newCount)
JS_ASSERT(newCount >= SLOT_CAPACITY_MIN); JS_ASSERT(newCount >= SLOT_CAPACITY_MIN);
JS_ASSERT(!isDenseArray()); JS_ASSERT(!isDenseArray());
/*
* Slots are only allocated for call objects when new properties are
* added to them, which can only happen while the call is still on the
* stack (and an eval, DEFFUN, etc. happens). We thus do not need to
* worry about updating any active outer function args/vars.
*/
JS_ASSERT_IF(isCall(), asCall().maybeStackFrame() != NULL);
/* /*
* Slot capacities are determined by the span of allocated objects. Due to * Slot capacities are determined by the span of allocated objects. Due to
* the limited number of bits to store shape slots, object growth is * the limited number of bits to store shape slots, object growth is
@ -6245,15 +6238,9 @@ js_DumpStackFrame(JSContext *cx, StackFrame *start)
} }
} }
if (fp->hasArgs()) { if (fp->hasArgs()) {
fprintf(stderr, " actuals: %p (%u) ", (void *) fp->actualArgs(), (unsigned) fp->numActualArgs()); fprintf(stderr, " actuals: %p (%u) ", (void *) fp->actuals(), (unsigned) fp->numActualArgs());
fprintf(stderr, " formals: %p (%u)\n", (void *) fp->formalArgs(), (unsigned) fp->numFormalArgs()); fprintf(stderr, " formals: %p (%u)\n", (void *) fp->formals(), (unsigned) fp->numFormalArgs());
} }
if (fp->hasCallObj()) {
fprintf(stderr, " has call obj: ");
dumpValue(ObjectValue(fp->callObj()));
fprintf(stderr, "\n");
}
MaybeDumpObject("argsobj", fp->maybeArgsObj());
MaybeDumpObject("blockChain", fp->maybeBlockChain()); MaybeDumpObject("blockChain", fp->maybeBlockChain());
if (!fp->isDummyFrame()) { if (!fp->isDummyFrame()) {
MaybeDumpValue("this", fp->thisValue()); MaybeDumpValue("this", fp->thisValue());


@ -514,12 +514,12 @@ js_Disassemble1(JSContext *cx, JSScript *script, jsbytecode *pc,
} }
case JOF_SCOPECOORD: { case JOF_SCOPECOORD: {
Value v = StringValue(ScopeCoordinateName(script, pc)); Value v = StringValue(ScopeCoordinateName(cx->runtime, script, pc));
JSAutoByteString bytes; JSAutoByteString bytes;
if (!ToDisassemblySource(cx, v, &bytes)) if (!ToDisassemblySource(cx, v, &bytes))
return 0; return 0;
ScopeCoordinate sc(pc); ScopeCoordinate sc(pc);
Sprint(sp, " %s (hops = %u, slot = %u)", bytes.ptr(), sc.hops, sc.binding); Sprint(sp, " %s (hops = %u, slot = %u)", bytes.ptr(), sc.hops, sc.slot);
break; break;
} }
@ -1409,6 +1409,12 @@ AddParenSlop(SprintStack *ss)
ss->sprinter.reserveAndClear(PAREN_SLOP); ss->sprinter.reserveAndClear(PAREN_SLOP);
} }
static unsigned
StackDepth(JSScript *script)
{
return script->nslots - script->nfixed;
}
static JSBool static JSBool
PushOff(SprintStack *ss, ptrdiff_t off, JSOp op, jsbytecode *pc = NULL) PushOff(SprintStack *ss, ptrdiff_t off, JSOp op, jsbytecode *pc = NULL)
{ {
@ -1844,7 +1850,7 @@ static bool
IsVarSlot(JSPrinter *jp, jsbytecode *pc, JSAtom **varAtom, int *localSlot) IsVarSlot(JSPrinter *jp, jsbytecode *pc, JSAtom **varAtom, int *localSlot)
{ {
if (JOF_OPTYPE(*pc) == JOF_SCOPECOORD) { if (JOF_OPTYPE(*pc) == JOF_SCOPECOORD) {
*varAtom = ScopeCoordinateName(jp->script, pc); *varAtom = ScopeCoordinateName(jp->sprinter.context->runtime, jp->script, pc);
LOCAL_ASSERT_RV(*varAtom, NULL); LOCAL_ASSERT_RV(*varAtom, NULL);
return true; return true;
} }
@ -5725,7 +5731,7 @@ js_DecompileValueGenerator(JSContext *cx, int spindex, jsval v,
* calculated value matching v under assumption that it is * calculated value matching v under assumption that it is
* it that caused exception, see bug 328664. * it that caused exception, see bug 328664.
*/ */
Value *stackBase = fp->base(); Value *stackBase = cx->regs().spForStackDepth(0);
Value *sp = cx->regs().sp; Value *sp = cx->regs().sp;
do { do {
if (sp == stackBase) { if (sp == stackBase) {


@ -341,18 +341,14 @@ OPDEF(JSOP_FINALLY, 135,"finally", NULL, 1, 0, 2, 0, JOF_BYTE)
* 'slot' does not include RESERVED_SLOTS). * 'slot' does not include RESERVED_SLOTS).
* uint32 block: the index (into the script object table) of the block chain * uint32 block: the index (into the script object table) of the block chain
* at the point of the variable access. * at the point of the variable access.
*
* XXX: there is also a temporary 2-byte index (indicating the frame slot
* aliased by the scope chain) which will be removed with the last patch of bug
* 659577.
*/ */
OPDEF(JSOP_GETALIASEDVAR, 136,"getaliasedvar",NULL, 11, 0, 1, 19, JOF_SCOPECOORD|JOF_NAME) OPDEF(JSOP_GETALIASEDVAR, 136,"getaliasedvar",NULL, 9, 0, 1, 19, JOF_SCOPECOORD|JOF_NAME)
OPDEF(JSOP_CALLALIASEDVAR,137,"callaliasedvar",NULL, 11, 0, 1, 19, JOF_SCOPECOORD|JOF_NAME) OPDEF(JSOP_CALLALIASEDVAR,137,"callaliasedvar",NULL, 9, 0, 1, 19, JOF_SCOPECOORD|JOF_NAME)
OPDEF(JSOP_SETALIASEDVAR, 138,"setaliasedvar",NULL, 11, 1, 1, 3, JOF_SCOPECOORD|JOF_NAME|JOF_SET|JOF_DETECTING) OPDEF(JSOP_SETALIASEDVAR, 138,"setaliasedvar",NULL, 9, 1, 1, 3, JOF_SCOPECOORD|JOF_NAME|JOF_SET|JOF_DETECTING)
OPDEF(JSOP_INCALIASEDVAR, 139,"incaliasedvar",NULL, 12, 0, 1, 15, JOF_SCOPECOORD|JOF_NAME|JOF_INC|JOF_TMPSLOT3|JOF_DECOMPOSE) OPDEF(JSOP_INCALIASEDVAR, 139,"incaliasedvar",NULL, 10, 0, 1, 15, JOF_SCOPECOORD|JOF_NAME|JOF_INC|JOF_TMPSLOT3|JOF_DECOMPOSE)
OPDEF(JSOP_DECALIASEDVAR, 140,"decaliasedvar",NULL, 12, 0, 1, 15, JOF_SCOPECOORD|JOF_NAME|JOF_DEC|JOF_TMPSLOT3|JOF_DECOMPOSE) OPDEF(JSOP_DECALIASEDVAR, 140,"decaliasedvar",NULL, 10, 0, 1, 15, JOF_SCOPECOORD|JOF_NAME|JOF_DEC|JOF_TMPSLOT3|JOF_DECOMPOSE)
OPDEF(JSOP_ALIASEDVARINC, 141,"aliasedvarinc",NULL, 12, 0, 1, 15, JOF_SCOPECOORD|JOF_NAME|JOF_INC|JOF_POST|JOF_TMPSLOT3|JOF_DECOMPOSE) OPDEF(JSOP_ALIASEDVARINC, 141,"aliasedvarinc",NULL, 10, 0, 1, 15, JOF_SCOPECOORD|JOF_NAME|JOF_INC|JOF_POST|JOF_TMPSLOT3|JOF_DECOMPOSE)
OPDEF(JSOP_ALIASEDVARDEC, 142,"aliasedvardec",NULL, 12, 0, 1, 15, JOF_SCOPECOORD|JOF_NAME|JOF_DEC|JOF_POST|JOF_TMPSLOT3|JOF_DECOMPOSE) OPDEF(JSOP_ALIASEDVARDEC, 142,"aliasedvardec",NULL, 10, 0, 1, 15, JOF_SCOPECOORD|JOF_NAME|JOF_DEC|JOF_POST|JOF_TMPSLOT3|JOF_DECOMPOSE)
/* Unused. */ /* Unused. */
OPDEF(JSOP_UNUSED8, 143,"unused8", NULL, 1, 0, 0, 0, JOF_BYTE) OPDEF(JSOP_UNUSED8, 143,"unused8", NULL, 1, 0, 0, 0, JOF_BYTE)
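
With the temporary frame-slot immediate gone, the aliased-var ops are 9 bytes (10 for the decomposed inc/dec forms): the opcode byte, a uint16 hop count, a uint16 slot (excluding the reserved slots), and a uint32 index into the script's object table for the block chain. A hypothetical standalone decoder for that layout; the byte order here is an assumption, and the real code reads operands through the engine's accessor macros:

#include <cstdint>
#include <cstring>

struct ScopeCoordinateSketch {
    uint16_t hops;        // enclosing scope objects to skip
    uint16_t slot;        // slot within the scope object, reserved slots excluded
    uint32_t blockIndex;  // script object-table index of the block chain
};

// Assumed immediate layout: [op][hops:2][slot:2][blockIndex:4], host byte order.
ScopeCoordinateSketch decodeAliasedVarOperand(const uint8_t *pc)
{
    ScopeCoordinateSketch sc;
    std::memcpy(&sc.hops,       pc + 1, sizeof sc.hops);
    std::memcpy(&sc.slot,       pc + 3, sizeof sc.slot);
    std::memcpy(&sc.blockIndex, pc + 5, sizeof sc.blockIndex);
    return sc;
}
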


@ -289,7 +289,7 @@ Shape::getChildBinding(JSContext *cx, const StackShape &child)
* the fixed slot count here, which will feed into call objects created * the fixed slot count here, which will feed into call objects created
* off of the bindings. * off of the bindings.
*/ */
uint32_t slots = child.slotSpan() + 1; /* Add one for private data. */ uint32_t slots = child.slotSpan();
gc::AllocKind kind = gc::GetGCObjectKind(slots); gc::AllocKind kind = gc::GetGCObjectKind(slots);
/* /*
@ -300,11 +300,11 @@ Shape::getChildBinding(JSContext *cx, const StackShape &child)
*/ */
uint32_t nfixed = gc::GetGCKindSlots(kind); uint32_t nfixed = gc::GetGCKindSlots(kind);
if (nfixed < slots) { if (nfixed < slots) {
nfixed = CallObject::RESERVED_SLOTS + 1; nfixed = CallObject::RESERVED_SLOTS;
JS_ASSERT(gc::GetGCKindSlots(gc::GetGCObjectKind(nfixed)) == CallObject::RESERVED_SLOTS + 1); JS_ASSERT(gc::GetGCKindSlots(gc::GetGCObjectKind(nfixed)) == CallObject::RESERVED_SLOTS);
} }
shape->setNumFixedSlots(nfixed - 1); shape->setNumFixedSlots(nfixed);
} }
return shape; return shape;
} }
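
Dropping the private-data slot leaves the call-object shape with a simple rule: either every binding fits in fixed slots, or numFixedSlots is pinned to RESERVED_SLOTS so all bindings land contiguously in the dynamic slots array. A hedged sketch of that choice (gcKindSlots stands in for the slot count of the chosen GC alloc kind):

#include <cstdint>

// Hypothetical mirror of the fixed-slot decision in Shape::getChildBinding.
uint32_t chooseNumFixed(uint32_t slotSpan, uint32_t gcKindSlots, uint32_t reservedSlots)
{
    // If the allocation kind holds every binding inline, keep them all fixed;
    // otherwise only the reserved slots stay fixed and the bindings fall into
    // the dynamic slots array as one contiguous run.
    return gcKindSlots >= slotSpan ? gcKindSlots : reservedSlots;
}
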


@ -67,7 +67,7 @@ Bindings::lookup(JSContext *cx, JSAtom *name, unsigned *indexp) const
if (indexp) if (indexp)
*indexp = shape->shortid(); *indexp = shape->shortid();
if (shape->getter() == CallObject::getArgOp) if (shape->setter() == CallObject::setArgOp)
return ARGUMENT; return ARGUMENT;
return shape->writable() ? VARIABLE : CONSTANT; return shape->writable() ? VARIABLE : CONSTANT;
@ -102,14 +102,14 @@ Bindings::add(JSContext *cx, HandleAtom name, BindingKind kind)
if (kind == ARGUMENT) { if (kind == ARGUMENT) {
JS_ASSERT(nvars == 0); JS_ASSERT(nvars == 0);
indexp = &nargs; indexp = &nargs;
getter = CallObject::getArgOp; getter = NULL;
setter = CallObject::setArgOp; setter = CallObject::setArgOp;
slot += nargs; slot += nargs;
} else { } else {
JS_ASSERT(kind == VARIABLE || kind == CONSTANT); JS_ASSERT(kind == VARIABLE || kind == CONSTANT);
indexp = &nvars; indexp = &nvars;
getter = CallObject::getVarOp; getter = NULL;
setter = CallObject::setVarOp; setter = CallObject::setVarOp;
if (kind == CONSTANT) if (kind == CONSTANT)
attrs |= JSPROP_READONLY; attrs |= JSPROP_READONLY;
@ -208,7 +208,7 @@ Bindings::getLocalNameArray(JSContext *cx, BindingNames *namesp)
const Shape &shape = r.front(); const Shape &shape = r.front();
unsigned index = uint16_t(shape.shortid()); unsigned index = uint16_t(shape.shortid());
if (shape.getter() == CallObject::getArgOp) { if (shape.setter() == CallObject::setArgOp) {
JS_ASSERT(index < nargs); JS_ASSERT(index < nargs);
names[index].kind = ARGUMENT; names[index].kind = ARGUMENT;
} else { } else {
@ -221,7 +221,7 @@ Bindings::getLocalNameArray(JSContext *cx, BindingNames *namesp)
names[index].maybeAtom = JSID_TO_ATOM(shape.propid()); names[index].maybeAtom = JSID_TO_ATOM(shape.propid());
} else { } else {
JS_ASSERT(JSID_IS_INT(shape.propid())); JS_ASSERT(JSID_IS_INT(shape.propid()));
JS_ASSERT(shape.getter() == CallObject::getArgOp); JS_ASSERT(shape.setter() == CallObject::setArgOp);
names[index].maybeAtom = NULL; names[index].maybeAtom = NULL;
} }
} }
@ -241,7 +241,7 @@ Bindings::lastArgument() const
const js::Shape *shape = lastVariable(); const js::Shape *shape = lastVariable();
if (nvars > 0) { if (nvars > 0) {
while (shape->previous() && shape->getter() != CallObject::getArgOp) while (shape->previous() && shape->setter() != CallObject::setArgOp)
shape = shape->previous(); shape = shape->previous();
} }
return shape; return shape;
@ -604,10 +604,10 @@ js::XDRScript(XDRState<mode> *xdr, JSScript **scriptp, JSScript *parentScript)
script->bindingsAccessedDynamically = true; script->bindingsAccessedDynamically = true;
if (scriptBits & (1 << ArgumentsHasLocalBinding)) { if (scriptBits & (1 << ArgumentsHasLocalBinding)) {
PropertyName *arguments = cx->runtime->atomState.argumentsAtom; PropertyName *arguments = cx->runtime->atomState.argumentsAtom;
unsigned slot; unsigned local;
DebugOnly<BindingKind> kind = script->bindings.lookup(cx, arguments, &slot); DebugOnly<BindingKind> kind = script->bindings.lookup(cx, arguments, &local);
JS_ASSERT(kind == VARIABLE || kind == CONSTANT); JS_ASSERT(kind == VARIABLE || kind == CONSTANT);
script->setArgumentsHasLocalBinding(slot); script->setArgumentsHasLocalBinding(local);
} }
if (scriptBits & (1 << NeedsArgsObj)) if (scriptBits & (1 << NeedsArgsObj))
script->setNeedsArgsObj(true); script->setNeedsArgsObj(true);
@ -1311,8 +1311,8 @@ JSScript::NewScriptFromEmitter(JSContext *cx, BytecodeEmitter *bce)
if (bce->sc->inFunction()) { if (bce->sc->inFunction()) {
if (bce->sc->funArgumentsHasLocalBinding()) { if (bce->sc->funArgumentsHasLocalBinding()) {
// This must precede the script->bindings.transfer() call below. // This must precede the script->bindings.transfer() call below
script->setArgumentsHasLocalBinding(bce->sc->argumentsLocalSlot()); script->setArgumentsHasLocalBinding(bce->sc->argumentsLocal());
if (bce->sc->funDefinitelyNeedsArgsObj()) if (bce->sc->funDefinitelyNeedsArgsObj())
script->setNeedsArgsObj(true); script->setNeedsArgsObj(true);
} else { } else {
@ -1799,7 +1799,7 @@ js::CloneScript(JSContext *cx, JSScript *src)
dst->nslots = src->nslots; dst->nslots = src->nslots;
dst->staticLevel = src->staticLevel; dst->staticLevel = src->staticLevel;
if (src->argumentsHasLocalBinding()) { if (src->argumentsHasLocalBinding()) {
dst->setArgumentsHasLocalBinding(src->argumentsLocalSlot()); dst->setArgumentsHasLocalBinding(src->argumentsLocal());
if (src->analyzedArgsUsage()) if (src->analyzedArgsUsage())
dst->setNeedsArgsObj(src->needsArgsObj()); dst->setNeedsArgsObj(src->needsArgsObj());
} }
@ -2127,10 +2127,10 @@ JSScript::markChildren(JSTracer *trc)
} }
void void
JSScript::setArgumentsHasLocalBinding(uint16_t slot) JSScript::setArgumentsHasLocalBinding(uint16_t local)
{ {
argsHasLocalBinding_ = true; argsHasLocalBinding_ = true;
argsSlot_ = slot; argsLocal_ = local;
needsArgsAnalysis_ = true; needsArgsAnalysis_ = true;
} }
@ -2162,7 +2162,7 @@ JSScript::applySpeculationFailed(JSContext *cx, JSScript *script_)
script->needsArgsObj_ = true; script->needsArgsObj_ = true;
const unsigned slot = script->argumentsLocalSlot(); const unsigned local = script->argumentsLocal();
/* /*
* By design, the apply-arguments optimization is only made when there * By design, the apply-arguments optimization is only made when there
@ -2179,22 +2179,20 @@ JSScript::applySpeculationFailed(JSContext *cx, JSScript *script_)
for (AllFramesIter i(cx->stack.space()); !i.done(); ++i) { for (AllFramesIter i(cx->stack.space()); !i.done(); ++i) {
StackFrame *fp = i.fp(); StackFrame *fp = i.fp();
if (fp->isFunctionFrame() && fp->script() == script) { if (fp->isFunctionFrame() && fp->script() == script) {
if (!fp->hasArgsObj()) { ArgumentsObject *argsobj = ArgumentsObject::createExpected(cx, fp);
ArgumentsObject *obj = ArgumentsObject::create(cx, fp); if (!argsobj) {
if (!obj) {
/* /*
* We can't leave stack frames where script->needsArgsObj * We can't leave stack frames with script->needsArgsObj but no
* and !fp->hasArgsObj. It is, however, safe to leave frames * arguments object. It is, however, safe to leave frames with
* where fp->hasArgsObj and !fp->script->needsArgsObj. * an arguments object but !script->needsArgsObj.
*/ */
script->needsArgsObj_ = false; script->needsArgsObj_ = false;
return false; return false;
} }
/* Note: 'arguments' may have already been overwritten. */ /* Note: 'arguments' may have already been overwritten. */
if (fp->localSlot(slot).isMagic(JS_OPTIMIZED_ARGUMENTS)) if (fp->unaliasedLocal(local).isMagic(JS_OPTIMIZED_ARGUMENTS))
fp->localSlot(slot) = ObjectValue(*obj); fp->unaliasedLocal(local) = ObjectValue(*argsobj);
}
} }
} }
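
The fix-up loop above depends on the JS_OPTIMIZED_ARGUMENTS sentinel: if the local still holds the magic value, the freshly created arguments object replaces it; if the script already overwrote 'arguments', the slot is left untouched. A tiny sketch of that sentinel-then-materialize pattern with stand-in types:

// Stand-ins; the real code tests Value::isMagic(JS_OPTIMIZED_ARGUMENTS).
struct LocalSlotSketch { bool holdsOptimizedMagic; void *object; };

// Replace the slot only if the script has not already overwritten 'arguments'.
void materializeArgumentsSketch(LocalSlotSketch &local, void *argsObject)
{
    if (local.holdsOptimizedMagic) {
        local.holdsOptimizedMagic = false;
        local.object = argsObject;
    }
}
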


@ -114,12 +114,12 @@ class Bindings
* These functions map between argument/var indices [0, nargs/nvars) and * These functions map between argument/var indices [0, nargs/nvars) and
* and Bindings indices [0, nargs + nvars). * and Bindings indices [0, nargs + nvars).
*/ */
bool bindingIsArg(uint16_t i) const { return i < nargs; } bool slotIsArg(uint16_t i) const { return i < nargs; }
bool bindingIsLocal(uint16_t i) const { return i >= nargs; } bool slotIsLocal(uint16_t i) const { return i >= nargs; }
uint16_t argToBinding(uint16_t i) { JS_ASSERT(i < nargs); return i; } uint16_t argToSlot(uint16_t i) { JS_ASSERT(i < nargs); return i; }
uint16_t localToBinding(uint16_t i) { return i + nargs; } uint16_t localToSlot(uint16_t i) { return i + nargs; }
uint16_t bindingToArg(uint16_t i) { JS_ASSERT(bindingIsArg(i)); return i; } uint16_t slotToArg(uint16_t i) { JS_ASSERT(slotIsArg(i)); return i; }
uint16_t bindingToLocal(uint16_t i) { JS_ASSERT(bindingIsLocal(i)); return i - nargs; } uint16_t slotToLocal(uint16_t i) { JS_ASSERT(slotIsLocal(i)); return i - nargs; }
/* Ensure these bindings have a shape lineage. */ /* Ensure these bindings have a shape lineage. */
inline bool ensureShape(JSContext *cx); inline bool ensureShape(JSContext *cx);
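
These renamed helpers define the slot numbering the rest of the patch relies on: formals occupy slots [0, nargs) and locals follow at [nargs, nargs + nvars). A minimal standalone restatement of that arithmetic (hypothetical class, same mapping):

#include <cassert>
#include <cstdint>

class BindingsSketch {
    uint16_t nargs, nvars;
  public:
    BindingsSketch(uint16_t a, uint16_t v) : nargs(a), nvars(v) {}

    bool slotIsArg(uint16_t slot) const   { return slot < nargs; }
    bool slotIsLocal(uint16_t slot) const { return slot >= nargs; }

    uint16_t argToSlot(uint16_t arg) const     { assert(arg < nargs);   return arg; }
    uint16_t localToSlot(uint16_t local) const { assert(local < nvars); return uint16_t(local + nargs); }

    uint16_t slotToArg(uint16_t slot) const   { assert(slotIsArg(slot));   return slot; }
    uint16_t slotToLocal(uint16_t slot) const { assert(slotIsLocal(slot)); return uint16_t(slot - nargs); }
};
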
@ -493,7 +493,7 @@ struct JSScript : public js::gc::Cell
uint16_t staticLevel;/* static level for display maintenance */ uint16_t staticLevel;/* static level for display maintenance */
private: private:
uint16_t argsSlot_; /* slot holding 'arguments' (if argumentsHasLocalBindings) */ uint16_t argsLocal_; /* local holding 'arguments' (if argumentsHasLocalBindings) */
// 8-bit fields. // 8-bit fields.
@ -588,8 +588,8 @@ struct JSScript : public js::gc::Cell
/* See ContextFlags::funArgumentsHasLocalBinding comment. */ /* See ContextFlags::funArgumentsHasLocalBinding comment. */
bool argumentsHasLocalBinding() const { return argsHasLocalBinding_; } bool argumentsHasLocalBinding() const { return argsHasLocalBinding_; }
jsbytecode *argumentsBytecode() const { JS_ASSERT(code[0] == JSOP_ARGUMENTS); return code; } jsbytecode *argumentsBytecode() const { JS_ASSERT(code[0] == JSOP_ARGUMENTS); return code; }
unsigned argumentsLocalSlot() const { JS_ASSERT(argsHasLocalBinding_); return argsSlot_; } unsigned argumentsLocal() const { JS_ASSERT(argsHasLocalBinding_); return argsLocal_; }
void setArgumentsHasLocalBinding(uint16_t slot); void setArgumentsHasLocalBinding(uint16_t local);
/* /*
* As an optimization, even when argsHasLocalBinding, the function prologue * As an optimization, even when argsHasLocalBinding, the function prologue
@ -943,12 +943,6 @@ JS_STATIC_ASSERT(sizeof(JSScript::ArrayBitsT) * 8 >= JSScript::LIMIT);
/* If this fails, add/remove padding within JSScript. */ /* If this fails, add/remove padding within JSScript. */
JS_STATIC_ASSERT(sizeof(JSScript) % js::gc::Cell::CellSize == 0); JS_STATIC_ASSERT(sizeof(JSScript) % js::gc::Cell::CellSize == 0);
static JS_INLINE unsigned
StackDepth(JSScript *script)
{
return script->nslots - script->nfixed;
}
/* /*
* New-script-hook calling is factored from NewScriptFromEmitter so that it * New-script-hook calling is factored from NewScriptFromEmitter so that it
* and callers of XDRScript can share this code. In the case of callers * and callers of XDRScript can share this code. In the case of callers


@ -61,8 +61,8 @@ Shape *
Bindings::initialShape(JSContext *cx) const Bindings::initialShape(JSContext *cx) const
{ {
/* Get an allocation kind to match an empty call object. */ /* Get an allocation kind to match an empty call object. */
gc::AllocKind kind = gc::FINALIZE_OBJECT4; gc::AllocKind kind = gc::FINALIZE_OBJECT2_BACKGROUND;
JS_ASSERT(gc::GetGCKindSlots(kind) == CallObject::RESERVED_SLOTS + 1); JS_ASSERT(gc::GetGCKindSlots(kind) == CallObject::RESERVED_SLOTS);
return EmptyShape::getInitialShape(cx, &CallClass, NULL, NULL, kind, return EmptyShape::getInitialShape(cx, &CallClass, NULL, NULL, kind,
BaseShape::VAROBJ); BaseShape::VAROBJ);


@ -2215,17 +2215,13 @@ LambdaIsGetElem(JSObject &lambda, JSContext *cx)
* real name lookup since this can trigger observable effects. * real name lookup since this can trigger observable effects.
*/ */
Value b; Value b;
JSObject *scope = cx->stack.currentScriptedScopeChain(); RootedObject scope(cx);
scope = cx->stack.currentScriptedScopeChain();
while (true) { while (true) {
if (scope->isCall()) { if (!scope->isCall() && !scope->isBlock())
if (scope->asCall().containsVarOrArg(bname, &b, cx))
break;
} else if (scope->isBlock()) {
if (scope->asClonedBlock().containsVar(bname, &b, cx))
break;
} else {
return NULL; return NULL;
} if (HasDataProperty(cx, scope, bname, &b))
break;
scope = &scope->asScope().enclosingScope(); scope = &scope->asScope().enclosingScope();
} }


@ -215,10 +215,11 @@ typedef enum JSWhyMagic
JS_ARG_POISON, /* used in debug builds to catch tracing errors */ JS_ARG_POISON, /* used in debug builds to catch tracing errors */
JS_SERIALIZE_NO_NODE, /* an empty subnode in the AST serializer */ JS_SERIALIZE_NO_NODE, /* an empty subnode in the AST serializer */
JS_LAZY_ARGUMENTS, /* lazy arguments value on the stack */ JS_LAZY_ARGUMENTS, /* lazy arguments value on the stack */
JS_UNASSIGNED_ARGUMENTS, /* the initial value of callobj.arguments */
JS_OPTIMIZED_ARGUMENTS, /* optimized-away 'arguments' value */ JS_OPTIMIZED_ARGUMENTS, /* optimized-away 'arguments' value */
JS_IS_CONSTRUCTING, /* magic value passed to natives to indicate construction */ JS_IS_CONSTRUCTING, /* magic value passed to natives to indicate construction */
JS_OVERWRITTEN_CALLEE, /* arguments.callee has been overwritten */ JS_OVERWRITTEN_CALLEE, /* arguments.callee has been overwritten */
JS_FORWARD_TO_CALL_OBJECT, /* args object element stored in call object */
JS_BLOCK_NEEDS_CLONE, /* value of static block object slot */
JS_GENERIC_MAGIC /* for local use */ JS_GENERIC_MAGIC /* for local use */
} JSWhyMagic; } JSWhyMagic;
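
The new JS_FORWARD_TO_CALL_OBJECT value turns an arguments-object element into a tombstone: when a formal is aliased by the call object, the element keeps the sentinel and the canonical value lives in the call object's slot. A hedged sketch of that forwarding read, with stand-in types:

// Stand-ins for the Value/ArgumentsObject/CallObject machinery (assumed).
enum class ElementKindSketch { Plain, ForwardToCallObject };

struct ElementSketch    { ElementKindSketch kind; double plainValue; };
struct CallObjectSketch { double formalSlot[8]; };  // size illustrative only

// Either the value lives in the arguments object itself, or the sentinel says
// the canonical copy is the corresponding call-object formal slot.
double readElementSketch(const ElementSketch *elements,
                         const CallObjectSketch &callObj, unsigned i)
{
    if (elements[i].kind == ElementKindSketch::ForwardToCallObject)
        return callObj.formalSlot[i];
    return elements[i].plainValue;
}
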


@ -1077,24 +1077,15 @@ mjit::Compiler::generatePrologue()
markUndefinedLocals(); markUndefinedLocals();
types::TypeScriptNesting *nesting = script->nesting();
/*
* Run the function prologue if necessary. This is always done in a
* stub for heavyweight functions (including nesting outer functions).
*/
JS_ASSERT_IF(nesting && nesting->children, script->function()->isHeavyweight());
if (script->function()->isHeavyweight()) {
prepareStubCall(Uses(0));
INLINE_STUBCALL(stubs::FunctionFramePrologue, REJOIN_FUNCTION_PROLOGUE);
} else {
/* /*
* Load the scope chain into the frame if it will be needed by NAME * Load the scope chain into the frame if it will be needed by NAME
* opcodes or by the nesting prologue below. The scope chain is * opcodes or by the nesting prologue below. The scope chain is always
* always set for global and eval frames, and will have been set by * set for global and eval frames, and will have been set by
* CreateFunCallObject for heavyweight function frames. * HeavyweightFunctionPrologue for heavyweight function frames.
*/ */
if (analysis->usesScopeChain() || nesting) { if (!script->function()->isHeavyweight() &&
(analysis->usesScopeChain() || script->nesting()))
{
RegisterID t0 = Registers::ReturnReg; RegisterID t0 = Registers::ReturnReg;
Jump hasScope = masm.branchTest32(Assembler::NonZero, Jump hasScope = masm.branchTest32(Assembler::NonZero,
FrameFlagsAddress(), Imm32(StackFrame::HAS_SCOPECHAIN)); FrameFlagsAddress(), Imm32(StackFrame::HAS_SCOPECHAIN));
@ -1104,35 +1095,6 @@ mjit::Compiler::generatePrologue()
hasScope.linkTo(masm.label(), &masm); hasScope.linkTo(masm.label(), &masm);
} }
if (nesting) {
/*
* Inline the common case for the nesting prologue: the
* function is a non-heavyweight inner function with no
* children of its own. We ensure during inference that the
* outer function does not add scope objects for 'let' or
* 'with', so that the frame's scope chain will be
* the parent's call object, and if it differs from the
* parent's current activation then the parent is reentrant.
*/
JSScript *parent = nesting->parent;
JS_ASSERT(parent);
JS_ASSERT_IF(parent->hasAnalysis() && parent->analysis()->ranBytecode(),
!parent->analysis()->addsScopeObjects());
RegisterID t0 = Registers::ReturnReg;
masm.move(ImmPtr(&parent->nesting()->activeCall), t0);
masm.loadPtr(Address(t0), t0);
Address scopeChain(JSFrameReg, StackFrame::offsetOfScopeChain());
Jump mismatch = masm.branchPtr(Assembler::NotEqual, t0, scopeChain);
masm.add32(Imm32(1), AbsoluteAddress(&nesting->activeFrames));
stubcc.linkExitDirect(mismatch, stubcc.masm.label());
OOL_STUBCALL(stubs::FunctionFramePrologue, REJOIN_FUNCTION_PROLOGUE);
stubcc.crossJump(stubcc.masm.jump(), masm.label());
}
}
/* /*
* When 'arguments' is used in the script, it may be optimized away * When 'arguments' is used in the script, it may be optimized away
* which involves reading from the stack frame directly, including * which involves reading from the stack frame directly, including
@ -1161,10 +1123,51 @@ mjit::Compiler::generatePrologue()
ensureDoubleArguments(); ensureDoubleArguments();
} }
/* Inline StackFrame::prologue. */
if (script->isActiveEval && script->strictModeCode) {
prepareStubCall(Uses(0));
INLINE_STUBCALL(stubs::StrictEvalPrologue, REJOIN_EVAL_PROLOGUE);
} else if (script->function()) {
if (script->function()->isHeavyweight()) {
prepareStubCall(Uses(0));
INLINE_STUBCALL(stubs::HeavyweightFunctionPrologue, REJOIN_FUNCTION_PROLOGUE);
} else if (types::TypeScriptNesting *nesting = script->nesting()) {
/*
* Inline the common case for the nesting prologue: the
* function is a non-heavyweight inner function with no
* children of its own. We ensure during inference that the
* outer function does not add scope objects for 'let' or
* 'with', so that the frame's scope chain will be
* the parent's call object, and if it differs from the
* parent's current activation then the parent is reentrant.
*/
JSScript *parent = nesting->parent;
JS_ASSERT(parent);
JS_ASSERT_IF(parent->hasAnalysis() && parent->analysis()->ranBytecode(),
!parent->analysis()->addsScopeObjects());
RegisterID t0 = Registers::ReturnReg;
masm.move(ImmPtr(&parent->nesting()->activeCall), t0);
masm.loadPtr(Address(t0), t0);
Address scopeChain(JSFrameReg, StackFrame::offsetOfScopeChain());
Jump mismatch = masm.branchPtr(Assembler::NotEqual, t0, scopeChain);
masm.add32(Imm32(1), AbsoluteAddress(&nesting->activeFrames));
masm.load32(FrameFlagsAddress(), t0);
masm.or32(Imm32(StackFrame::HAS_NESTING), t0);
masm.store32(t0, FrameFlagsAddress());
stubcc.linkExitDirect(mismatch, stubcc.masm.label());
OOL_STUBCALL(stubs::TypeNestingPrologue, REJOIN_FUNCTION_PROLOGUE);
stubcc.crossJump(stubcc.masm.jump(), masm.label());
}
if (isConstructing) { if (isConstructing) {
if (!constructThis()) if (!constructThis())
return Compile_Error; return Compile_Error;
} }
}
if (debugMode()) { if (debugMode()) {
prepareStubCall(Uses(0)); prepareStubCall(Uses(0));
@ -1209,8 +1212,8 @@ void
mjit::Compiler::markUndefinedLocals() mjit::Compiler::markUndefinedLocals()
{ {
/* /*
* Set locals to undefined, as in initCallFrameLatePrologue. * Set locals to undefined. Skip locals which aren't closed and are known
* Skip locals which aren't closed and are known to be defined before used, * to be defined before used,
*/ */
for (uint32_t i = 0; i < script->nfixed; i++) for (uint32_t i = 0; i < script->nfixed; i++)
markUndefinedLocal(0, i); markUndefinedLocal(0, i);
@ -2803,6 +2806,8 @@ mjit::Compiler::generateMethod()
uint32_t arg = GET_SLOTNO(PC); uint32_t arg = GET_SLOTNO(PC);
if (JSObject *singleton = pushedSingleton(0)) if (JSObject *singleton = pushedSingleton(0))
frame.push(ObjectValue(*singleton)); frame.push(ObjectValue(*singleton));
else if (script->argsObjAliasesFormals())
jsop_aliasedArg(arg, /* get = */ true);
else else
frame.pushArg(arg); frame.pushArg(arg);
} }
@ -2816,7 +2821,13 @@ mjit::Compiler::generateMethod()
{ {
jsbytecode *next = &PC[JSOP_SETARG_LENGTH]; jsbytecode *next = &PC[JSOP_SETARG_LENGTH];
bool pop = JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next); bool pop = JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next);
frame.storeArg(GET_SLOTNO(PC), pop);
uint32_t arg = GET_SLOTNO(PC);
if (script->argsObjAliasesFormals())
jsop_aliasedArg(arg, /* get = */ false, pop);
else
frame.storeArg(arg, pop);
updateVarType(); updateVarType();
if (pop) { if (pop) {
@ -2827,26 +2838,11 @@ mjit::Compiler::generateMethod()
} }
END_CASE(JSOP_SETARG) END_CASE(JSOP_SETARG)
BEGIN_CASE(JSOP_GETLOCAL)
BEGIN_CASE(JSOP_CALLLOCAL)
BEGIN_CASE(JSOP_GETALIASEDVAR) BEGIN_CASE(JSOP_GETALIASEDVAR)
BEGIN_CASE(JSOP_CALLALIASEDVAR) BEGIN_CASE(JSOP_CALLALIASEDVAR)
{ {
/* This is all temporary until bug 659577. */
if (JSObject *singleton = pushedSingleton(0)) {
frame.push(ObjectValue(*singleton));
} else {
ScopeCoordinate sc = ScopeCoordinate(PC);
if (script->bindings.bindingIsArg(sc.frameBinding))
frame.pushArg(script->bindings.bindingToArg(sc.frameBinding));
else
frame.pushLocal(script->bindings.bindingToLocal(sc.frameBinding));
}
}
END_CASE(JSOP_GETALIASEDVAR)
BEGIN_CASE(JSOP_GETLOCAL)
BEGIN_CASE(JSOP_CALLLOCAL)
{
/* /*
* Update the var type unless we are about to pop the variable. * Update the var type unless we are about to pop the variable.
* Sync is not guaranteed for types of dead locals, and GETLOCAL * Sync is not guaranteed for types of dead locals, and GETLOCAL
@ -2855,46 +2851,37 @@ mjit::Compiler::generateMethod()
jsbytecode *next = &PC[JSOP_GETLOCAL_LENGTH]; jsbytecode *next = &PC[JSOP_GETLOCAL_LENGTH];
if (JSOp(*next) != JSOP_POP || analysis->jumpTarget(next)) if (JSOp(*next) != JSOP_POP || analysis->jumpTarget(next))
restoreVarType(); restoreVarType();
uint32_t slot = GET_SLOTNO(PC);
if (JSObject *singleton = pushedSingleton(0)) if (JSObject *singleton = pushedSingleton(0))
frame.push(ObjectValue(*singleton)); frame.push(ObjectValue(*singleton));
else if (JOF_OPTYPE(*PC) == JOF_SCOPECOORD)
jsop_aliasedVar(ScopeCoordinate(PC), /* get = */ true);
else else
frame.pushLocal(slot); frame.pushLocal(GET_SLOTNO(PC));
PC += GetBytecodeLength(PC);
break;
} }
END_CASE(JSOP_GETLOCAL) END_CASE(JSOP_GETLOCAL)
BEGIN_CASE(JSOP_SETLOCAL)
BEGIN_CASE(JSOP_SETALIASEDVAR) BEGIN_CASE(JSOP_SETALIASEDVAR)
{ {
/* This is all temporary until bug 659577. */ jsbytecode *next = &PC[GetBytecodeLength(PC)];
jsbytecode *next = &PC[JSOP_SETALIASEDVAR_LENGTH];
bool pop = JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next); bool pop = JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next);
ScopeCoordinate sc = ScopeCoordinate(PC); if (JOF_OPTYPE(*PC) == JOF_SCOPECOORD)
if (script->bindings.bindingIsArg(sc.frameBinding)) jsop_aliasedVar(ScopeCoordinate(PC), /* get = */ false, pop);
frame.storeArg(script->bindings.bindingToArg(sc.frameBinding), pop);
else else
frame.storeLocal(script->bindings.bindingToLocal(sc.frameBinding), pop);
updateVarType();
if (pop) {
frame.pop();
PC += JSOP_SETALIASEDVAR_LENGTH + JSOP_POP_LENGTH;
break;
}
}
END_CASE(JSOP_SETALIASEDVAR)
BEGIN_CASE(JSOP_SETLOCAL)
{
jsbytecode *next = &PC[JSOP_SETLOCAL_LENGTH];
bool pop = JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next);
frame.storeLocal(GET_SLOTNO(PC), pop); frame.storeLocal(GET_SLOTNO(PC), pop);
updateVarType(); updateVarType();
if (pop) { if (pop) {
frame.pop(); frame.pop();
PC += JSOP_SETLOCAL_LENGTH + JSOP_POP_LENGTH; PC = next + JSOP_POP_LENGTH;
break; break;
} }
PC = next;
break;
} }
END_CASE(JSOP_SETLOCAL) END_CASE(JSOP_SETLOCAL)
@ -3792,47 +3779,12 @@ mjit::Compiler::emitReturn(FrameEntry *fe)
return; return;
} }
/* /* Inline StackFrame::epilogue. */
* Outside the mjit, activation objects (call objects and arguments objects) are put if (debugMode()) {
* by ContextStack::pop* members. For JSOP_RETURN, the interpreter only calls prepareStubCall(Uses(0));
* popInlineFrame if fp != entryFrame since the VM protocol is that Invoke/Execute are INLINE_STUBCALL(stubs::Epilogue, REJOIN_NONE);
* responsible for pushing/popping the initial frame. However, an mjit function } else if (script->function() && script->nesting()) {
* epilogue doesn't treat the initial StackFrame of its VMFrame specially: it always masm.sub32(Imm32(1), AbsoluteAddress(&script->nesting()->activeFrames));
* puts activation objects. And furthermore, if the last mjit frame throws, the mjit
* does *not* put the activation objects. So we can't assume any particular state of
* puttedness upon exit from the mjit.
*
* To avoid double-putting, EnterMethodJIT calls updateEpilogueFlags to clear the
* entry frame's hasArgsObj() and hasCallObj() flags if the given objects have already
* been put.
*/
if (script->function()) {
types::TypeScriptNesting *nesting = script->nesting();
if (script->function()->isHeavyweight() || script->needsArgsObj() ||
(nesting && nesting->children) || debugMode())
{
prepareStubCall(Uses(fe ? 1 : 0));
INLINE_STUBCALL(stubs::FunctionFrameEpilogue, REJOIN_NONE);
} else {
/* if hasCallObj() */
Jump putObjs = masm.branchTest32(Assembler::NonZero,
Address(JSFrameReg, StackFrame::offsetOfFlags()),
Imm32(StackFrame::HAS_CALL_OBJ));
stubcc.linkExit(putObjs, Uses(frame.frameSlots()));
stubcc.leave();
OOL_STUBCALL(stubs::FunctionFrameEpilogue, REJOIN_NONE);
emitReturnValue(&stubcc.masm, fe);
emitFinalReturn(stubcc.masm);
/*
* Do frame count balancing inline for inner functions in a nesting
* with no children of their own.
*/
if (nesting)
masm.sub32(Imm32(1), AbsoluteAddress(&nesting->activeFrames));
}
} }
emitReturnValue(&masm, fe); emitReturnValue(&masm, fe);
@ -5718,7 +5670,7 @@ mjit::Compiler::jsop_bindname(PropertyName *name)
analysis->resolveNameAccess(cx, NameToId(name), true); analysis->resolveNameAccess(cx, NameToId(name), true);
if (access.nesting) { if (access.nesting) {
RegisterID reg = frame.allocReg(); RegisterID reg = frame.allocReg();
JSObject **pobj = &access.nesting->activeCall; CallObject **pobj = &access.nesting->activeCall;
masm.move(ImmPtr(pobj), reg); masm.move(ImmPtr(pobj), reg);
masm.loadPtr(Address(reg), reg); masm.loadPtr(Address(reg), reg);
frame.pushTypedPayload(JSVAL_TYPE_OBJECT, reg); frame.pushTypedPayload(JSVAL_TYPE_OBJECT, reg);
@ -5829,6 +5781,100 @@ mjit::Compiler::jsop_bindname(PropertyName *name)
} }
#endif #endif
void
mjit::Compiler::jsop_aliasedArg(unsigned arg, bool get, bool poppedAfter)
{
RegisterID reg = frame.allocReg();
masm.loadPtr(Address(JSFrameReg, StackFrame::offsetOfArgsObj()), reg);
size_t dataOff = ArgumentsObject::getDataSlotOffset();
masm.loadPrivate(Address(reg, dataOff), reg);
int32_t argsOff = ArgumentsData::offsetOfArgs() + arg * sizeof(Value);
masm.addPtr(Imm32(argsOff), reg, reg);
if (get) {
FrameEntry *fe = frame.getArg(arg);
JSValueType type = fe->isTypeKnown() ? fe->getKnownType() : JSVAL_TYPE_UNKNOWN;
frame.push(Address(reg), type, true /* = reuseBase */);
} else {
#ifdef JSGC_INCREMENTAL_MJ
if (cx->compartment->needsBarrier()) {
/* Write barrier. */
stubcc.linkExit(masm.testGCThing(Address(reg)), Uses(0));
stubcc.leave();
stubcc.masm.move(reg, Registers::ArgReg1);
OOL_STUBCALL(stubs::GCThingWriteBarrier, REJOIN_NONE);
stubcc.rejoin(Changes(0));
}
#endif
frame.storeTo(frame.peek(-1), Address(reg), poppedAfter);
frame.freeReg(reg);
}
}
void
mjit::Compiler::jsop_aliasedVar(ScopeCoordinate sc, bool get, bool poppedAfter)
{
RegisterID reg = frame.allocReg();
masm.loadPtr(Address(JSFrameReg, StackFrame::offsetOfScopeChain()), reg);
for (unsigned i = 0; i < sc.hops; i++)
masm.loadPayload(Address(reg, ScopeObject::offsetOfEnclosingScope()), reg);
unsigned slot = ScopeObject::CALL_BLOCK_RESERVED_SLOTS + sc.slot;
/*
* TODO bug 753158: Call and Block objects should use the same layout
* strategy: up to the maximum numFixedSlots and overflow (if any) in
* dynamic slots. For now, we special case for different layouts:
*/
Address addr;
if (ScopeCoordinateBlockChain(script, PC)) {
/*
* Block objects use a fixed AllocKind which means an invariant number
* of fixed slots. Any slot below the fixed slot count is inline, any
* slot over is in the dynamic slots.
*/
uint32_t nfixed = gc::GetGCKindSlots(BlockObject::FINALIZE_KIND);
if (nfixed <= slot) {
masm.loadPtr(Address(reg, JSObject::offsetOfSlots()), reg);
addr = Address(reg, (slot - nfixed) * sizeof(Value));
} else {
addr = Address(reg, JSObject::getFixedSlotOffset(slot));
}
} else {
/*
* Using special-case hackery in Shape::getChildBinding, CallObject
* slots are either altogether in fixed slots or altogether in dynamic
* slots (by having numFixed == RESERVED_SLOTS).
*/
if (script->bindings.lastShape()->numFixedSlots() <= slot) {
masm.loadPtr(Address(reg, JSObject::offsetOfSlots()), reg);
addr = Address(reg, sc.slot * sizeof(Value));
} else {
addr = Address(reg, JSObject::getFixedSlotOffset(slot));
}
}
if (get) {
FrameEntry *fe = script->bindings.slotIsLocal(sc.slot)
? frame.getLocal(script->bindings.slotToLocal(sc.slot))
: frame.getArg(script->bindings.slotToArg(sc.slot));
JSValueType type = fe->isTypeKnown() ? fe->getKnownType() : JSVAL_TYPE_UNKNOWN;
frame.push(addr, type, true /* = reuseBase */);
} else {
#ifdef JSGC_INCREMENTAL_MJ
if (cx->compartment->needsBarrier()) {
/* Write barrier. */
stubcc.linkExit(masm.testGCThing(addr), Uses(0));
stubcc.leave();
stubcc.masm.addPtr(Imm32(addr.offset), addr.base, Registers::ArgReg1);
OOL_STUBCALL(stubs::GCThingWriteBarrier, REJOIN_NONE);
stubcc.rejoin(Changes(0));
}
#endif
frame.storeTo(frame.peek(-1), addr, poppedAfter);
frame.freeReg(reg);
}
}
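
Both branches above answer the same question: does the slot live in the object's fixed (inline) storage or in its dynamic slots array? A simplified standalone model of that address computation (hypothetical object layout; the real code emits machine loads through the masm):

#include <cstdint>

struct ValueSketch { uint64_t bits; };

// Hypothetical object model: nfixed inline slots, overflow in a heap array.
struct ScopeObjectSketch {
    uint32_t     nfixed;
    ValueSketch  fixedSlot[4];    // inline storage, size illustrative only
    ValueSketch *dynamicSlots;    // holds slots [nfixed, ...)
};

ValueSketch *slotAddressSketch(ScopeObjectSketch &obj, uint32_t slot)
{
    if (slot < obj.nfixed)
        return &obj.fixedSlot[slot];
    return &obj.dynamicSlots[slot - obj.nfixed];
}
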
void void
mjit::Compiler::jsop_this() mjit::Compiler::jsop_this()
{ {


@ -626,8 +626,8 @@ private:
void jsop_bindname(PropertyName *name); void jsop_bindname(PropertyName *name);
void jsop_setglobal(uint32_t index); void jsop_setglobal(uint32_t index);
void jsop_getprop_slow(PropertyName *name, bool forPrototype = false); void jsop_getprop_slow(PropertyName *name, bool forPrototype = false);
void jsop_getarg(uint32_t slot); void jsop_aliasedArg(unsigned i, bool get, bool poppedAfter = false);
void jsop_setarg(uint32_t slot, bool popped); void jsop_aliasedVar(ScopeCoordinate sc, bool get, bool poppedAfter = false);
void jsop_this(); void jsop_this();
void emitReturn(FrameEntry *fe); void emitReturn(FrameEntry *fe);
void emitFinalReturn(Assembler &masm); void emitFinalReturn(Assembler &masm);


@ -62,7 +62,7 @@ FindExceptionHandler(JSContext *cx)
*/ */
jsbytecode *pc = script->main() + tn->start + tn->length; jsbytecode *pc = script->main() + tn->start + tn->length;
cx->regs().pc = pc; cx->regs().pc = pc;
cx->regs().sp = fp->base() + tn->stackDepth; cx->regs().sp = cx->regs().spForStackDepth(tn->stackDepth);
switch (tn->kind) { switch (tn->kind) {
case JSTRY_CATCH: case JSTRY_CATCH:
@ -119,22 +119,6 @@ FindExceptionHandler(JSContext *cx)
/* /*
* Clean up a frame and return. * Clean up a frame and return.
*/ */
static void
InlineReturn(VMFrame &f)
{
JS_ASSERT(f.fp() != f.entryfp);
AssertValidFunctionScopeChainAtExit(f.fp());
f.cx->stack.popInlineFrame(f.regs);
DebugOnly<JSOp> op = JSOp(*f.regs.pc);
JS_ASSERT(op == JSOP_CALL ||
op == JSOP_NEW ||
op == JSOP_EVAL ||
op == JSOP_FUNCALL ||
op == JSOP_FUNAPPLY);
f.regs.pc += JSOP_CALL_LENGTH;
}
void JS_FASTCALL void JS_FASTCALL
stubs::SlowCall(VMFrame &f, uint32_t argc) stubs::SlowCall(VMFrame &f, uint32_t argc)
@ -162,7 +146,7 @@ stubs::SlowNew(VMFrame &f, uint32_t argc)
static inline bool static inline bool
CheckStackQuota(VMFrame &f) CheckStackQuota(VMFrame &f)
{ {
JS_ASSERT(f.regs.sp == f.fp()->base()); JS_ASSERT(f.regs.stackDepth() == 0);
f.stackLimit = f.cx->stack.space().getStackLimit(f.cx, DONT_REPORT_ERROR); f.stackLimit = f.cx->stack.space().getStackLimit(f.cx, DONT_REPORT_ERROR);
if (f.stackLimit) if (f.stackLimit)
@ -306,10 +290,6 @@ UncachedInlineCall(VMFrame &f, InitialFrameFlags initial,
/* Finish the handoff to the new frame regs. */ /* Finish the handoff to the new frame regs. */
PreserveRegsGuard regsGuard(cx, regs); PreserveRegsGuard regsGuard(cx, regs);
/* Scope with a call object parented by callee's parent. */
if (!regs.fp()->functionPrologue(cx))
return false;
/* /*
* If newscript was successfully compiled, run it. Skip for calls which * If newscript was successfully compiled, run it. Skip for calls which
* will be constructing a new type object for 'this'. * will be constructing a new type object for 'this'.
@ -543,7 +523,7 @@ js_InternalThrow(VMFrame &f)
} }
ScriptEpilogue(f.cx, f.fp(), false); f.fp()->epilogue(f.cx);
// Don't remove the last frame, this is the responsibility of // Don't remove the last frame, this is the responsibility of
// JaegerShot()'s caller. We only guarantee that ScriptEpilogue() // JaegerShot()'s caller. We only guarantee that ScriptEpilogue()
@ -551,8 +531,14 @@ js_InternalThrow(VMFrame &f)
if (f.entryfp == f.fp()) if (f.entryfp == f.fp())
break; break;
JS_ASSERT(&cx->regs() == &f.regs); f.cx->stack.popInlineFrame(f.regs);
InlineReturn(f); DebugOnly<JSOp> op = JSOp(*f.regs.pc);
JS_ASSERT(op == JSOP_CALL ||
op == JSOP_NEW ||
op == JSOP_EVAL ||
op == JSOP_FUNCALL ||
op == JSOP_FUNAPPLY);
f.regs.pc += JSOP_CALL_LENGTH;
} }
JS_ASSERT(&cx->regs() == &f.regs); JS_ASSERT(&cx->regs() == &f.regs);
@ -587,11 +573,11 @@ js_InternalThrow(VMFrame &f)
if (cx->isExceptionPending()) { if (cx->isExceptionPending()) {
JS_ASSERT(JSOp(*pc) == JSOP_ENTERBLOCK); JS_ASSERT(JSOp(*pc) == JSOP_ENTERBLOCK);
StaticBlockObject &blockObj = script->getObject(GET_UINT32_INDEX(pc))->asStaticBlock(); StaticBlockObject &blockObj = script->getObject(GET_UINT32_INDEX(pc))->asStaticBlock();
if (!cx->regs().fp()->pushBlock(cx, blockObj))
return NULL;
Value *vp = cx->regs().sp + blockObj.slotCount(); Value *vp = cx->regs().sp + blockObj.slotCount();
SetValueRangeToUndefined(cx->regs().sp, vp); SetValueRangeToUndefined(cx->regs().sp, vp);
cx->regs().sp = vp; cx->regs().sp = vp;
if (!cx->regs().fp()->pushBlock(cx, blockObj))
return NULL;
JS_ASSERT(JSOp(pc[JSOP_ENTERBLOCK_LENGTH]) == JSOP_EXCEPTION); JS_ASSERT(JSOp(pc[JSOP_ENTERBLOCK_LENGTH]) == JSOP_EXCEPTION);
cx->regs().sp[0] = cx->getPendingException(); cx->regs().sp[0] = cx->getPendingException();
@ -615,7 +601,7 @@ stubs::CreateThis(VMFrame &f, JSObject *proto)
JSObject *obj = js_CreateThisForFunctionWithProto(cx, callee, proto); JSObject *obj = js_CreateThisForFunctionWithProto(cx, callee, proto);
if (!obj) if (!obj)
THROW(); THROW();
fp->formalArgs()[-1].setObject(*obj); fp->thisValue() = ObjectValue(*obj);
} }
void JS_FASTCALL void JS_FASTCALL
@ -706,7 +692,9 @@ FinishVarIncOp(VMFrame &f, RejoinState rejoin, Value ov, Value nv, Value *vp)
const JSCodeSpec *cs = &js_CodeSpec[op]; const JSCodeSpec *cs = &js_CodeSpec[op];
unsigned i = GET_SLOTNO(f.pc()); unsigned i = GET_SLOTNO(f.pc());
Value *var = (JOF_TYPE(cs->format) == JOF_LOCAL) ? f.fp()->slots() + i : &f.fp()->formalArg(i); Value *var = (JOF_TYPE(cs->format) == JOF_LOCAL)
? &f.fp()->unaliasedLocal(i)
: &f.fp()->unaliasedFormal(i);
if (rejoin == REJOIN_POS) { if (rejoin == REJOIN_POS) {
double d = ov.toNumber(); double d = ov.toNumber();
@ -722,7 +710,7 @@ FinishVarIncOp(VMFrame &f, RejoinState rejoin, Value ov, Value nv, Value *vp)
extern "C" void * extern "C" void *
js_InternalInterpret(void *returnData, void *returnType, void *returnReg, js::VMFrame &f) js_InternalInterpret(void *returnData, void *returnType, void *returnReg, js::VMFrame &f)
{ {
JSRejoinState jsrejoin = f.fp()->rejoin(); FrameRejoinState jsrejoin = f.fp()->rejoin();
RejoinState rejoin; RejoinState rejoin;
if (jsrejoin & 0x1) { if (jsrejoin & 0x1) {
/* Rejoin after a scripted call finished. Restore f.regs.pc and f.regs.inlined (NULL) */ /* Rejoin after a scripted call finished. Restore f.regs.pc and f.regs.inlined (NULL) */
@ -757,12 +745,12 @@ js_InternalInterpret(void *returnData, void *returnType, void *returnReg, js::VM
* here. Update it to its value at the start of the opcode. * here. Update it to its value at the start of the opcode.
*/ */
Value *oldsp = f.regs.sp; Value *oldsp = f.regs.sp;
f.regs.sp = fp->base() + analysis->getCode(pc).stackDepth; f.regs.sp = f.regs.spForStackDepth(analysis->getCode(pc).stackDepth);
jsbytecode *nextpc = pc + GetBytecodeLength(pc); jsbytecode *nextpc = pc + GetBytecodeLength(pc);
Value *nextsp = NULL; Value *nextsp = NULL;
if (nextpc != script->code + script->length && analysis->maybeCode(nextpc)) if (nextpc != script->code + script->length && analysis->maybeCode(nextpc))
nextsp = fp->base() + analysis->getCode(nextpc).stackDepth; nextsp = f.regs.spForStackDepth(analysis->getCode(nextpc).stackDepth);
JS_ASSERT(&cx->regs() == &f.regs); JS_ASSERT(&cx->regs() == &f.regs);
@ -867,18 +855,13 @@ js_InternalInterpret(void *returnData, void *returnType, void *returnReg, js::VM
f.regs.pc = nextpc; f.regs.pc = nextpc;
break; break;
case REJOIN_DEFLOCALFUN:
fp->slots()[GET_SLOTNO(pc)].setObject(* (JSObject *) returnReg);
f.regs.pc = nextpc;
break;
case REJOIN_THIS_PROTOTYPE: { case REJOIN_THIS_PROTOTYPE: {
RootedObject callee(cx, &fp->callee()); RootedObject callee(cx, &fp->callee());
JSObject *proto = f.regs.sp[0].isObject() ? &f.regs.sp[0].toObject() : NULL; JSObject *proto = f.regs.sp[0].isObject() ? &f.regs.sp[0].toObject() : NULL;
JSObject *obj = js_CreateThisForFunctionWithProto(cx, callee, proto); JSObject *obj = js_CreateThisForFunctionWithProto(cx, callee, proto);
if (!obj) if (!obj)
return js_InternalThrow(f); return js_InternalThrow(f);
fp->formalArgs()[-1].setObject(*obj); fp->thisValue() = ObjectValue(*obj);
if (Probes::callTrackingActive(cx)) if (Probes::callTrackingActive(cx))
Probes::enterJSFun(f.cx, f.fp()->maybeFun(), f.fp()->script()); Probes::enterJSFun(f.cx, f.fp()->maybeFun(), f.fp()->script());
@ -902,42 +885,56 @@ js_InternalInterpret(void *returnData, void *returnType, void *returnReg, js::VM
break; break;
} }
case REJOIN_CHECK_ARGUMENTS:
/* /*
* Do all the work needed in arity check JIT prologues after the * Each of these cases indicates a point of progress through
* arguments check occurs (FixupArity has been called if needed, but * generatePrologue. Execute the rest of the prologue here.
* the stack check and late prologue have not been performed.
*/ */
case REJOIN_CHECK_ARGUMENTS:
if (!CheckStackQuota(f)) if (!CheckStackQuota(f))
return js_InternalThrow(f); return js_InternalThrow(f);
fp->initVarsToUndefined();
SetValueRangeToUndefined(fp->slots(), script->nfixed);
if (!fp->functionPrologue(cx))
return js_InternalThrow(f);
/* FALLTHROUGH */
case REJOIN_FUNCTION_PROLOGUE:
fp->scopeChain(); fp->scopeChain();
if (!fp->prologue(cx, types::UseNewTypeAtEntry(cx, fp)))
/* Construct the 'this' object for the frame if necessary. */
if (!ScriptPrologueOrGeneratorResume(cx, fp, types::UseNewTypeAtEntry(cx, fp)))
return js_InternalThrow(f); return js_InternalThrow(f);
/* /*
* Having called ScriptPrologueOrGeneratorResume, we would normally call * We would normally call ScriptDebugPrologue here. But in debug mode,
* ScriptDebugPrologue here. But in debug mode, we only use JITted * we only use JITted functions' invokeEntry entry point, whereas
* functions' invokeEntry entry point, whereas CheckArgumentTypes * CheckArgumentTypes (REJOIN_CHECK_ARGUMENTS) is only reachable via
* (REJOIN_CHECK_ARGUMENTS) and FunctionFramePrologue * the other entry points.
* (REJOIN_FUNCTION_PROLOGUE) are only reachable via the other entry
* points. So we should never need either of these rejoin tails in debug
* mode.
* *
* If we fix bug 699196 ("Debug mode code could use inline caches * If we fix bug 699196 ("Debug mode code could use inline caches
* now"), then these cases will become reachable again. * now"), then this case will become reachable again.
*/ */
JS_ASSERT(!cx->compartment->debugMode()); JS_ASSERT(!cx->compartment->debugMode());
break;
/* Finish executing the tail of generatePrologue. */
case REJOIN_FUNCTION_PROLOGUE:
if (fp->isConstructing()) {
RootedObject callee(cx, &fp->callee());
JSObject *obj = js_CreateThisForFunction(cx, callee, types::UseNewTypeAtEntry(cx, fp));
if (!obj)
return js_InternalThrow(f);
fp->functionThis() = ObjectValue(*obj);
}
/* FALLTHROUGH */
case REJOIN_EVAL_PROLOGUE:
if (cx->compartment->debugMode()) {
Probes::enterJSFun(cx, fp->maybeFun(), fp->script());
JSTrapStatus status = ScriptDebugPrologue(cx, fp);
switch (status) {
case JSTRAP_CONTINUE:
break;
case JSTRAP_RETURN:
return f.cx->jaegerRuntime().forceReturnFromFastCall();
case JSTRAP_ERROR:
case JSTRAP_THROW:
return js_InternalThrow(f);
default:
JS_NOT_REACHED("bad ScriptDebugPrologue status");
}
}
break; break;
case REJOIN_CALL_PROLOGUE: case REJOIN_CALL_PROLOGUE:
@ -1060,7 +1057,7 @@ js_InternalInterpret(void *returnData, void *returnType, void *returnReg, js::VM
if (nextDepth == UINT32_MAX) if (nextDepth == UINT32_MAX)
nextDepth = analysis->getCode(f.regs.pc).stackDepth; nextDepth = analysis->getCode(f.regs.pc).stackDepth;
f.regs.sp = fp->base() + nextDepth; f.regs.sp = f.regs.spForStackDepth(nextDepth);
/* /*
* Monitor the result of the previous op when finishing a JOF_TYPESET op. * Monitor the result of the previous op when finishing a JOF_TYPESET op.


@ -1056,10 +1056,6 @@ mjit::EnterMethodJIT(JSContext *cx, StackFrame *fp, void *code, Value *stackLimi
fp->markReturnValue(); fp->markReturnValue();
} }
/* See comment in mjit::Compiler::emitReturn. */
if (fp->isFunctionFrame())
fp->updateEpilogueFlags();
return ok ? Jaeger_Returned : Jaeger_Throwing; return ok ? Jaeger_Returned : Jaeger_Throwing;
} }


@ -109,7 +109,7 @@ struct VMFrame
Value *stackLimit; Value *stackLimit;
StackFrame *entryfp; StackFrame *entryfp;
FrameRegs *oldregs; FrameRegs *oldregs;
JSRejoinState stubRejoin; /* How to rejoin if inside a call from an IC stub. */ FrameRejoinState stubRejoin; /* How to rejoin if inside a call from an IC stub. */
#if defined(JS_CPU_X86) #if defined(JS_CPU_X86)
void *unused0, *unused1; /* For 16 byte alignment */ void *unused0, *unused1; /* For 16 byte alignment */
@ -294,9 +294,6 @@ enum RejoinState {
REJOIN_PUSH_BOOLEAN, REJOIN_PUSH_BOOLEAN,
REJOIN_PUSH_OBJECT, REJOIN_PUSH_OBJECT,
/* Call returns an object, which should be assigned to a local per the current bytecode. */
REJOIN_DEFLOCALFUN,
/* /*
* During the prologue of constructing scripts, after the function's * During the prologue of constructing scripts, after the function's
* .prototype property has been fetched. * .prototype property has been fetched.
@ -310,9 +307,10 @@ enum RejoinState {
REJOIN_CHECK_ARGUMENTS, REJOIN_CHECK_ARGUMENTS,
/* /*
* The script's jitcode was discarded after marking an outer function as * The script's jitcode was discarded during one of the following steps of
* reentrant or due to a GC while creating a call object. * a frame's prologue.
*/ */
REJOIN_EVAL_PROLOGUE,
REJOIN_FUNCTION_PROLOGUE, REJOIN_FUNCTION_PROLOGUE,
/* /*
@ -339,14 +337,14 @@ enum RejoinState {
}; };
/* Get the rejoin state for a StackFrame after returning from a scripted call. */ /* Get the rejoin state for a StackFrame after returning from a scripted call. */
static inline JSRejoinState static inline FrameRejoinState
ScriptedRejoin(uint32_t pcOffset) ScriptedRejoin(uint32_t pcOffset)
{ {
return REJOIN_SCRIPTED | (pcOffset << 1); return REJOIN_SCRIPTED | (pcOffset << 1);
} }
/* Get the rejoin state for a StackFrame after returning from a stub call. */ /* Get the rejoin state for a StackFrame after returning from a stub call. */
static inline JSRejoinState static inline FrameRejoinState
StubRejoin(RejoinState rejoin) StubRejoin(RejoinState rejoin)
{ {
return rejoin << 1; return rejoin << 1;
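The two helpers above pack a FrameRejoinState into one word by stealing the low bit. A minimal standalone sketch of that tagging scheme, assuming REJOIN_SCRIPTED occupies the low bit (the constant's value is an assumption based on the shifts, not something shown in the diff):

    // Hypothetical model; names mirror the diff, the constant value is assumed.
    #include <cassert>
    #include <cstdint>

    typedef uint32_t FrameRejoinState;
    static const uint32_t REJOIN_SCRIPTED = 0x1;   // assumed low-bit tag

    static inline FrameRejoinState ScriptedRejoin(uint32_t pcOffset) {
        return REJOIN_SCRIPTED | (pcOffset << 1);  // low bit set, pc offset above it
    }
    static inline FrameRejoinState StubRejoin(uint32_t rejoin) {
        return rejoin << 1;                        // low bit clear, rejoin state above it
    }

    int main() {
        FrameRejoinState s = ScriptedRejoin(42);
        assert((s & 1) == 1 && (s >> 1) == 42);    // decodes as a scripted rejoin at offset 42
        FrameRejoinState t = StubRejoin(7);
        assert((t & 1) == 0 && (t >> 1) == 7);     // decodes as a stub rejoin with state 7
        return 0;
    }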


@ -1054,7 +1054,7 @@ ic::SplatApplyArgs(VMFrame &f)
THROWV(false); THROWV(false);
/* Steps 7-8. */ /* Steps 7-8. */
f.regs.fp()->forEachCanonicalActualArg(CopyTo(f.regs.sp)); f.regs.fp()->forEachUnaliasedActual(CopyTo(f.regs.sp));
f.regs.sp += length; f.regs.sp += length;
f.u.call.dynamicArgc = length; f.u.call.dynamicArgc = length;


@ -294,8 +294,6 @@ class SetPropCompiler : public PICStubCompiler
JS_ASSERT_IF(!shape->hasDefaultSetter(), obj->isCall()); JS_ASSERT_IF(!shape->hasDefaultSetter(), obj->isCall());
MaybeJump skipOver;
if (adding) { if (adding) {
JS_ASSERT(shape->hasSlot()); JS_ASSERT(shape->hasSlot());
pic.shapeRegHasBaseShape = false; pic.shapeRegHasBaseShape = false;
@ -353,29 +351,11 @@ class SetPropCompiler : public PICStubCompiler
// then we can rely on fun->nargs remaining invariant. // then we can rely on fun->nargs remaining invariant.
JSFunction *fun = obj->asCall().getCalleeFunction(); JSFunction *fun = obj->asCall().getCalleeFunction();
uint16_t slot = uint16_t(shape->shortid()); uint16_t slot = uint16_t(shape->shortid());
/* Guard that the call object has a frame. */
masm.loadObjPrivate(pic.objReg, pic.shapeReg, obj->numFixedSlots());
Jump escapedFrame = masm.branchTestPtr(Assembler::Zero, pic.shapeReg, pic.shapeReg);
{
Address addr(pic.shapeReg, shape->setterOp() == CallObject::setArgOp
? StackFrame::offsetOfFormalArg(fun, slot)
: StackFrame::offsetOfFixed(slot));
masm.storeValue(pic.u.vr, addr);
skipOver = masm.jump();
}
escapedFrame.linkTo(masm.label(), &masm);
{
if (shape->setterOp() == CallObject::setVarOp) if (shape->setterOp() == CallObject::setVarOp)
slot += fun->nargs; slot += fun->nargs;
slot += CallObject::RESERVED_SLOTS; slot += CallObject::RESERVED_SLOTS;
Address address = masm.objPropAddress(obj, pic.objReg, slot); Address address = masm.objPropAddress(obj, pic.objReg, slot);
masm.storeValue(pic.u.vr, address); masm.storeValue(pic.u.vr, address);
}
pic.shapeRegHasBaseShape = false; pic.shapeRegHasBaseShape = false;
} }
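The slot arithmetic above matches the call object layout used by the accessors later in this patch: a formal argument i lives at RESERVED_SLOTS + i, and a var j lives at RESERVED_SLOTS + nargs + j. A small standalone sketch of that computation (the concrete value of RESERVED_SLOTS is not shown here, so it is passed in):

    // Illustrative only; mirrors the stub's slot computation.
    #include <cstdio>

    static unsigned callObjectSlot(bool isVar, unsigned shortid,
                                   unsigned nargs, unsigned reservedSlots) {
        unsigned slot = shortid;
        if (isVar)
            slot += nargs;                 // vars are laid out after all formals
        return reservedSlots + slot;
    }

    int main() {
        // a function with 2 formals and 2 reserved slots (illustrative numbers):
        // formal #1 -> slot 3, var #0 -> slot 4
        printf("%u %u\n", callObjectSlot(false, 1, 2, 2), callObjectSlot(true, 0, 2, 2));
        return 0;
    }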
@ -410,8 +390,6 @@ class SetPropCompiler : public PICStubCompiler
for (Jump *pj = slowExits.begin(); pj != slowExits.end(); ++pj) for (Jump *pj = slowExits.begin(); pj != slowExits.end(); ++pj)
buffer.link(*pj, pic.slowPathStart); buffer.link(*pj, pic.slowPathStart);
buffer.link(done, pic.fastPathRejoin); buffer.link(done, pic.fastPathRejoin);
if (skipOver.isSet())
buffer.link(skipOver.get(), pic.fastPathRejoin);
CodeLocationLabel cs = buffer.finalize(f); CodeLocationLabel cs = buffer.finalize(f);
JaegerSpew(JSpew_PICs, "generate setprop stub %p %p %d at %p\n", JaegerSpew(JSpew_PICs, "generate setprop stub %p %p %d at %p\n",
(void*)&pic, (void*)&pic,
@ -762,6 +740,9 @@ struct GetPropHelper {
} }
}; };
namespace js {
namespace mjit {
class GetPropCompiler : public PICStubCompiler class GetPropCompiler : public PICStubCompiler
{ {
JSObject *obj; JSObject *obj;
@ -1392,6 +1373,9 @@ class GetPropCompiler : public PICStubCompiler
} }
}; };
} // namespace mjit
} // namespace js
class ScopeNameCompiler : public PICStubCompiler class ScopeNameCompiler : public PICStubCompiler
{ {
private: private:
@ -1570,9 +1554,9 @@ class ScopeNameCompiler : public PICStubCompiler
CallObjPropKind kind; CallObjPropKind kind;
const Shape *shape = getprop.shape; const Shape *shape = getprop.shape;
if (shape->getterOp() == CallObject::getArgOp) { if (shape->setterOp() == CallObject::setArgOp) {
kind = ARG; kind = ARG;
} else if (shape->getterOp() == CallObject::getVarOp) { } else if (shape->setterOp() == CallObject::setVarOp) {
kind = VAR; kind = VAR;
} else { } else {
return disable("unhandled callobj sprop getter"); return disable("unhandled callobj sprop getter");
@ -1590,38 +1574,16 @@ class ScopeNameCompiler : public PICStubCompiler
Jump finalShape = masm.branchPtr(Assembler::NotEqual, pic.shapeReg, Jump finalShape = masm.branchPtr(Assembler::NotEqual, pic.shapeReg,
ImmPtr(getprop.holder->lastProperty())); ImmPtr(getprop.holder->lastProperty()));
/* Get callobj's stack frame. */
masm.loadObjPrivate(pic.objReg, pic.shapeReg, getprop.holder->numFixedSlots());
JSFunction *fun = getprop.holder->asCall().getCalleeFunction(); JSFunction *fun = getprop.holder->asCall().getCalleeFunction();
uint16_t slot = uint16_t(shape->shortid()); unsigned slot = shape->shortid();
Jump skipOver;
Jump escapedFrame = masm.branchTestPtr(Assembler::Zero, pic.shapeReg, pic.shapeReg);
/* Not-escaped case. */
{
Address addr(pic.shapeReg, kind == ARG ? StackFrame::offsetOfFormalArg(fun, slot)
: StackFrame::offsetOfFixed(slot));
masm.loadPayload(addr, pic.objReg);
masm.loadTypeTag(addr, pic.shapeReg);
skipOver = masm.jump();
}
escapedFrame.linkTo(masm.label(), &masm);
{
if (kind == VAR) if (kind == VAR)
slot += fun->nargs; slot += fun->nargs;
slot += CallObject::RESERVED_SLOTS; slot += CallObject::RESERVED_SLOTS;
Address address = masm.objPropAddress(obj, pic.objReg, slot); Address address = masm.objPropAddress(obj, pic.objReg, slot);
/* Safe because type is loaded first. */ /* Safe because type is loaded first. */
masm.loadValueAsComponents(address, pic.shapeReg, pic.objReg); masm.loadValueAsComponents(address, pic.shapeReg, pic.objReg);
}
skipOver.linkTo(masm.label(), &masm);
Jump done = masm.jump(); Jump done = masm.jump();
// All failures flow to here, so there is a common point to patch. // All failures flow to here, so there is a common point to patch.


@ -912,7 +912,7 @@ stubs::InitElem(VMFrame &f, uint32_t last)
FrameRegs &regs = f.regs; FrameRegs &regs = f.regs;
/* Pop the element's value into rval. */ /* Pop the element's value into rval. */
JS_ASSERT(regs.sp - f.fp()->base() >= 3); JS_ASSERT(regs.stackDepth() >= 3);
const Value &rref = regs.sp[-1]; const Value &rref = regs.sp[-1];
/* Find the object being initialized at top of stack. */ /* Find the object being initialized at top of stack. */
@ -1020,7 +1020,7 @@ InitPropOrMethod(VMFrame &f, PropertyName *name, JSOp op)
FrameRegs &regs = f.regs; FrameRegs &regs = f.regs;
/* Load the property's initial value into rval. */ /* Load the property's initial value into rval. */
JS_ASSERT(regs.sp - f.fp()->base() >= 2); JS_ASSERT(regs.stackDepth() >= 2);
Value rval; Value rval;
rval = regs.sp[-1]; rval = regs.sp[-1];
@ -1048,7 +1048,7 @@ stubs::InitProp(VMFrame &f, PropertyName *name)
void JS_FASTCALL void JS_FASTCALL
stubs::IterNext(VMFrame &f, int32_t offset) stubs::IterNext(VMFrame &f, int32_t offset)
{ {
JS_ASSERT(f.regs.sp - offset >= f.fp()->base()); JS_ASSERT(f.regs.stackDepth() >= unsigned(offset));
JS_ASSERT(f.regs.sp[-offset].isObject()); JS_ASSERT(f.regs.sp[-offset].isObject());
JSObject *iterobj = &f.regs.sp[-offset].toObject(); JSObject *iterobj = &f.regs.sp[-offset].toObject();
@ -1061,7 +1061,7 @@ stubs::IterNext(VMFrame &f, int32_t offset)
JSBool JS_FASTCALL JSBool JS_FASTCALL
stubs::IterMore(VMFrame &f) stubs::IterMore(VMFrame &f)
{ {
JS_ASSERT(f.regs.sp - 1 >= f.fp()->base()); JS_ASSERT(f.regs.stackDepth() >= 1);
JS_ASSERT(f.regs.sp[-1].isObject()); JS_ASSERT(f.regs.sp[-1].isObject());
Value v; Value v;
@ -1075,7 +1075,7 @@ stubs::IterMore(VMFrame &f)
void JS_FASTCALL void JS_FASTCALL
stubs::EndIter(VMFrame &f) stubs::EndIter(VMFrame &f)
{ {
JS_ASSERT(f.regs.sp - 1 >= f.fp()->base()); JS_ASSERT(f.regs.stackDepth() >= 1);
if (!CloseIterator(f.cx, &f.regs.sp[-1].toObject())) if (!CloseIterator(f.cx, &f.regs.sp[-1].toObject()))
THROW(); THROW();
} }
@ -1125,7 +1125,7 @@ stubs::Throw(VMFrame &f)
void JS_FASTCALL void JS_FASTCALL
stubs::Arguments(VMFrame &f) stubs::Arguments(VMFrame &f)
{ {
ArgumentsObject *obj = ArgumentsObject::create(f.cx, f.fp()); ArgumentsObject *obj = ArgumentsObject::createExpected(f.cx, f.fp());
if (!obj) if (!obj)
THROW(); THROW();
f.regs.sp[0] = ObjectValue(*obj); f.regs.sp[0] = ObjectValue(*obj);
@ -1173,27 +1173,21 @@ void JS_FASTCALL
stubs::EnterBlock(VMFrame &f, JSObject *obj) stubs::EnterBlock(VMFrame &f, JSObject *obj)
{ {
FrameRegs &regs = f.regs; FrameRegs &regs = f.regs;
StackFrame *fp = f.fp();
JS_ASSERT(!f.regs.inlined()); JS_ASSERT(!f.regs.inlined());
StaticBlockObject &blockObj = obj->asStaticBlock(); StaticBlockObject &blockObj = obj->asStaticBlock();
if (!fp->pushBlock(f.cx, blockObj))
THROW();
if (*regs.pc == JSOP_ENTERBLOCK) { if (*regs.pc == JSOP_ENTERBLOCK) {
JS_ASSERT(fp->base() + blockObj.stackDepth() == regs.sp); JS_ASSERT(regs.stackDepth() == blockObj.stackDepth());
JS_ASSERT(regs.stackDepth() + blockObj.slotCount() <= f.fp()->script()->nslots);
Value *vp = regs.sp + blockObj.slotCount(); Value *vp = regs.sp + blockObj.slotCount();
JS_ASSERT(regs.sp < vp);
JS_ASSERT(vp <= fp->slots() + fp->script()->nslots);
SetValueRangeToUndefined(regs.sp, vp); SetValueRangeToUndefined(regs.sp, vp);
regs.sp = vp; regs.sp = vp;
} else if (*regs.pc == JSOP_ENTERLET0) {
JS_ASSERT(regs.fp()->base() + blockObj.stackDepth() + blockObj.slotCount()
== regs.sp);
} else if (*regs.pc == JSOP_ENTERLET1) {
JS_ASSERT(regs.fp()->base() + blockObj.stackDepth() + blockObj.slotCount()
== regs.sp - 1);
} }
/* Clone block iff there are any closed-over variables. */
if (!regs.fp()->pushBlock(f.cx, blockObj))
THROW();
} }
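A rough standalone model of what the JSOP_ENTERBLOCK path above does to the operand stack: the current depth must equal the block's recorded stack depth, and then slotCount() values are reserved and set to undefined. This is a sketch of the stub's effect under that reading, not engine code:

    #include <cassert>
    #include <vector>

    struct Value { bool undefined; };

    static void enterBlock(std::vector<Value> &stack,
                           unsigned blockStackDepth, unsigned slotCount) {
        assert(stack.size() == blockStackDepth);             // mirrors the JS_ASSERT above
        stack.resize(stack.size() + slotCount, Value{true}); // push undefined block slots
    }

    int main() {
        std::vector<Value> stack(3, Value{false});
        enterBlock(stack, 3, 2);                             // depth-3 block with 2 let slots
        assert(stack.size() == 5 && stack[3].undefined && stack[4].undefined);
        return 0;
    }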
void JS_FASTCALL void JS_FASTCALL
@ -1522,7 +1516,7 @@ stubs::CheckArgumentTypes(VMFrame &f)
if (!f.fp()->isConstructing()) if (!f.fp()->isConstructing())
TypeScript::SetThis(f.cx, script, fp->thisValue()); TypeScript::SetThis(f.cx, script, fp->thisValue());
for (unsigned i = 0; i < fun->nargs; i++) for (unsigned i = 0; i < fun->nargs; i++)
TypeScript::SetArgument(f.cx, script, i, fp->formalArg(i)); TypeScript::SetArgument(f.cx, script, i, fp->unaliasedFormal(i, DONT_CHECK_ALIASING));
} }
if (monitor.recompiled()) if (monitor.recompiled())
@ -1552,7 +1546,7 @@ stubs::AssertArgumentTypes(VMFrame &f)
} }
for (unsigned i = 0; i < fun->nargs; i++) { for (unsigned i = 0; i < fun->nargs; i++) {
Type type = GetValueType(f.cx, fp->formalArg(i)); Type type = GetValueType(f.cx, fp->unaliasedFormal(i, DONT_CHECK_ALIASING));
if (!TypeScript::ArgTypes(script, i)->hasType(type)) if (!TypeScript::ArgTypes(script, i)->hasType(type))
TypeFailure(f.cx, "Missing type for arg %d: %s", i, TypeString(type)); TypeFailure(f.cx, "Missing type for arg %d: %s", i, TypeString(type));
} }
@ -1609,16 +1603,29 @@ stubs::Exception(VMFrame &f)
} }
void JS_FASTCALL void JS_FASTCALL
stubs::FunctionFramePrologue(VMFrame &f) stubs::StrictEvalPrologue(VMFrame &f)
{ {
if (!f.fp()->functionPrologue(f.cx)) if (!f.fp()->jitStrictEvalPrologue(f.cx))
THROW(); THROW();
} }
void JS_FASTCALL void JS_FASTCALL
stubs::FunctionFrameEpilogue(VMFrame &f) stubs::HeavyweightFunctionPrologue(VMFrame &f)
{ {
f.fp()->functionEpilogue(f.cx); if (!f.fp()->jitHeavyweightFunctionPrologue(f.cx))
THROW();
}
void JS_FASTCALL
stubs::TypeNestingPrologue(VMFrame &f)
{
f.fp()->jitTypeNestingPrologue(f.cx);
}
void JS_FASTCALL
stubs::Epilogue(VMFrame &f)
{
f.fp()->epilogue(f.cx);
} }
void JS_FASTCALL void JS_FASTCALL
@ -1626,17 +1633,15 @@ stubs::AnyFrameEpilogue(VMFrame &f)
{ {
/* /*
* On the normal execution path, emitReturn calls ScriptDebugEpilogue * On the normal execution path, emitReturn calls ScriptDebugEpilogue
* and inlines ScriptEpilogue. This function implements forced early * and inlines epilogue. This function implements forced early
* returns, so it must have the same effect. * returns, so it must have the same effect.
*/ */
bool ok = true; bool ok = true;
if (f.cx->compartment->debugMode()) if (f.cx->compartment->debugMode())
ok = js::ScriptDebugEpilogue(f.cx, f.fp(), ok); ok = js::ScriptDebugEpilogue(f.cx, f.fp(), ok);
ok = ScriptEpilogue(f.cx, f.fp(), ok); f.fp()->epilogue(f.cx);
if (!ok) if (!ok)
THROW(); THROW();
if (f.fp()->isNonEvalFunctionFrame())
f.fp()->functionEpilogue(f.cx);
} }
template <bool Clamped> template <bool Clamped>


@ -168,10 +168,12 @@ void JS_FASTCALL ConvertToTypedFloat(JSContext *cx, Value *vp);
void JS_FASTCALL Exception(VMFrame &f); void JS_FASTCALL Exception(VMFrame &f);
void JS_FASTCALL FunctionFramePrologue(VMFrame &f); void JS_FASTCALL StrictEvalPrologue(VMFrame &f);
void JS_FASTCALL FunctionFrameEpilogue(VMFrame &f); void JS_FASTCALL HeavyweightFunctionPrologue(VMFrame &f);
void JS_FASTCALL TypeNestingPrologue(VMFrame &f);
void JS_FASTCALL AnyFrameEpilogue(VMFrame &f); void JS_FASTCALL AnyFrameEpilogue(VMFrame &f);
void JS_FASTCALL Epilogue(VMFrame &f);
JSObject * JS_FASTCALL JSObject * JS_FASTCALL
NewDenseUnallocatedArray(VMFrame &f, uint32_t length); NewDenseUnallocatedArray(VMFrame &f, uint32_t length);


@ -1281,31 +1281,14 @@ AssertJit(JSContext *cx, unsigned argc, jsval *vp)
static JSScript * static JSScript *
ValueToScript(JSContext *cx, jsval v, JSFunction **funp = NULL) ValueToScript(JSContext *cx, jsval v, JSFunction **funp = NULL)
{ {
JSScript *script = NULL; JSFunction *fun = JS_ValueToFunction(cx, v);
JSFunction *fun = NULL;
if (!JSVAL_IS_PRIMITIVE(v)) {
JSObject *obj = JSVAL_TO_OBJECT(v);
JSClass *clasp = JS_GetClass(obj);
if (clasp == Jsvalify(&GeneratorClass)) {
if (JSGenerator *gen = (JSGenerator *) JS_GetPrivate(obj)) {
fun = gen->floatingFrame()->fun();
script = fun->script();
}
}
}
if (!script) {
fun = JS_ValueToFunction(cx, v);
if (!fun) if (!fun)
return NULL; return NULL;
script = fun->maybeScript();
if (!script) { JSScript *script = fun->maybeScript();
JS_ReportErrorNumber(cx, my_GetErrorMessage, NULL, if (!script)
JSSMSG_SCRIPTS_ONLY); JS_ReportErrorNumber(cx, my_GetErrorMessage, NULL, JSSMSG_SCRIPTS_ONLY);
}
}
if (fun && funp) if (fun && funp)
*funp = fun; *funp = fun;


@ -10,16 +10,9 @@
#include "ArgumentsObject.h" #include "ArgumentsObject.h"
namespace js { #include "ScopeObject-inl.h"
inline void namespace js {
ArgumentsObject::initInitialLength(uint32_t length)
{
JS_ASSERT(getFixedSlot(INITIAL_LENGTH_SLOT).isUndefined());
initFixedSlot(INITIAL_LENGTH_SLOT, Int32Value(length << PACKED_BITS_COUNT));
JS_ASSERT((getFixedSlot(INITIAL_LENGTH_SLOT).toInt32() >> PACKED_BITS_COUNT) == int32_t(length));
JS_ASSERT(!hasOverriddenLength());
}
inline uint32_t inline uint32_t
ArgumentsObject::initialLength() const ArgumentsObject::initialLength() const
@ -39,26 +32,67 @@ ArgumentsObject::markLengthOverridden()
inline bool inline bool
ArgumentsObject::hasOverriddenLength() const ArgumentsObject::hasOverriddenLength() const
{ {
const js::Value &v = getFixedSlot(INITIAL_LENGTH_SLOT); const Value &v = getFixedSlot(INITIAL_LENGTH_SLOT);
return v.toInt32() & LENGTH_OVERRIDDEN_BIT; return v.toInt32() & LENGTH_OVERRIDDEN_BIT;
} }
inline void
ArgumentsObject::initData(ArgumentsData *data)
{
JS_ASSERT(getFixedSlot(DATA_SLOT).isUndefined());
initFixedSlot(DATA_SLOT, PrivateValue(data));
}
inline ArgumentsData * inline ArgumentsData *
ArgumentsObject::data() const ArgumentsObject::data() const
{ {
return reinterpret_cast<js::ArgumentsData *>(getFixedSlot(DATA_SLOT).toPrivate()); return reinterpret_cast<ArgumentsData *>(getFixedSlot(DATA_SLOT).toPrivate());
}
inline JSScript *
ArgumentsObject::containingScript() const
{
return data()->script;
}
inline const Value &
ArgumentsObject::arg(unsigned i) const
{
JS_ASSERT(i < data()->numArgs);
const Value &v = data()->args[i];
JS_ASSERT(!v.isMagic(JS_FORWARD_TO_CALL_OBJECT));
return v;
}
inline void
ArgumentsObject::setArg(unsigned i, const Value &v)
{
JS_ASSERT(i < data()->numArgs);
HeapValue &lhs = data()->args[i];
JS_ASSERT(!lhs.isMagic(JS_FORWARD_TO_CALL_OBJECT));
lhs = v;
}
inline const Value &
ArgumentsObject::element(uint32_t i) const
{
JS_ASSERT(!isElementDeleted(i));
const Value &v = data()->args[i];
if (v.isMagic(JS_FORWARD_TO_CALL_OBJECT))
return getFixedSlot(MAYBE_CALL_SLOT).toObject().asCall().arg(i);
return v;
}
inline void
ArgumentsObject::setElement(uint32_t i, const Value &v)
{
JS_ASSERT(!isElementDeleted(i));
HeapValue &lhs = data()->args[i];
if (lhs.isMagic(JS_FORWARD_TO_CALL_OBJECT))
getFixedSlot(MAYBE_CALL_SLOT).toObject().asCall().setArg(i, v);
else
lhs = v;
} }
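A simplified standalone model of the forwarding scheme implemented by element() and setElement() above: each entry either holds the value inline or is flagged to forward, in which case the call object pointed to by MAYBE_CALL_SLOT holds the canonical value. Types and names here are illustrative, not the engine's:

    #include <cassert>
    #include <vector>

    struct Value { int payload; bool forward; };         // forward ~ JS_FORWARD_TO_CALL_OBJECT
    struct CallObjModel { std::vector<Value> args; };

    struct ArgsObjModel {
        std::vector<Value> args;
        CallObjModel *callObj;                           // ~ MAYBE_CALL_SLOT

        Value element(unsigned i) const {
            const Value &v = args[i];
            return v.forward ? callObj->args[i] : v;     // read through the forward
        }
        void setElement(unsigned i, const Value &v) {
            Value &lhs = args[i];
            if (lhs.forward)
                callObj->args[i] = v;                    // canonical copy lives in the call object
            else
                lhs = v;
        }
    };

    int main() {
        CallObjModel call{{ {10, false} }};
        ArgsObjModel args{{ {0, true} }, &call};         // element 0 forwards to the call object
        assert(args.element(0).payload == 10);
        args.setElement(0, Value{11, false});
        assert(call.args[0].payload == 11);              // the write went to the call object
        return 0;
    }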
inline bool inline bool
ArgumentsObject::isElementDeleted(uint32_t i) const ArgumentsObject::isElementDeleted(uint32_t i) const
{ {
JS_ASSERT(i < data()->numArgs);
if (i >= initialLength())
return false;
return IsBitArrayElementSet(data()->deletedBits, initialLength(), i); return IsBitArrayElementSet(data()->deletedBits, initialLength(), i);
} }
@ -74,57 +108,17 @@ ArgumentsObject::markElementDeleted(uint32_t i)
SetBitArrayElement(data()->deletedBits, initialLength(), i); SetBitArrayElement(data()->deletedBits, initialLength(), i);
} }
inline const Value &
ArgumentsObject::element(uint32_t i) const
{
JS_ASSERT(!isElementDeleted(i));
return data()->slots[i];
}
inline void
ArgumentsObject::setElement(uint32_t i, const js::Value &v)
{
JS_ASSERT(!isElementDeleted(i));
data()->slots[i] = v;
}
inline bool inline bool
ArgumentsObject::getElement(uint32_t i, Value *vp) ArgumentsObject::maybeGetElement(uint32_t i, Value *vp)
{ {
if (i >= initialLength() || isElementDeleted(i)) if (i >= initialLength() || isElementDeleted(i))
return false; return false;
/*
* If this arguments object has an associated stack frame, that contains
* the canonical argument value. Note that strict arguments objects do not
* alias named arguments and never have a stack frame.
*/
StackFrame *fp = maybeStackFrame();
JS_ASSERT_IF(isStrictArguments(), !fp);
if (fp)
*vp = fp->canonicalActualArg(i);
else
*vp = element(i); *vp = element(i);
return true; return true;
} }
namespace detail {
struct STATIC_SKIP_INFERENCE CopyNonHoleArgsTo
{
CopyNonHoleArgsTo(ArgumentsObject *argsobj, Value *dst) : argsobj(*argsobj), dst(dst) {}
ArgumentsObject &argsobj;
Value *dst;
bool operator()(uint32_t argi, Value *src) {
*dst++ = *src;
return true;
}
};
} /* namespace detail */
inline bool inline bool
ArgumentsObject::getElements(uint32_t start, uint32_t count, Value *vp) ArgumentsObject::maybeGetElements(uint32_t start, uint32_t count, Value *vp)
{ {
JS_ASSERT(start + count >= start); JS_ASSERT(start + count >= start);
@ -132,33 +126,9 @@ ArgumentsObject::getElements(uint32_t start, uint32_t count, Value *vp)
if (start > length || start + count > length || isAnyElementDeleted()) if (start > length || start + count > length || isAnyElementDeleted())
return false; return false;
StackFrame *fp = maybeStackFrame(); for (uint32_t i = start, end = start + count; i < end; ++i, ++vp)
*vp = element(i);
/* If there's no stack frame for this, argument values are in elements(). */
if (!fp) {
const Value *srcbeg = Valueify(data()->slots) + start;
const Value *srcend = srcbeg + count;
const Value *src = srcbeg;
for (Value *dst = vp; src < srcend; ++dst, ++src)
*dst = *src;
return true; return true;
}
/* Otherwise, element values are on the stack. */
JS_ASSERT(fp->numActualArgs() <= StackSpace::ARGS_LENGTH_MAX);
return fp->forEachCanonicalActualArg(detail::CopyNonHoleArgsTo(this, vp), start, count);
}
inline js::StackFrame *
ArgumentsObject::maybeStackFrame() const
{
return reinterpret_cast<js::StackFrame *>(getFixedSlot(STACK_FRAME_SLOT).toPrivate());
}
inline void
ArgumentsObject::setStackFrame(StackFrame *frame)
{
setFixedSlot(STACK_FRAME_SLOT, PrivateValue(frame));
} }
inline size_t inline size_t
@ -167,7 +137,7 @@ ArgumentsObject::sizeOfMisc(JSMallocSizeOfFun mallocSizeOf) const
return mallocSizeOf(data()); return mallocSizeOf(data());
} }
inline const js::Value & inline const Value &
NormalArgumentsObject::callee() const NormalArgumentsObject::callee() const
{ {
return data()->callee; return data()->callee;
@ -179,6 +149,6 @@ NormalArgumentsObject::clearCallee()
data()->callee.set(compartment(), MagicValue(JS_OVERWRITTEN_CALLEE)); data()->callee.set(compartment(), MagicValue(JS_OVERWRITTEN_CALLEE));
} }
} // namespace js } /* namespace js */
#endif /* ArgumentsObject_inl_h___ */ #endif /* ArgumentsObject_inl_h___ */


@ -22,53 +22,20 @@
using namespace js; using namespace js;
using namespace js::gc; using namespace js::gc;
struct PutArg
{
PutArg(JSCompartment *comp, ArgumentsObject &argsobj)
: compartment(comp), argsobj(argsobj), dst(argsobj.data()->slots) {}
JSCompartment *compartment;
ArgumentsObject &argsobj;
HeapValue *dst;
bool operator()(unsigned i, Value *src) {
JS_ASSERT(dst->isUndefined());
if (!argsobj.isElementDeleted(i))
dst->set(compartment, *src);
++dst;
return true;
}
};
void
js_PutArgsObject(StackFrame *fp)
{
ArgumentsObject &argsobj = fp->argsObj();
if (argsobj.isNormalArguments()) {
JS_ASSERT(argsobj.maybeStackFrame() == fp);
JSCompartment *comp = fp->compartment();
fp->forEachCanonicalActualArg(PutArg(comp, argsobj));
argsobj.setStackFrame(NULL);
} else {
JS_ASSERT(!argsobj.maybeStackFrame());
}
}
ArgumentsObject * ArgumentsObject *
ArgumentsObject::create(JSContext *cx, uint32_t argc, HandleObject callee) ArgumentsObject::create(JSContext *cx, StackFrame *fp)
{ {
JS_ASSERT(argc <= StackSpace::ARGS_LENGTH_MAX); JSFunction &callee = fp->callee();
JS_ASSERT(!callee->toFunction()->hasRest()); RootedObject proto(cx, callee.global().getOrCreateObjectPrototype(cx));
RootedObject proto(cx, callee->global().getOrCreateObjectPrototype(cx));
if (!proto) if (!proto)
return NULL; return NULL;
RootedTypeObject type(cx); RootedTypeObject type(cx);
type = proto->getNewType(cx); type = proto->getNewType(cx);
if (!type) if (!type)
return NULL; return NULL;
bool strict = callee->toFunction()->inStrictMode(); bool strict = callee.inStrictMode();
Class *clasp = strict ? &StrictArgumentsObjectClass : &NormalArgumentsObjectClass; Class *clasp = strict ? &StrictArgumentsObjectClass : &NormalArgumentsObjectClass;
RootedShape emptyArgumentsShape(cx); RootedShape emptyArgumentsShape(cx);
@ -79,59 +46,76 @@ ArgumentsObject::create(JSContext *cx, uint32_t argc, HandleObject callee)
if (!emptyArgumentsShape) if (!emptyArgumentsShape)
return NULL; return NULL;
unsigned numDeletedWords = NumWordsForBitArrayOfLength(argc); unsigned numActuals = fp->numActualArgs();
unsigned numBytes = offsetof(ArgumentsData, slots) + unsigned numFormals = fp->numFormalArgs();
unsigned numDeletedWords = NumWordsForBitArrayOfLength(numActuals);
unsigned numArgs = Max(numActuals, numFormals);
unsigned numBytes = offsetof(ArgumentsData, args) +
numDeletedWords * sizeof(size_t) + numDeletedWords * sizeof(size_t) +
argc * sizeof(Value); numArgs * sizeof(Value);
ArgumentsData *data = (ArgumentsData *)cx->malloc_(numBytes); ArgumentsData *data = (ArgumentsData *)cx->malloc_(numBytes);
if (!data) if (!data)
return NULL; return NULL;
data->callee.init(ObjectValue(*callee)); data->numArgs = numArgs;
for (HeapValue *vp = data->slots; vp != data->slots + argc; vp++) data->callee.init(ObjectValue(callee));
vp->init(UndefinedValue()); data->script = fp->script();
data->deletedBits = (size_t *)(data->slots + argc);
    /* Copy [0, numArgs) into data->args. */
HeapValue *dst = data->args, *dstEnd = data->args + numArgs;
for (Value *src = fp->formals(), *end = src + numFormals; src != end; ++src, ++dst)
dst->init(*src);
if (numActuals > numFormals) {
for (Value *src = fp->actuals() + numFormals; dst != dstEnd; ++src, ++dst)
dst->init(*src);
} else if (numActuals < numFormals) {
for (; dst != dstEnd; ++dst)
dst->init(UndefinedValue());
}
data->deletedBits = reinterpret_cast<size_t *>(dstEnd);
ClearAllBitArrayElements(data->deletedBits, numDeletedWords); ClearAllBitArrayElements(data->deletedBits, numDeletedWords);
/* We have everything needed to fill in the object, so make the object. */
JSObject *obj = JSObject::create(cx, FINALIZE_KIND, emptyArgumentsShape, type, NULL); JSObject *obj = JSObject::create(cx, FINALIZE_KIND, emptyArgumentsShape, type, NULL);
if (!obj) if (!obj)
return NULL; return NULL;
obj->initFixedSlot(INITIAL_LENGTH_SLOT, Int32Value(numActuals << PACKED_BITS_COUNT));
obj->initFixedSlot(DATA_SLOT, PrivateValue(data));
/*
* If it exists and the arguments object aliases formals, the call object
* is the canonical location for formals.
*/
JSScript *script = fp->script();
if (fp->fun()->isHeavyweight() && script->argsObjAliasesFormals()) {
obj->initFixedSlot(MAYBE_CALL_SLOT, ObjectValue(fp->callObj()));
/* Flag each slot that canonically lives in the callObj. */
if (script->bindingsAccessedDynamically) {
for (unsigned i = 0; i < numFormals; ++i)
data->args[i] = MagicValue(JS_FORWARD_TO_CALL_OBJECT);
} else {
for (unsigned i = 0; i < script->numClosedArgs(); ++i)
data->args[script->getClosedArg(i)] = MagicValue(JS_FORWARD_TO_CALL_OBJECT);
}
}
ArgumentsObject &argsobj = obj->asArguments(); ArgumentsObject &argsobj = obj->asArguments();
JS_ASSERT(argsobj.initialLength() == numActuals);
JS_ASSERT(UINT32_MAX > (uint64_t(argc) << PACKED_BITS_COUNT)); JS_ASSERT(!argsobj.hasOverriddenLength());
argsobj.initInitialLength(argc);
argsobj.initData(data);
argsobj.setStackFrame(NULL);
JS_ASSERT(argsobj.numFixedSlots() >= NormalArgumentsObject::RESERVED_SLOTS);
JS_ASSERT(argsobj.numFixedSlots() >= StrictArgumentsObject::RESERVED_SLOTS);
return &argsobj; return &argsobj;
} }
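A worked example of the copy loops above, with illustrative numbers (it assumes, as usual for these frames, that missing actuals are already padded as undefined formals):

    f(a, b) called as f(1, 2, 3, 4):
        numFormals = 2, numActuals = 4, numArgs = Max(2, 4) = 4
        args = [1, 2, 3, 4]               // 0..1 copied from the formals, 2..3 from the extra actuals
    f(a, b, c) called as f(1):
        numFormals = 3, numActuals = 1, numArgs = Max(3, 1) = 3
        args = [1, undefined, undefined]  // the missing actuals read as undefined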
ArgumentsObject * ArgumentsObject *
ArgumentsObject::create(JSContext *cx, StackFrame *fp) ArgumentsObject::createExpected(JSContext *cx, StackFrame *fp)
{ {
JS_ASSERT(fp->script()->needsArgsObj()); JS_ASSERT(fp->script()->needsArgsObj());
ArgumentsObject *argsobj = create(cx, fp);
ArgumentsObject *argsobj = ArgumentsObject::create(cx, fp->numActualArgs(),
RootedObject(cx, &fp->callee()));
if (!argsobj) if (!argsobj)
return NULL; return NULL;
/*
* Strict mode functions have arguments objects that copy the initial
* actual parameter values. Non-strict mode arguments use the frame pointer
* to retrieve up-to-date parameter values.
*/
if (argsobj->isStrictArguments())
fp->forEachCanonicalActualArg(PutArg(cx->compartment, *argsobj));
else
argsobj->setStackFrame(fp);
fp->initArgsObj(*argsobj); fp->initArgsObj(*argsobj);
return argsobj; return argsobj;
} }
@ -139,12 +123,7 @@ ArgumentsObject::create(JSContext *cx, StackFrame *fp)
ArgumentsObject * ArgumentsObject *
ArgumentsObject::createUnexpected(JSContext *cx, StackFrame *fp) ArgumentsObject::createUnexpected(JSContext *cx, StackFrame *fp)
{ {
ArgumentsObject *argsobj = create(cx, fp->numActualArgs(), RootedObject(cx, &fp->callee())); return create(cx, fp);
if (!argsobj)
return NULL;
fp->forEachCanonicalActualArg(PutArg(cx->compartment, *argsobj));
return argsobj;
} }
static JSBool static JSBool
@ -153,10 +132,8 @@ args_delProperty(JSContext *cx, HandleObject obj, HandleId id, Value *vp)
ArgumentsObject &argsobj = obj->asArguments(); ArgumentsObject &argsobj = obj->asArguments();
if (JSID_IS_INT(id)) { if (JSID_IS_INT(id)) {
unsigned arg = unsigned(JSID_TO_INT(id)); unsigned arg = unsigned(JSID_TO_INT(id));
if (arg < argsobj.initialLength() && !argsobj.isElementDeleted(arg)) { if (arg < argsobj.initialLength() && !argsobj.isElementDeleted(arg))
argsobj.setElement(arg, UndefinedValue());
argsobj.markElementDeleted(arg); argsobj.markElementDeleted(arg);
}
} else if (JSID_IS_ATOM(id, cx->runtime->atomState.lengthAtom)) { } else if (JSID_IS_ATOM(id, cx->runtime->atomState.lengthAtom)) {
argsobj.markLengthOverridden(); argsobj.markLengthOverridden();
} else if (JSID_IS_ATOM(id, cx->runtime->atomState.calleeAtom)) { } else if (JSID_IS_ATOM(id, cx->runtime->atomState.calleeAtom)) {
@ -178,22 +155,15 @@ ArgGetter(JSContext *cx, HandleObject obj, HandleId id, Value *vp)
* prototype to point to another Arguments object with a bigger argc. * prototype to point to another Arguments object with a bigger argc.
*/ */
unsigned arg = unsigned(JSID_TO_INT(id)); unsigned arg = unsigned(JSID_TO_INT(id));
if (arg < argsobj.initialLength() && !argsobj.isElementDeleted(arg)) { if (arg < argsobj.initialLength() && !argsobj.isElementDeleted(arg))
if (StackFrame *fp = argsobj.maybeStackFrame()) {
JS_ASSERT_IF(arg < fp->numFormalArgs(), fp->script()->formalIsAliased(arg));
*vp = fp->canonicalActualArg(arg);
} else {
*vp = argsobj.element(arg); *vp = argsobj.element(arg);
}
}
} else if (JSID_IS_ATOM(id, cx->runtime->atomState.lengthAtom)) { } else if (JSID_IS_ATOM(id, cx->runtime->atomState.lengthAtom)) {
if (!argsobj.hasOverriddenLength()) if (!argsobj.hasOverriddenLength())
vp->setInt32(argsobj.initialLength()); *vp = Int32Value(argsobj.initialLength());
} else { } else {
JS_ASSERT(JSID_IS_ATOM(id, cx->runtime->atomState.calleeAtom)); JS_ASSERT(JSID_IS_ATOM(id, cx->runtime->atomState.calleeAtom));
const Value &v = argsobj.callee(); if (!argsobj.callee().isMagic(JS_OVERWRITTEN_CALLEE))
if (!v.isMagic(JS_OVERWRITTEN_CALLEE)) *vp = argsobj.callee();
*vp = v;
} }
return true; return true;
} }
@ -205,21 +175,16 @@ ArgSetter(JSContext *cx, HandleObject obj, HandleId id, JSBool strict, Value *vp
return true; return true;
NormalArgumentsObject &argsobj = obj->asNormalArguments(); NormalArgumentsObject &argsobj = obj->asNormalArguments();
JSScript *script = argsobj.containingScript();
if (JSID_IS_INT(id)) { if (JSID_IS_INT(id)) {
unsigned arg = unsigned(JSID_TO_INT(id)); unsigned arg = unsigned(JSID_TO_INT(id));
if (arg < argsobj.initialLength()) { if (arg < argsobj.initialLength() && !argsobj.isElementDeleted(arg)) {
if (StackFrame *fp = argsobj.maybeStackFrame()) { argsobj.setElement(arg, *vp);
JSScript *script = fp->functionScript(); if (arg < script->function()->nargs)
JS_ASSERT(script->needsArgsObj());
if (arg < fp->numFormalArgs()) {
JS_ASSERT(fp->script()->formalIsAliased(arg));
types::TypeScript::SetArgument(cx, script, arg, *vp); types::TypeScript::SetArgument(cx, script, arg, *vp);
}
fp->canonicalActualArg(arg) = *vp;
return true; return true;
} }
}
} else { } else {
JS_ASSERT(JSID_IS_ATOM(id, cx->runtime->atomState.lengthAtom) || JS_ASSERT(JSID_IS_ATOM(id, cx->runtime->atomState.lengthAtom) ||
JSID_IS_ATOM(id, cx->runtime->atomState.calleeAtom)); JSID_IS_ATOM(id, cx->runtime->atomState.calleeAtom));
@ -275,13 +240,13 @@ args_resolve(JSContext *cx, HandleObject obj, HandleId id, unsigned flags,
bool bool
NormalArgumentsObject::optimizedGetElem(JSContext *cx, StackFrame *fp, const Value &elem, Value *vp) NormalArgumentsObject::optimizedGetElem(JSContext *cx, StackFrame *fp, const Value &elem, Value *vp)
{ {
JS_ASSERT(!fp->hasArgsObj()); JS_ASSERT(!fp->script()->needsArgsObj());
/* Fast path: no need to convert to id when elem is already an int in range. */ /* Fast path: no need to convert to id when elem is already an int in range. */
if (elem.isInt32()) { if (elem.isInt32()) {
int32_t i = elem.toInt32(); int32_t i = elem.toInt32();
if (i >= 0 && uint32_t(i) < fp->numActualArgs()) { if (i >= 0 && uint32_t(i) < fp->numActualArgs()) {
*vp = fp->canonicalActualArg(i); *vp = fp->unaliasedActual(i);
return true; return true;
} }
} }
@ -295,7 +260,7 @@ NormalArgumentsObject::optimizedGetElem(JSContext *cx, StackFrame *fp, const Val
if (JSID_IS_INT(id)) { if (JSID_IS_INT(id)) {
int32_t i = JSID_TO_INT(id); int32_t i = JSID_TO_INT(id);
if (i >= 0 && uint32_t(i) < fp->numActualArgs()) { if (i >= 0 && uint32_t(i) < fp->numActualArgs()) {
*vp = fp->canonicalActualArg(i); *vp = fp->unaliasedActual(i);
return true; return true;
} }
} }
@ -472,34 +437,20 @@ strictargs_enumerate(JSContext *cx, HandleObject obj)
return true; return true;
} }
static void void
args_finalize(FreeOp *fop, JSObject *obj) ArgumentsObject::finalize(FreeOp *fop, JSObject *obj)
{ {
fop->free_(reinterpret_cast<void *>(obj->asArguments().data())); fop->free_(reinterpret_cast<void *>(obj->asArguments().data()));
} }
static void void
args_trace(JSTracer *trc, JSObject *obj) ArgumentsObject::trace(JSTracer *trc, JSObject *obj)
{ {
ArgumentsObject &argsobj = obj->asArguments(); ArgumentsObject &argsobj = obj->asArguments();
ArgumentsData *data = argsobj.data(); ArgumentsData *data = argsobj.data();
MarkValue(trc, &data->callee, js_callee_str); MarkValue(trc, &data->callee, js_callee_str);
MarkValueRange(trc, argsobj.initialLength(), data->slots, js_arguments_str); MarkValueRange(trc, data->numArgs, data->args, js_arguments_str);
MarkScriptUnbarriered(trc, &data->script, "script");
/*
* If a generator's arguments or call object escapes, and the generator
* frame is not executing, the generator object needs to be marked because
* it is not otherwise reachable. An executing generator is rooted by its
* invocation. To distinguish the two cases (which imply different access
* paths to the generator object), we use the JSFRAME_FLOATING_GENERATOR
* flag, which is only set on the StackFrame kept in the generator object's
* JSGenerator.
*/
#if JS_HAS_GENERATORS
StackFrame *fp = argsobj.maybeStackFrame();
if (fp && fp->isFloatingGenerator())
MarkObject(trc, &js_FloatingFrameToGenerator(fp)->obj, "generator object");
#endif
} }
/* /*
@ -521,12 +472,12 @@ Class js::NormalArgumentsObjectClass = {
args_enumerate, args_enumerate,
reinterpret_cast<JSResolveOp>(args_resolve), reinterpret_cast<JSResolveOp>(args_resolve),
JS_ConvertStub, JS_ConvertStub,
args_finalize, /* finalize */ ArgumentsObject::finalize,
NULL, /* checkAccess */ NULL, /* checkAccess */
NULL, /* call */ NULL, /* call */
NULL, /* construct */ NULL, /* construct */
NULL, /* hasInstance */ NULL, /* hasInstance */
args_trace, ArgumentsObject::trace,
{ {
NULL, /* equality */ NULL, /* equality */
NULL, /* outerObject */ NULL, /* outerObject */
@ -555,12 +506,12 @@ Class js::StrictArgumentsObjectClass = {
strictargs_enumerate, strictargs_enumerate,
reinterpret_cast<JSResolveOp>(strictargs_resolve), reinterpret_cast<JSResolveOp>(strictargs_resolve),
JS_ConvertStub, JS_ConvertStub,
args_finalize, /* finalize */ ArgumentsObject::finalize,
NULL, /* checkAccess */ NULL, /* checkAccess */
NULL, /* call */ NULL, /* call */
NULL, /* construct */ NULL, /* construct */
NULL, /* hasInstance */ NULL, /* hasInstance */
args_trace, ArgumentsObject::trace,
{ {
NULL, /* equality */ NULL, /* equality */
NULL, /* outerObject */ NULL, /* outerObject */


@ -16,17 +16,25 @@ namespace js {
* ArgumentsData stores the initial indexed arguments provided to the * ArgumentsData stores the initial indexed arguments provided to the
 * corresponding function call and that function itself. It is used to store arguments[i] * corresponding function call and that function itself. It is used to store arguments[i]
* and arguments.callee -- up until the corresponding property is modified, * and arguments.callee -- up until the corresponding property is modified,
* when the relevant value is overwritten with MagicValue(JS_ARGS_HOLE) to * when the relevant value is flagged to memorialize the modification.
* memorialize the modification.
*/ */
struct ArgumentsData struct ArgumentsData
{ {
/* /*
* arguments.callee, or MagicValue(JS_ARGS_HOLE) if arguments.callee has * numArgs = Max(numFormalArgs, numActualArgs)
* been modified. * The array 'args' has numArgs elements.
*/
unsigned numArgs;
/*
* arguments.callee, or MagicValue(JS_OVERWRITTEN_CALLEE) if
* arguments.callee has been modified.
*/ */
HeapValue callee; HeapValue callee;
/* The script for the function containing this arguments object. */
JSScript *script;
/* /*
* Pointer to an array of bits indicating, for every argument in 'slots', * Pointer to an array of bits indicating, for every argument in 'slots',
* whether the element has been deleted. See isElementDeleted comment. * whether the element has been deleted. See isElementDeleted comment.
@ -34,17 +42,25 @@ struct ArgumentsData
size_t *deletedBits; size_t *deletedBits;
/* /*
* Values of the arguments for this object, or MagicValue(JS_ARGS_HOLE) if * This array holds either the current argument value or the magic value
* the indexed argument has been modified. * JS_FORWARD_TO_CALL_OBJECT. The latter means that the function has both a
* CallObject and an ArgumentsObject AND the particular formal variable is
* aliased by the CallObject. In such cases, the CallObject holds the
* canonical value so any element access to the arguments object should
* load the value out of the CallObject (which is pointed to by
* MAYBE_CALL_SLOT).
*/ */
HeapValue slots[1]; HeapValue args[1];
/* For jit use: */
static ptrdiff_t offsetOfArgs() { return offsetof(ArgumentsData, args); }
}; };
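To make the layout described above concrete, here is an illustrative size computation mirroring the allocation in ArgumentsObject::create earlier in this patch. It assumes a hypothetical 64-bit build with 8-byte Value and size_t; the real field types and offsets differ:

    #include <cstddef>
    #include <cstdint>

    struct ValueModel { uint64_t bits; };
    struct ArgumentsDataModel {
        unsigned    numArgs;
        ValueModel  callee;
        void       *script;
        size_t     *deletedBits;
        ValueModel  args[1];              // really numArgs entries; the deleted-bits words follow
    };

    int main() {
        unsigned numActuals = 5, numFormals = 3;
        unsigned numArgs = numActuals > numFormals ? numActuals : numFormals;  // 5
        unsigned numDeletedWords = (numActuals + 63) / 64;                     // 1 word in this model
        size_t numBytes = offsetof(ArgumentsDataModel, args)
                        + numDeletedWords * sizeof(size_t)
                        + numArgs * sizeof(ValueModel);
        return numBytes != 0 ? 0 : 1;     // args[0..4] start at offsetof(args); the bit words sit after them
    }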
/* /*
* ArgumentsObject instances represent |arguments| objects created to store * ArgumentsObject instances represent |arguments| objects created to store
* function arguments when a function is called. It's expensive to create such * function arguments when a function is called. It's expensive to create such
* objects if they're never used, so they're only created lazily. (See * objects if they're never used, so they're only created when they are
* js::StackFrame::setArgsObj and friends.) * potentially used.
* *
* Arguments objects are complicated because, for non-strict mode code, they * Arguments objects are complicated because, for non-strict mode code, they
* must alias any named arguments which were provided to the function. Gnarly * must alias any named arguments which were provided to the function. Gnarly
@ -75,43 +91,27 @@ struct ArgumentsData
* been modified, then the current value of arguments.length is stored in * been modified, then the current value of arguments.length is stored in
* another slot associated with a new property. * another slot associated with a new property.
* DATA_SLOT * DATA_SLOT
* Stores an ArgumentsData* storing argument values and the callee, or * Stores an ArgumentsData*, described above.
* sentinels for any of these if the corresponding property is modified.
* Use callee() to access the callee/sentinel, and use
* element/addressOfElement/setElement to access the values stored in
* the ArgumentsData. If you're simply looking to get arguments[i],
* however, use getElement or getElements to avoid spreading arguments
* object implementation details around too much.
* STACK_FRAME_SLOT
* Stores the function's stack frame for non-strict arguments objects until
* the function returns, when it is replaced with null. When an arguments
* object is created on-trace its private is JS_ARGUMENTS_OBJECT_ON_TRACE,
* and when the trace exits its private is replaced with the stack frame or
* null, as appropriate. This slot is used by strict arguments objects as
* well, but the slot is always null. Conceptually it would be better to
* remove this oddity, but preserving it allows us to work with arguments
* objects of either kind more abstractly, so we keep it for now.
*/ */
class ArgumentsObject : public JSObject class ArgumentsObject : public JSObject
{ {
protected:
static const uint32_t INITIAL_LENGTH_SLOT = 0; static const uint32_t INITIAL_LENGTH_SLOT = 0;
static const uint32_t DATA_SLOT = 1; static const uint32_t DATA_SLOT = 1;
static const uint32_t STACK_FRAME_SLOT = 2; static const uint32_t MAYBE_CALL_SLOT = 2;
/* Lower-order bit stolen from the length slot. */
static const uint32_t LENGTH_OVERRIDDEN_BIT = 0x1; static const uint32_t LENGTH_OVERRIDDEN_BIT = 0x1;
static const uint32_t PACKED_BITS_COUNT = 1; static const uint32_t PACKED_BITS_COUNT = 1;
void initInitialLength(uint32_t length); static ArgumentsObject *create(JSContext *cx, StackFrame *fp);
void initData(ArgumentsData *data); inline ArgumentsData *data() const;
static ArgumentsObject *create(JSContext *cx, uint32_t argc, HandleObject callee);
public: public:
static const uint32_t RESERVED_SLOTS = 3; static const uint32_t RESERVED_SLOTS = 3;
static const gc::AllocKind FINALIZE_KIND = gc::FINALIZE_OBJECT4; static const gc::AllocKind FINALIZE_KIND = gc::FINALIZE_OBJECT4;
/* Create an arguments object for a frame that is expecting them. */ /* Create an arguments object for a frame that is expecting them. */
static ArgumentsObject *create(JSContext *cx, StackFrame *fp); static ArgumentsObject *createExpected(JSContext *cx, StackFrame *fp);
/* /*
* Purposefully disconnect the returned arguments object from the frame * Purposefully disconnect the returned arguments object from the frame
@ -127,33 +127,13 @@ class ArgumentsObject : public JSObject
*/ */
inline uint32_t initialLength() const; inline uint32_t initialLength() const;
/* The script for the function containing this arguments object. */
JSScript *containingScript() const;
/* True iff arguments.length has been assigned or its attributes changed. */ /* True iff arguments.length has been assigned or its attributes changed. */
inline bool hasOverriddenLength() const; inline bool hasOverriddenLength() const;
inline void markLengthOverridden(); inline void markLengthOverridden();
/*
* Attempt to speedily and efficiently access the i-th element of this
* arguments object. Return true if the element was speedily returned.
* Return false if the element must be looked up more slowly using
* getProperty or some similar method.
*
* NB: Returning false does not indicate error!
*/
inline bool getElement(uint32_t i, js::Value *vp);
/*
* Attempt to speedily and efficiently get elements [start, start + count)
* of this arguments object into the locations starting at |vp|. Return
* true if all elements were copied. Return false if the elements must be
* gotten more slowly, perhaps using a getProperty or some similar method
* in a loop.
*
* NB: Returning false does not indicate error!
*/
inline bool getElements(uint32_t start, uint32_t count, js::Value *vp);
inline js::ArgumentsData *data() const;
/* /*
* Because the arguments object is a real object, its elements may be * Because the arguments object is a real object, its elements may be
* deleted. This is implemented by setting a 'deleted' flag for the arg * deleted. This is implemented by setting a 'deleted' flag for the arg
@ -172,18 +152,51 @@ class ArgumentsObject : public JSObject
inline bool isAnyElementDeleted() const; inline bool isAnyElementDeleted() const;
inline void markElementDeleted(uint32_t i); inline void markElementDeleted(uint32_t i);
inline const js::Value &element(uint32_t i) const; /*
inline void setElement(uint32_t i, const js::Value &v); * An ArgumentsObject serves two roles:
 * - a real object, accessed through regular object operations, e.g.,
* JSObject::getElement corresponding to 'arguments[i]';
* - a VM-internal data structure, storing the value of arguments (formal
 * and actual) that are accessed directly by the VM when reading the
* value of a formal parameter.
* There are two ways to access the ArgumentsData::args corresponding to
* these two use cases:
 * - object access should use element(i), which will take care of
* forwarding when the value is JS_FORWARD_TO_CALL_OBJECT;
* - VM argument access should use arg(i) which will assert that the
* value is not JS_FORWARD_TO_CALL_OBJECT (since, if such forwarding was
 * needed, the frontend should have emitted JSOP_GETALIASEDVAR).
*/
inline const Value &element(uint32_t i) const;
inline void setElement(uint32_t i, const Value &v);
inline const Value &arg(unsigned i) const;
inline void setArg(unsigned i, const Value &v);
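Hypothetical call sites, only to make the distinction drawn in the comment above concrete (these are not lines from the patch):

    // object-level access, e.g. while implementing argsobj[i]:
    //     Value v = argsobj.element(i);   // may forward to the CallObject
    // VM-level access to a formal known not to be aliased:
    //     Value v = argsobj.arg(i);       // asserts that no forwarding is needed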
/* The stack frame for this ArgumentsObject, if the frame is still active. */ /*
inline js::StackFrame *maybeStackFrame() const; * Attempt to speedily and efficiently access the i-th element of this
inline void setStackFrame(js::StackFrame *frame); * arguments object. Return true if the element was speedily returned.
* Return false if the element must be looked up more slowly using
* getProperty or some similar method. The second overload copies the
* elements [start, start + count) into the locations starting at 'vp'.
*
* NB: Returning false does not indicate error!
*/
inline bool maybeGetElement(uint32_t i, Value *vp);
inline bool maybeGetElements(uint32_t start, uint32_t count, js::Value *vp);
/* /*
* Measures things hanging off this ArgumentsObject that are counted by the * Measures things hanging off this ArgumentsObject that are counted by the
* |miscSize| argument in JSObject::sizeOfExcludingThis(). * |miscSize| argument in JSObject::sizeOfExcludingThis().
*/ */
inline size_t sizeOfMisc(JSMallocSizeOfFun mallocSizeOf) const; inline size_t sizeOfMisc(JSMallocSizeOfFun mallocSizeOf) const;
static void finalize(FreeOp *fop, JSObject *obj);
static void trace(JSTracer *trc, JSObject *obj);
/* For jit use: */
static size_t getDataSlotOffset() {
return getFixedSlotOffset(DATA_SLOT);
}
}; };
class NormalArgumentsObject : public ArgumentsObject class NormalArgumentsObject : public ArgumentsObject


@ -3137,10 +3137,16 @@ DebuggerArguments_getArg(JSContext *cx, unsigned argc, Value *vp)
*/ */
JS_ASSERT(i >= 0); JS_ASSERT(i >= 0);
Value arg; Value arg;
if (unsigned(i) < fp->numActualArgs()) if (unsigned(i) < fp->numActualArgs()) {
arg = fp->canonicalActualArg(i); if (unsigned(i) < fp->numFormalArgs() && fp->script()->formalLivesInCallObject(i))
arg = fp->callObj().arg(i);
else if (fp->script()->argsObjAliasesFormals())
arg = fp->argsObj().arg(i);
else else
arg = fp->unaliasedActual(i);
} else {
arg.setUndefined(); arg.setUndefined();
}
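    // Summary of the dispatch above (a hedged reading of the new logic): for an
    // actual argument i,
    //   - formal i lives in the call object         -> fp->callObj().arg(i)
    //   - the arguments object aliases the formals  -> fp->argsObj().arg(i)
    //   - otherwise                                 -> fp->unaliasedActual(i)
    // and indices at or past numActualArgs() read as undefined.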
if (!Debugger::fromChildJSObject(thisobj)->wrapDebuggeeValue(cx, &arg)) if (!Debugger::fromChildJSObject(thisobj)->wrapDebuggeeValue(cx, &arg))
return false; return false;
@ -3370,6 +3376,7 @@ js::EvaluateInEnv(JSContext *cx, Handle<Env*> env, StackFrame *fp, const jschar
if (!script) if (!script)
return false; return false;
script->isActiveEval = true;
return ExecuteKernel(cx, script, *env, fp->thisValue(), EXECUTE_DEBUG, fp, rval); return ExecuteKernel(cx, script, *env, fp->thisValue(), EXECUTE_DEBUG, fp, rval);
} }


@ -14,8 +14,7 @@ namespace js {
inline inline
ScopeCoordinate::ScopeCoordinate(jsbytecode *pc) ScopeCoordinate::ScopeCoordinate(jsbytecode *pc)
: hops(GET_UINT16(pc)), binding(GET_UINT16(pc + 2)), : hops(GET_UINT16(pc)), slot(GET_UINT16(pc + 2))
frameBinding(GET_UINT16(pc + 8))
{ {
JS_ASSERT(JOF_OPTYPE(*pc) == JOF_SCOPECOORD); JS_ASSERT(JOF_OPTYPE(*pc) == JOF_SCOPECOORD);
} }
@ -36,69 +35,22 @@ ScopeObject::setEnclosingScope(JSContext *cx, HandleObject obj)
return true; return true;
} }
inline StackFrame *
ScopeObject::maybeStackFrame() const
{
JS_ASSERT(!isStaticBlock() && !isWith());
return reinterpret_cast<StackFrame *>(JSObject::getPrivate());
}
inline void
ScopeObject::setStackFrame(StackFrame *frame)
{
return setPrivate(frame);
}
inline const Value & inline const Value &
ScopeObject::aliasedVar(ScopeCoordinate sc) ScopeObject::aliasedVar(ScopeCoordinate sc)
{ {
/* XXX: all this is temporary until the last patch of 659577 */ JS_ASSERT(isCall() || isClonedBlock());
StackFrame *fp = maybeStackFrame(); JS_STATIC_ASSERT(CALL_BLOCK_RESERVED_SLOTS == CallObject::RESERVED_SLOTS);
Bindings &bindings = fp->script()->bindings; JS_STATIC_ASSERT(CALL_BLOCK_RESERVED_SLOTS == BlockObject::RESERVED_SLOTS);
if (isCall()) { return getSlot(CALL_BLOCK_RESERVED_SLOTS + sc.slot);
JS_ASSERT(sc.binding == sc.frameBinding);
if (bindings.bindingIsArg(sc.binding)) {
unsigned arg = bindings.bindingToArg(sc.binding);
JS_ASSERT(fp->script()->formalLivesInCallObject(arg));
return fp->formalArg(arg);
}
unsigned var = bindings.bindingToLocal(sc.binding);
JS_ASSERT(fp->script()->varIsAliased(var));
return fp->localSlot(var);
}
unsigned var = bindings.bindingToLocal(sc.frameBinding);
fp = js_LiveFrameIfGenerator(fp);
JS_ASSERT(var == sc.binding + asClonedBlock().staticBlock().stackDepth() + fp->numFixed());
JS_ASSERT(asClonedBlock().staticBlock().isAliased(sc.binding));
return fp->localSlot(var);
} }
inline void inline void
ScopeObject::setAliasedVar(ScopeCoordinate sc, const Value &v) ScopeObject::setAliasedVar(ScopeCoordinate sc, const Value &v)
{ {
/* XXX: all this is temporary until the last patch of 659577 */ JS_ASSERT(isCall() || isClonedBlock());
StackFrame *fp = maybeStackFrame(); JS_STATIC_ASSERT(CALL_BLOCK_RESERVED_SLOTS == CallObject::RESERVED_SLOTS);
Bindings &bindings = fp->script()->bindings; JS_STATIC_ASSERT(CALL_BLOCK_RESERVED_SLOTS == BlockObject::RESERVED_SLOTS);
if (isCall()) { setSlot(CALL_BLOCK_RESERVED_SLOTS + sc.slot, v);
JS_ASSERT(sc.binding == sc.frameBinding);
if (bindings.bindingIsArg(sc.binding)) {
unsigned arg = bindings.bindingToArg(sc.binding);
JS_ASSERT(fp->script()->formalLivesInCallObject(arg));
fp->formalArg(arg) = v;
} else {
unsigned var = bindings.bindingToLocal(sc.binding);
JS_ASSERT(fp->script()->varIsAliased(var));
fp->localSlot(var) = v;
}
} else {
unsigned var = bindings.bindingToLocal(sc.frameBinding);
fp = js_LiveFrameIfGenerator(fp);
JS_ASSERT(var == sc.binding + asClonedBlock().staticBlock().stackDepth() + fp->numFixed());
JS_ASSERT(asClonedBlock().staticBlock().isAliased(sc.binding));
fp->localSlot(var) = v;
}
} }
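A standalone sketch of how a (hops, slot) scope coordinate appears to be resolved against this layout: the caller hops outward across 'hops' scope objects and then reads the slot just past the reserved slots, which is the read shown above. The walk itself is not part of this hunk, so the sketch is illustrative:

    #include <cassert>
    #include <vector>

    struct ValueModel { int v; };
    struct ScopeModel {
        ScopeModel *enclosing;
        std::vector<ValueModel> slots;       // [0, reserved) are reserved slots
    };

    static ValueModel aliasedVar(ScopeModel *scope, unsigned hops,
                                 unsigned slot, unsigned reserved) {
        while (hops--)
            scope = scope->enclosing;        // each hop crosses one enclosing scope object
        return scope->slots[reserved + slot];
    }

    int main() {
        ScopeModel outer{nullptr, {{0}, {0}, {7}}};   // reserved = 2, aliased var in slot 0
        ScopeModel inner{&outer, {{0}, {0}}};
        assert(aliasedVar(&inner, 1, 0, 2).v == 7);   // hops = 1, slot = 0
        return 0;
    }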
/*static*/ inline size_t /*static*/ inline size_t
@ -136,61 +88,35 @@ CallObject::getCalleeFunction() const
} }
inline const Value & inline const Value &
CallObject::arg(unsigned i) const CallObject::arg(unsigned i, MaybeCheckAliasing checkAliasing) const
{ {
JS_ASSERT(i < getCalleeFunction()->nargs); JS_ASSERT_IF(checkAliasing, getCalleeFunction()->script()->formalLivesInCallObject(i));
return getSlot(RESERVED_SLOTS + i); return getSlot(RESERVED_SLOTS + i);
} }
inline void inline void
CallObject::setArg(unsigned i, const Value &v) CallObject::setArg(unsigned i, const Value &v, MaybeCheckAliasing checkAliasing)
{ {
JS_ASSERT(i < getCalleeFunction()->nargs); JS_ASSERT_IF(checkAliasing, getCalleeFunction()->script()->formalLivesInCallObject(i));
setSlot(RESERVED_SLOTS + i, v); setSlot(RESERVED_SLOTS + i, v);
} }
inline void
CallObject::initArgUnchecked(unsigned i, const Value &v)
{
JS_ASSERT(i < getCalleeFunction()->nargs);
initSlotUnchecked(RESERVED_SLOTS + i, v);
}
inline const Value & inline const Value &
CallObject::var(unsigned i) const CallObject::var(unsigned i, MaybeCheckAliasing checkAliasing) const
{ {
JSFunction *fun = getCalleeFunction(); JSFunction *fun = getCalleeFunction();
JS_ASSERT(fun->nargs == fun->script()->bindings.numArgs()); JS_ASSERT_IF(checkAliasing, fun->script()->varIsAliased(i));
JS_ASSERT(i < fun->script()->bindings.numVars());
return getSlot(RESERVED_SLOTS + fun->nargs + i); return getSlot(RESERVED_SLOTS + fun->nargs + i);
} }
inline void inline void
CallObject::setVar(unsigned i, const Value &v) CallObject::setVar(unsigned i, const Value &v, MaybeCheckAliasing checkAliasing)
{ {
JSFunction *fun = getCalleeFunction(); JSFunction *fun = getCalleeFunction();
JS_ASSERT(fun->nargs == fun->script()->bindings.numArgs()); JS_ASSERT_IF(checkAliasing, fun->script()->varIsAliased(i));
JS_ASSERT(i < fun->script()->bindings.numVars());
setSlot(RESERVED_SLOTS + fun->nargs + i, v); setSlot(RESERVED_SLOTS + fun->nargs + i, v);
} }
inline void
CallObject::initVarUnchecked(unsigned i, const Value &v)
{
JSFunction *fun = getCalleeFunction();
JS_ASSERT(fun->nargs == fun->script()->bindings.numArgs());
JS_ASSERT(i < fun->script()->bindings.numVars());
initSlotUnchecked(RESERVED_SLOTS + fun->nargs + i, v);
}
inline void
CallObject::copyValues(unsigned nargs, Value *argv, unsigned nvars, Value *slots)
{
JS_ASSERT(slotInRange(RESERVED_SLOTS + nargs + nvars, SENTINEL_ALLOWED));
copySlotRange(RESERVED_SLOTS, argv, nargs);
copySlotRange(RESERVED_SLOTS + nargs, slots, nvars);
}
inline HeapSlotArray inline HeapSlotArray
CallObject::argArray() CallObject::argArray()
{ {
@ -232,6 +158,13 @@ BlockObject::slotCount() const
return propertyCount(); return propertyCount();
} }
inline unsigned
BlockObject::slotToFrameLocal(JSScript *script, unsigned i)
{
JS_ASSERT(i < slotCount());
return script->nfixed + stackDepth() + i;
}
inline const Value & inline const Value &
BlockObject::slotValue(unsigned i) BlockObject::slotValue(unsigned i)
{ {
@ -283,9 +216,12 @@ StaticBlockObject::maybeDefinitionParseNode(unsigned i)
inline void inline void
StaticBlockObject::setAliased(unsigned i, bool aliased) StaticBlockObject::setAliased(unsigned i, bool aliased)
{ {
JS_ASSERT_IF(i > 0, slotValue(i-1).isBoolean());
setSlotValue(i, BooleanValue(aliased)); setSlotValue(i, BooleanValue(aliased));
if (aliased) if (aliased && !needsClone()) {
JSObject::setPrivate(reinterpret_cast<void *>(1)); setSlotValue(0, MagicValue(JS_BLOCK_NEEDS_CLONE));
JS_ASSERT(needsClone());
}
} }
inline bool inline bool
@ -295,9 +231,9 @@ StaticBlockObject::isAliased(unsigned i)
} }
inline bool inline bool
StaticBlockObject::needsClone() const StaticBlockObject::needsClone()
{ {
return JSObject::getPrivate() != NULL; return !slotValue(0).isFalse();
} }
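    // Reading of the change above: slot 0 of the static block doubles as the
    // "needs clone" flag. It holds binding 0's boolean aliased flag, and marking
    // any later binding aliased overwrites it with JS_BLOCK_NEEDS_CLONE, so slot 0
    // is non-false exactly when some binding is aliased and needsClone() can
    // simply test !slotValue(0).isFalse().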
inline bool inline bool
@ -313,15 +249,16 @@ ClonedBlockObject::staticBlock() const
} }
inline const Value & inline const Value &
ClonedBlockObject::var(unsigned i) ClonedBlockObject::var(unsigned i, MaybeCheckAliasing checkAliasing)
{ {
JS_ASSERT(!maybeStackFrame()); JS_ASSERT_IF(checkAliasing, staticBlock().isAliased(i));
return slotValue(i); return slotValue(i);
} }
inline void inline void
ClonedBlockObject::setVar(unsigned i, const Value &v) ClonedBlockObject::setVar(unsigned i, const Value &v, MaybeCheckAliasing checkAliasing)
{ {
JS_ASSERT_IF(checkAliasing, staticBlock().isAliased(i));
setSlotValue(i, v); setSlotValue(i, v);
} }


@ -45,115 +45,24 @@ js::ScopeCoordinateBlockChain(JSScript *script, jsbytecode *pc)
} }
PropertyName * PropertyName *
js::ScopeCoordinateName(JSScript *script, jsbytecode *pc) js::ScopeCoordinateName(JSRuntime *rt, JSScript *script, jsbytecode *pc)
{ {
StaticBlockObject *maybeBlock = ScopeCoordinateBlockChain(script, pc); StaticBlockObject *maybeBlock = ScopeCoordinateBlockChain(script, pc);
ScopeCoordinate sc(pc); ScopeCoordinate sc(pc);
uint32_t targetSlot = ScopeObject::CALL_BLOCK_RESERVED_SLOTS + sc.slot;
uint32_t targetSlot; Shape *shape = maybeBlock ? maybeBlock->lastProperty() : script->bindings.lastShape();
Shape *shape;
if (maybeBlock) {
targetSlot = BlockObject::RESERVED_SLOTS + sc.binding;
shape = maybeBlock->lastProperty();
} else {
targetSlot = CallObject::RESERVED_SLOTS + sc.binding;
shape = script->bindings.lastShape();
}
Shape::Range r = shape->all(); Shape::Range r = shape->all();
while (r.front().slot() != targetSlot) while (r.front().slot() != targetSlot)
r.popFront(); r.popFront();
return JSID_TO_ATOM(r.front().propid())->asPropertyName(); jsid id = r.front().propid();
/* Beware nameless destructuring formal. */
if (!JSID_IS_ATOM(id))
return rt->atomState.emptyAtom;
return JSID_TO_ATOM(id)->asPropertyName();
} }
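    // One way the "nameless destructuring formal" case can presumably arise
    // (an illustrative guess, not something stated in the patch): in
    //     function f([x]) { ... }
    // the implicit formal that receives the value being destructured has no
    // source name, so its binding's propid is not an atom and the empty atom
    // is returned for it.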
/*****************************************************************************/ /*****************************************************************************/
void
js_PutCallObject(StackFrame *fp, CallObject &callobj)
{
JS_ASSERT(callobj.maybeStackFrame() == fp);
JS_ASSERT_IF(fp->isEvalFrame(), fp->isStrictEvalFrame());
JS_ASSERT(fp->isEvalFrame() == callobj.isForEval());
JSScript *script = fp->script();
Bindings &bindings = script->bindings;
if (callobj.isForEval()) {
JS_ASSERT(script->strictModeCode);
JS_ASSERT(bindings.numArgs() == 0);
/* This could be optimized as below, but keep it simple for now. */
callobj.copyValues(0, NULL, bindings.numVars(), fp->slots());
} else {
JSFunction *fun = fp->fun();
JS_ASSERT(script == callobj.getCalleeFunction()->script());
JS_ASSERT(script == fun->script());
unsigned n = bindings.count();
if (n > 0) {
uint32_t nvars = bindings.numVars();
uint32_t nargs = bindings.numArgs();
JS_ASSERT(fun->nargs == nargs);
JS_ASSERT(nvars + nargs == n);
JSScript *script = fun->script();
if (script->bindingsAccessedDynamically
#ifdef JS_METHODJIT
|| script->debugMode
#endif
) {
callobj.copyValues(nargs, fp->formalArgs(), nvars, fp->slots());
} else {
/*
* For each arg & var that is closed over, copy it from the stack
* into the call object. We use initArg/VarUnchecked because,
* when you call a getter on a call object, js_NativeGetInline
* caches the return value in the slot, so we can't assert that
* it's undefined.
*/
uint32_t nclosed = script->numClosedArgs();
for (uint32_t i = 0; i < nclosed; i++) {
uint32_t e = script->getClosedArg(i);
#ifdef JS_GC_ZEAL
callobj.setArg(e, fp->formalArg(e));
#else
callobj.initArgUnchecked(e, fp->formalArg(e));
#endif
}
nclosed = script->numClosedVars();
for (uint32_t i = 0; i < nclosed; i++) {
uint32_t e = script->getClosedVar(i);
#ifdef JS_GC_ZEAL
callobj.setVar(e, fp->slots()[e]);
#else
callobj.initVarUnchecked(e, fp->slots()[e]);
#endif
}
}
/*
* Update the args and vars for the active call if this is an outer
* function in a script nesting.
*/
types::TypeScriptNesting *nesting = script->nesting();
if (nesting && script->isOuterFunction) {
nesting->argArray = callobj.argArray();
nesting->varArray = callobj.varArray();
}
}
/* Clear private pointers to fp, which is about to go away. */
if (js_IsNamedLambda(fun)) {
JSObject &env = callobj.enclosingScope();
JS_ASSERT(env.asDeclEnv().maybeStackFrame() == fp);
env.setPrivate(NULL);
}
}
callobj.setStackFrame(NULL);
}
/* /*
* Construct a call object for the given bindings. If this is a call object * Construct a call object for the given bindings. If this is a call object
* for a function invocation, callee should be the function being called. * for a function invocation, callee should be the function being called.
@ -161,14 +70,18 @@ js_PutCallObject(StackFrame *fp, CallObject &callobj)
* must be null. * must be null.
*/ */
CallObject * CallObject *
CallObject::create(JSContext *cx, JSScript *script, HandleObject enclosing, HandleObject callee) CallObject::create(JSContext *cx, JSScript *script, HandleObject enclosing, HandleFunction callee)
{ {
RootedShape shape(cx); RootedShape shape(cx);
shape = script->bindings.callObjectShape(cx); shape = script->bindings.callObjectShape(cx);
if (shape == NULL) if (shape == NULL)
return NULL; return NULL;
gc::AllocKind kind = gc::GetGCObjectKind(shape->numFixedSlots() + 1); gc::AllocKind kind = gc::GetGCObjectKind(shape->numFixedSlots());
#ifdef JS_THREADSAFE
JS_ASSERT(CanBeFinalizedInBackground(kind, &CallClass));
kind = gc::GetBackgroundAllocKind(kind);
#endif
RootedTypeObject type(cx); RootedTypeObject type(cx);
type = cx->compartment->getEmptyType(cx); type = cx->compartment->getEmptyType(cx);
@ -194,21 +107,9 @@ CallObject::create(JSContext *cx, JSScript *script, HandleObject enclosing, Hand
return NULL; return NULL;
} }
#ifdef DEBUG
JS_ASSERT(!obj->inDictionaryMode());
for (Shape::Range r = obj->lastProperty(); !r.empty(); r.popFront()) {
const Shape &s = r.front();
if (s.hasSlot()) {
JS_ASSERT(s.slot() + 1 == obj->slotSpan());
break;
}
}
#endif
if (!obj->asScope().setEnclosingScope(cx, enclosing)) if (!obj->asScope().setEnclosingScope(cx, enclosing))
return NULL; return NULL;
JS_ASSERT_IF(callee, callee->isFunction());
obj->initFixedSlot(CALLEE_SLOT, ObjectOrNullValue(callee)); obj->initFixedSlot(CALLEE_SLOT, ObjectOrNullValue(callee));
/* /*
@ -227,7 +128,6 @@ CallObject *
CallObject::createForFunction(JSContext *cx, StackFrame *fp) CallObject::createForFunction(JSContext *cx, StackFrame *fp)
{ {
JS_ASSERT(fp->isNonEvalFunctionFrame()); JS_ASSERT(fp->isNonEvalFunctionFrame());
JS_ASSERT(!fp->hasCallObj());
RootedObject scopeChain(cx, fp->scopeChain()); RootedObject scopeChain(cx, fp->scopeChain());
@ -241,11 +141,24 @@ CallObject::createForFunction(JSContext *cx, StackFrame *fp)
return NULL; return NULL;
} }
CallObject *callobj = create(cx, fp->script(), scopeChain, RootedObject(cx, &fp->callee())); JSScript *script = fp->script();
CallObject *callobj = create(cx, script, scopeChain, RootedFunction(cx, &fp->callee()));
if (!callobj) if (!callobj)
return NULL; return NULL;
callobj->setStackFrame(fp); /* Copy in the closed-over formal arguments. */
if (script->bindingsAccessedDynamically) {
Value *formals = fp->formals();
for (unsigned slot = 0, n = fp->fun()->nargs; slot < n; ++slot)
callobj->setArg(slot, formals[slot]);
} else if (unsigned n = script->numClosedArgs()) {
Value *formals = fp->formals();
for (unsigned i = 0; i < n; ++i) {
uint32_t slot = script->getClosedArg(i);
callobj->setArg(slot, formals[slot]);
}
}
return callobj; return callobj;
} }
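
createForFunction now copies the closed-over formals into the call object as part of creating it, rather than leaving that work to a put-on-pop step. A small standalone sketch of that copy policy, under the assumption that `dynamicAccess` and `closedArgs` stand in for the script's dynamically-accessed-bindings flag and its closed-args list; every other name is invented for the example.

// Sketch: populate a freshly created environment record with aliased formals.
#include <cstdint>
#include <vector>
#include <iostream>

struct ModelCallObject {
    std::vector<double> args;             // slots for formal arguments
    void setArg(unsigned i, double v) { args.at(i) = v; }
};

// If any name can be accessed dynamically (eval, with), copy every formal;
// otherwise copy only the formals known to be closed over.
static void populateCallObject(ModelCallObject &callobj,
                               const std::vector<double> &formals,
                               bool dynamicAccess,
                               const std::vector<uint32_t> &closedArgs)
{
    if (dynamicAccess) {
        for (unsigned i = 0; i < formals.size(); i++)
            callobj.setArg(i, formals[i]);
    } else {
        for (uint32_t slot : closedArgs)
            callobj.setArg(slot, formals[slot]);
    }
}

int main()
{
    ModelCallObject callobj{std::vector<double>(3, 0.0)};
    populateCallObject(callobj, {1.0, 2.0, 3.0}, /* dynamicAccess = */ false, {2});
    std::cout << callobj.args[2] << '\n';  // only the closed-over formal was copied
}
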
@ -261,45 +174,29 @@ CallObject::copyUnaliasedValues(StackFrame *fp)
/* Copy the unaliased formals. */ /* Copy the unaliased formals. */
for (unsigned i = 0; i < script->bindings.numArgs(); ++i) { for (unsigned i = 0; i < script->bindings.numArgs(); ++i) {
if (!script->formalLivesInCallObject(i)) if (!script->formalLivesInCallObject(i)) {
setArg(i, fp->formalArg(i)); if (script->argsObjAliasesFormals())
setArg(i, fp->argsObj().arg(i), DONT_CHECK_ALIASING);
else
setArg(i, fp->unaliasedFormal(i), DONT_CHECK_ALIASING);
}
} }
/* Copy the unaliased var/let bindings. */ /* Copy the unaliased var/let bindings. */
for (unsigned i = 0; i < script->bindings.numVars(); ++i) { for (unsigned i = 0; i < script->bindings.numVars(); ++i) {
if (!script->varIsAliased(i)) if (!script->varIsAliased(i))
setVar(i, fp->localSlot(i)); setVar(i, fp->unaliasedLocal(i), DONT_CHECK_ALIASING);
} }
} }
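
copyUnaliasedValues exists so a debugger-held scope object can be made complete when its frame goes away: aliased bindings already live in the scope object, unaliased ones still live in the frame. A hedged standalone model of that split, with invented types; it is a sketch of the idea, not the engine's data layout.

// Sketch: a binding is either "aliased" (canonical copy in the scope object)
// or "unaliased" (canonical copy in the live frame). When the frame is about
// to go away, the unaliased values are copied in so the scope is complete.
#include <vector>
#include <iostream>

struct ModelScopeSnapshot {
    std::vector<bool> aliased;      // which bindings live in the scope object
    std::vector<double> scopeSlots; // canonical storage for aliased bindings
    std::vector<double> frameSlots; // canonical storage while the frame is live
    bool frameLive = true;

    double read(unsigned i) const {
        return (aliased[i] || !frameLive) ? scopeSlots[i] : frameSlots[i];
    }
    void onPopFrame() {
        for (unsigned i = 0; i < aliased.size(); i++) {
            if (!aliased[i])
                scopeSlots[i] = frameSlots[i];  // copyUnaliasedValues analogue
        }
        frameLive = false;
    }
};

int main()
{
    ModelScopeSnapshot s{{true, false}, {10, 0}, {99, 20}};
    s.onPopFrame();
    std::cout << s.read(0) << ' ' << s.read(1) << '\n';  // prints: 10 20
}
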
CallObject * CallObject *
CallObject::createForStrictEval(JSContext *cx, StackFrame *fp) CallObject::createForStrictEval(JSContext *cx, StackFrame *fp)
{ {
CallObject *callobj = create(cx, fp->script(), fp->scopeChain(), RootedObject(cx)); JS_ASSERT(fp->isStrictEvalFrame());
if (!callobj) JS_ASSERT(cx->fp() == fp);
return NULL; JS_ASSERT(cx->regs().pc == fp->script()->code);
callobj->setStackFrame(fp); return create(cx, fp->script(), fp->scopeChain(), RootedFunction(cx));
fp->initScopeChain(*callobj);
return callobj;
}
JSBool
CallObject::getArgOp(JSContext *cx, HandleObject obj, HandleId id, Value *vp)
{
CallObject &callobj = obj->asCall();
JS_ASSERT((int16_t) JSID_TO_INT(id) == JSID_TO_INT(id));
unsigned i = (uint16_t) JSID_TO_INT(id);
DebugOnly<JSScript *> script = callobj.getCalleeFunction()->script();
JS_ASSERT(script->formalLivesInCallObject(i));
if (StackFrame *fp = callobj.maybeStackFrame())
*vp = fp->formalArg(i);
else
*vp = callobj.arg(i);
return true;
} }
JSBool JSBool
@ -313,36 +210,12 @@ CallObject::setArgOp(JSContext *cx, HandleObject obj, HandleId id, JSBool strict
JSScript *script = callobj.getCalleeFunction()->script(); JSScript *script = callobj.getCalleeFunction()->script();
JS_ASSERT(script->formalLivesInCallObject(i)); JS_ASSERT(script->formalLivesInCallObject(i));
if (StackFrame *fp = callobj.maybeStackFrame())
fp->formalArg(i) = *vp;
else
callobj.setArg(i, *vp); callobj.setArg(i, *vp);
if (!script->ensureHasTypes(cx)) if (!script->ensureHasTypes(cx))
return false; return false;
TypeScript::SetArgument(cx, script, i, *vp); TypeScript::SetArgument(cx, script, i, *vp);
return true;
}
JSBool
CallObject::getVarOp(JSContext *cx, HandleObject obj, HandleId id, Value *vp)
{
CallObject &callobj = obj->asCall();
JS_ASSERT((int16_t) JSID_TO_INT(id) == JSID_TO_INT(id));
unsigned i = (uint16_t) JSID_TO_INT(id);
DebugOnly<JSScript *> script = callobj.getCalleeFunction()->script();
JS_ASSERT(script->varIsAliased(i));
if (StackFrame *fp = callobj.maybeStackFrame())
*vp = fp->varSlot(i);
else
*vp = callobj.var(i);
JS_ASSERT(!vp->isMagic(JS_OPTIMIZED_ARGUMENTS));
return true; return true;
} }
@ -357,9 +230,6 @@ CallObject::setVarOp(JSContext *cx, HandleObject obj, HandleId id, JSBool strict
JSScript *script = callobj.getCalleeFunction()->script(); JSScript *script = callobj.getCalleeFunction()->script();
JS_ASSERT(script->varIsAliased(i)); JS_ASSERT(script->varIsAliased(i));
if (StackFrame *fp = callobj.maybeStackFrame())
fp->varSlot(i) = *vp;
else
callobj.setVar(i, *vp); callobj.setVar(i, *vp);
if (!script->ensureHasTypes(cx)) if (!script->ensureHasTypes(cx))
@ -369,52 +239,16 @@ CallObject::setVarOp(JSContext *cx, HandleObject obj, HandleId id, JSBool strict
return true; return true;
} }
bool
CallObject::containsVarOrArg(PropertyName *name, Value *vp, JSContext *cx)
{
jsid id = NameToId(name);
const Shape *shape = nativeLookup(cx, id);
if (!shape)
return false;
PropertyOp op = shape->getterOp();
if (op != getVarOp && op != getArgOp)
return false;
JS_ALWAYS_TRUE(op(cx, RootedObject(cx, this), RootedId(cx, INT_TO_JSID(shape->shortid())), vp));
return true;
}
static void
call_trace(JSTracer *trc, JSObject *obj)
{
JS_ASSERT(obj->isCall());
/* Mark any generator frame, as for arguments objects. */
#if JS_HAS_GENERATORS
StackFrame *fp = (StackFrame *) obj->getPrivate();
if (fp && fp->isFloatingGenerator())
MarkObject(trc, &js_FloatingFrameToGenerator(fp)->obj, "generator object");
#endif
}
JS_PUBLIC_DATA(Class) js::CallClass = { JS_PUBLIC_DATA(Class) js::CallClass = {
"Call", "Call",
JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_IS_ANONYMOUS | JSCLASS_IS_ANONYMOUS | JSCLASS_HAS_RESERVED_SLOTS(CallObject::RESERVED_SLOTS),
JSCLASS_HAS_RESERVED_SLOTS(CallObject::RESERVED_SLOTS),
JS_PropertyStub, /* addProperty */ JS_PropertyStub, /* addProperty */
JS_PropertyStub, /* delProperty */ JS_PropertyStub, /* delProperty */
JS_PropertyStub, /* getProperty */ JS_PropertyStub, /* getProperty */
JS_StrictPropertyStub, /* setProperty */ JS_StrictPropertyStub, /* setProperty */
JS_EnumerateStub, JS_EnumerateStub,
JS_ResolveStub, JS_ResolveStub,
NULL, /* convert: Leave it NULL so we notice if calls ever escape */ NULL /* convert: Leave it NULL so we notice if calls ever escape */
NULL, /* finalize */
NULL, /* checkAccess */
NULL, /* call */
NULL, /* construct */
NULL, /* hasInstance */
call_trace
}; };
Class js::DeclEnvClass = { Class js::DeclEnvClass = {
@ -449,7 +283,6 @@ DeclEnvObject::create(JSContext *cx, StackFrame *fp)
if (!obj) if (!obj)
return NULL; return NULL;
obj->setPrivate(fp);
if (!obj->asScope().setEnclosingScope(cx, fp->scopeChain())) if (!obj->asScope().setEnclosingScope(cx, fp->scopeChain()))
return NULL; return NULL;
@ -753,103 +586,35 @@ ClonedBlockObject::create(JSContext *cx, Handle<StaticBlockObject *> block, Stac
obj->setReservedSlot(SCOPE_CHAIN_SLOT, ObjectValue(*fp->scopeChain())); obj->setReservedSlot(SCOPE_CHAIN_SLOT, ObjectValue(*fp->scopeChain()));
obj->setReservedSlot(DEPTH_SLOT, PrivateUint32Value(block->stackDepth())); obj->setReservedSlot(DEPTH_SLOT, PrivateUint32Value(block->stackDepth()));
obj->setPrivate(js_FloatingFrameIfGenerator(cx, fp));
if (obj->lastProperty()->extensibleParents() && !obj->generateOwnShape(cx)) if (obj->lastProperty()->extensibleParents() && !obj->generateOwnShape(cx))
return NULL; return NULL;
/*
* Copy in the closed-over locals. Closed-over locals don't need
* any fixup since the initial value is 'undefined'.
*/
Value *src = fp->base() + block->stackDepth();
unsigned nslots = block->slotCount();
for (unsigned i = 0; i < nslots; ++i, ++src) {
if (block->isAliased(i))
obj->asClonedBlock().setVar(i, *src);
}
return &obj->asClonedBlock(); return &obj->asClonedBlock();
} }
void
ClonedBlockObject::put(StackFrame *fp)
{
uint32_t count = slotCount();
uint32_t depth = stackDepth();
/* See comments in CheckDestructuring in frontend/Parser.cpp. */
JS_ASSERT(count >= 1);
copySlotRange(RESERVED_SLOTS, fp->base() + depth, count);
/* We must clear the private slot even with errors. */
setPrivate(NULL);
}
void void
ClonedBlockObject::copyUnaliasedValues(StackFrame *fp) ClonedBlockObject::copyUnaliasedValues(StackFrame *fp)
{ {
StaticBlockObject &block = staticBlock(); StaticBlockObject &block = staticBlock();
unsigned base = fp->script()->nfixed + stackDepth(); unsigned base = block.slotToFrameLocal(fp->script(), 0);
for (unsigned i = 0; i < slotCount(); ++i) { for (unsigned i = 0; i < slotCount(); ++i) {
if (!block.isAliased(i)) if (!block.isAliased(i))
setVar(i, fp->localSlot(base + i)); setVar(i, fp->unaliasedLocal(base + i), DONT_CHECK_ALIASING);
} }
} }
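
Judging from the expression this hunk replaces (script->nfixed + stackDepth()) and the comment added to BlockObject in the header further down, slotToFrameLocal appears to map block binding i to frame local nfixed + stackDepth + i. A tiny sketch of that arithmetic, stated as an assumption rather than a definition of the real method.

// Sketch of the index arithmetic suggested by the removed expression above:
// block binding i lives at frame local nfixed + blockStackDepth + i.
#include <cassert>

static unsigned slotToFrameLocalModel(unsigned nfixed, unsigned blockStackDepth, unsigned i)
{
    return nfixed + blockStackDepth + i;
}

int main()
{
    // A script with 4 fixed locals and a block entered at stack depth 2:
    // binding 0 of the block maps to frame local 6.
    assert(slotToFrameLocalModel(4, 2, 0) == 6);
    assert(slotToFrameLocalModel(4, 2, 3) == 9);
}
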
static JSBool
block_getProperty(JSContext *cx, HandleObject obj, HandleId id, Value *vp)
{
/*
* Block objects are never exposed to script, and the engine handles them
* with care. So unlike other getters, this one can assert (rather than
* check) certain invariants about obj.
*/
ClonedBlockObject &block = obj->asClonedBlock();
unsigned index = (unsigned) JSID_TO_INT(id);
JS_ASSERT_IF(!block.compartment()->debugMode(), block.staticBlock().isAliased(index));
if (StackFrame *fp = block.maybeStackFrame()) {
fp = js_LiveFrameIfGenerator(fp);
index += fp->numFixed() + block.stackDepth();
JS_ASSERT(index < fp->numSlots());
*vp = fp->slots()[index];
return true;
}
/* Values are in slots immediately following the class-reserved ones. */
JS_ASSERT(block.var(index) == *vp);
return true;
}
static JSBool
block_setProperty(JSContext *cx, HandleObject obj, HandleId id, JSBool strict, Value *vp)
{
ClonedBlockObject &block = obj->asClonedBlock();
unsigned index = (unsigned) JSID_TO_INT(id);
JS_ASSERT_IF(!block.compartment()->debugMode(), block.staticBlock().isAliased(index));
if (StackFrame *fp = block.maybeStackFrame()) {
fp = js_LiveFrameIfGenerator(fp);
index += fp->numFixed() + block.stackDepth();
JS_ASSERT(index < fp->numSlots());
fp->slots()[index] = *vp;
return true;
}
/*
* The value in *vp will be written back to the slot in obj that was
* allocated when this let binding was defined.
*/
return true;
}
bool
ClonedBlockObject::containsVar(PropertyName *name, Value *vp, JSContext *cx)
{
RootedObject self(cx, this);
const Shape *shape = nativeLookup(cx, NameToId(name));
if (!shape)
return false;
JS_ASSERT(shape->getterOp() == block_getProperty);
JS_ALWAYS_TRUE(block_getProperty(cx, self, RootedId(cx, INT_TO_JSID(shape->shortid())), vp));
return true;
}
StaticBlockObject * StaticBlockObject *
StaticBlockObject::create(JSContext *cx) StaticBlockObject::create(JSContext *cx)
{ {
@ -867,7 +632,6 @@ StaticBlockObject::create(JSContext *cx)
if (!obj) if (!obj)
return NULL; return NULL;
obj->setPrivate(NULL);
return &obj->asStaticBlock(); return &obj->asStaticBlock();
} }
@ -890,29 +654,15 @@ StaticBlockObject::addVar(JSContext *cx, jsid id, int index, bool *redeclared)
* block's shape later. * block's shape later.
*/ */
uint32_t slot = JSSLOT_FREE(&BlockClass) + index; uint32_t slot = JSSLOT_FREE(&BlockClass) + index;
return addPropertyInternal(cx, id, block_getProperty, block_setProperty, return addPropertyInternal(cx, id, /* getter = */ NULL, /* setter = */ NULL,
slot, JSPROP_ENUMERATE | JSPROP_PERMANENT, slot, JSPROP_ENUMERATE | JSPROP_PERMANENT,
Shape::HAS_SHORTID, index, spp, Shape::HAS_SHORTID, index, spp,
/* allowDictionary = */ false); /* allowDictionary = */ false);
} }
static void
block_trace(JSTracer *trc, JSObject *obj)
{
if (obj->isStaticBlock())
return;
/* XXX: this will be removed again with bug 659577. */
#if JS_HAS_GENERATORS
StackFrame *fp = obj->asClonedBlock().maybeStackFrame();
if (fp && fp->isFloatingGenerator())
MarkObject(trc, &js_FloatingFrameToGenerator(fp)->obj, "generator object");
#endif
}
Class js::BlockClass = { Class js::BlockClass = {
"Block", "Block",
JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_IMPLEMENTS_BARRIERS |
JSCLASS_HAS_RESERVED_SLOTS(BlockObject::RESERVED_SLOTS) | JSCLASS_HAS_RESERVED_SLOTS(BlockObject::RESERVED_SLOTS) |
JSCLASS_IS_ANONYMOUS, JSCLASS_IS_ANONYMOUS,
JS_PropertyStub, /* addProperty */ JS_PropertyStub, /* addProperty */
@ -921,13 +671,7 @@ Class js::BlockClass = {
JS_StrictPropertyStub, /* setProperty */ JS_StrictPropertyStub, /* setProperty */
JS_EnumerateStub, JS_EnumerateStub,
JS_ResolveStub, JS_ResolveStub,
JS_ConvertStub, JS_ConvertStub
NULL, /* finalize */
NULL, /* checkAccess */
NULL, /* call */
NULL, /* construct */
NULL, /* hasInstance */
block_trace
}; };
#define NO_PARENT_INDEX UINT32_MAX #define NO_PARENT_INDEX UINT32_MAX
@ -1044,7 +788,7 @@ js::XDRStaticBlockObject(XDRState<mode> *xdr, JSScript *script, StaticBlockObjec
*/ */
for (unsigned i = 0; i < count; i++) { for (unsigned i = 0; i < count; i++) {
const Shape *shape = shapes[i]; const Shape *shape = shapes[i];
JS_ASSERT(shape->getter() == block_getProperty); JS_ASSERT(shape->hasDefaultGetter());
JS_ASSERT(unsigned(shape->shortid()) == i); JS_ASSERT(unsigned(shape->shortid()) == i);
jsid propid = shape->propid(); jsid propid = shape->propid();
@ -1263,6 +1007,9 @@ ScopeIter::settle()
} else if (fp_->isNonEvalFunctionFrame() && !fp_->hasCallObj()) { } else if (fp_->isNonEvalFunctionFrame() && !fp_->hasCallObj()) {
JS_ASSERT(cur_ == fp_->fun()->environment()); JS_ASSERT(cur_ == fp_->fun()->environment());
fp_ = NULL; fp_ = NULL;
} else if (fp_->isStrictEvalFrame() && !fp_->hasCallObj()) {
JS_ASSERT(cur_ == fp_->prev()->scopeChain());
fp_ = NULL;
} else if (cur_->isWith()) { } else if (cur_->isWith()) {
JS_ASSERT_IF(fp_->isFunctionFrame(), fp_->fun()->isHeavyweight()); JS_ASSERT_IF(fp_->isFunctionFrame(), fp_->fun()->isHeavyweight());
JS_ASSERT_IF(block_, block_->needsClone()); JS_ASSERT_IF(block_, block_->needsClone());
@ -1359,14 +1106,14 @@ class DebugScopeProxy : public BaseProxyHandler
if (maybefp) { if (maybefp) {
if (action == GET) if (action == GET)
*vp = maybefp->varSlot(i); *vp = maybefp->unaliasedVar(i);
else else
maybefp->varSlot(i) = *vp; maybefp->unaliasedVar(i) = *vp;
} else { } else {
if (action == GET) if (action == GET)
*vp = callobj.var(i); *vp = callobj.var(i, DONT_CHECK_ALIASING);
else else
callobj.setVar(i, *vp); callobj.setVar(i, *vp, DONT_CHECK_ALIASING);
} }
if (action == SET) if (action == SET)
@ -1381,15 +1128,22 @@ class DebugScopeProxy : public BaseProxyHandler
return false; return false;
if (maybefp) { if (maybefp) {
if (script->argsObjAliasesFormals()) {
if (action == GET) if (action == GET)
*vp = maybefp->formalArg(i); *vp = maybefp->argsObj().arg(i);
else else
maybefp->formalArg(i) = *vp; maybefp->argsObj().setArg(i, *vp);
} else { } else {
if (action == GET) if (action == GET)
*vp = callobj.arg(i); *vp = maybefp->unaliasedFormal(i);
else else
callobj.setArg(i, *vp); maybefp->unaliasedFormal(i) = *vp;
}
} else {
if (action == GET)
*vp = callobj.arg(i, DONT_CHECK_ALIASING);
else
callobj.setArg(i, *vp, DONT_CHECK_ALIASING);
} }
if (action == SET) if (action == SET)
@ -1409,17 +1163,17 @@ class DebugScopeProxy : public BaseProxyHandler
if (maybefp) { if (maybefp) {
JSScript *script = maybefp->script(); JSScript *script = maybefp->script();
unsigned local = i + script->nfixed + block.stackDepth(); unsigned local = block.slotToFrameLocal(maybefp->script(), i);
if (action == GET) if (action == GET)
*vp = maybefp->localSlot(local); *vp = maybefp->unaliasedLocal(local);
else else
maybefp->localSlot(local) = *vp; maybefp->unaliasedLocal(local) = *vp;
JS_ASSERT(analyze::LocalSlot(script, local) >= analyze::TotalSlots(script)); JS_ASSERT(analyze::LocalSlot(script, local) >= analyze::TotalSlots(script));
} else { } else {
if (action == GET) if (action == GET)
*vp = block.var(i); *vp = block.var(i, DONT_CHECK_ALIASING);
else else
block.setVar(i, *vp); block.setVar(i, *vp, DONT_CHECK_ALIASING);
} }
return true; return true;
@ -1693,7 +1447,7 @@ DebugScopes::mark(JSTracer *trc)
} }
void void
DebugScopes::sweep() DebugScopes::sweep(JSRuntime *rt)
{ {
/* /*
* Note: missingScopes points to debug scopes weakly not just so that debug * Note: missingScopes points to debug scopes weakly not just so that debug
@ -1705,16 +1459,36 @@ DebugScopes::sweep()
e.removeFront(); e.removeFront();
} }
/*
* Scopes can be finalized when a suspended generator becomes garbage or
* when a debugger-synthesized ScopeObject is no longer rooted by its
* DebugScopeObject.
*/
for (LiveScopeMap::Enum e(liveScopes); !e.empty(); e.popFront()) { for (LiveScopeMap::Enum e(liveScopes); !e.empty(); e.popFront()) {
ScopeObject &scope = *e.front().key; ScopeObject *scope = e.front().key;
if (JS_IsAboutToBeFinalized(&scope)) { StackFrame *fp = e.front().value;
JS_ASSERT(!scope.maybeStackFrame() || scope.maybeStackFrame()->isGeneratorFrame());
/*
* Scopes can be finalized when a debugger-synthesized ScopeObject is
* no longer reachable via its DebugScopeObject.
*/
if (JS_IsAboutToBeFinalized(scope)) {
e.removeFront(); e.removeFront();
continue;
}
/*
* As explained in onGeneratorFrameChange, liveScopes includes
* suspended generator frames. Since a generator can be finalized while
* its scope is live, we must explicitly detect finalized generators.
* Since the scope is still live, we simulate the onPop* call by
* copying unaliased variables into the scope object.
*/
if (JSGenerator *gen = fp->maybeSuspendedGenerator(rt)) {
JS_ASSERT(gen->state == JSGEN_NEWBORN || gen->state == JSGEN_OPEN);
if (!IsMarked(&gen->obj)) {
if (scope->isCall())
scope->asCall().copyUnaliasedValues(fp);
else if (scope->isBlock())
scope->asClonedBlock().copyUnaliasedValues(fp);
e.removeFront();
continue;
}
} }
} }
} }
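
The sweep above combines an ordinary weak-map sweep with a special case for suspended generators: if the generator died while its scope is still reachable, the unaliased values are copied into the scope before the entry is dropped. A standalone sketch of that policy; all types are invented, and `marked` stands in for the GC's liveness check.

#include <vector>
#include <iostream>

struct DebugScope { bool marked; bool complete; };   // 'marked' models the GC liveness bit
struct GenFrame   { bool generatorDead; };           // frame owned by a (possibly dead) generator

struct LiveScopeEntry { DebugScope *scope; GenFrame *frame; };

// Sweep policy: drop dead scopes; for scopes whose generator died, first
// complete the scope (the simulated onPop copy), then drop the entry.
static void sweepLiveScopes(std::vector<LiveScopeEntry> &liveScopes)
{
    std::vector<LiveScopeEntry> kept;
    for (LiveScopeEntry &e : liveScopes) {
        if (!e.scope->marked)
            continue;                       // scope is about to be finalized: drop
        if (e.frame->generatorDead) {
            e.scope->complete = true;       // copyUnaliasedValues analogue
            continue;                       // frame is going away: drop
        }
        kept.push_back(e);                  // both still live: keep watching
    }
    liveScopes.swap(kept);
}

int main()
{
    DebugScope deadScope{false, false}, scopeOfDeadGen{true, false}, liveScope{true, false};
    GenFrame liveFrame{false}, deadGenFrame{true};
    std::vector<LiveScopeEntry> map = {
        {&deadScope, &liveFrame}, {&scopeOfDeadGen, &deadGenFrame}, {&liveScope, &liveFrame},
    };
    sweepLiveScopes(map);
    std::cout << map.size() << ' ' << scopeOfDeadGen.complete << '\n';   // prints: 1 1
}
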
@ -1791,14 +1565,11 @@ DebugScopes::addDebugScope(JSContext *cx, ScopeIter si, DebugScopeObject &debugS
void void
DebugScopes::onPopCall(StackFrame *fp) DebugScopes::onPopCall(StackFrame *fp)
{ {
if (fp->isYielding()) JS_ASSERT(!fp->isYielding());
return;
if (fp->fun()->isHeavyweight()) { if (fp->fun()->isHeavyweight()) {
/* /*
* When a frame finishes executing in mjit code, the epilogue is called * The StackFrame may be observed before the prologue has created the
* once from the return and once when the frame is popped. * CallObject. See ScopeIter::settle.
* TODO: bug 659577 will remove this (with HAS_CALL_OBJ).
*/ */
if (fp->hasCallObj()) { if (fp->hasCallObj()) {
CallObject &callobj = fp->scopeChain()->asCall(); CallObject &callobj = fp->scopeChain()->asCall();
@ -1806,7 +1577,6 @@ DebugScopes::onPopCall(StackFrame *fp)
liveScopes.remove(&callobj); liveScopes.remove(&callobj);
} }
} else { } else {
JS_ASSERT(!fp->hasCallObj());
if (MissingScopeMap::Ptr p = missingScopes.lookup(ScopeIter(fp))) { if (MissingScopeMap::Ptr p = missingScopes.lookup(ScopeIter(fp))) {
CallObject &callobj = p->value->scope().asCall(); CallObject &callobj = p->value->scope().asCall();
callobj.copyUnaliasedValues(fp); callobj.copyUnaliasedValues(fp);
@ -1825,8 +1595,6 @@ DebugScopes::onPopBlock(JSContext *cx, StackFrame *fp)
clone.copyUnaliasedValues(fp); clone.copyUnaliasedValues(fp);
liveScopes.remove(&clone); liveScopes.remove(&clone);
} else { } else {
JS_ASSERT(!fp->scopeChain()->isBlock() ||
fp->scopeChain()->asClonedBlock().staticBlock() != staticBlock);
if (MissingScopeMap::Ptr p = missingScopes.lookup(ScopeIter(fp))) { if (MissingScopeMap::Ptr p = missingScopes.lookup(ScopeIter(fp))) {
ClonedBlockObject &clone = p->value->scope().asClonedBlock(); ClonedBlockObject &clone = p->value->scope().asClonedBlock();
clone.copyUnaliasedValues(fp); clone.copyUnaliasedValues(fp);
@ -1845,6 +1613,11 @@ DebugScopes::onPopWith(StackFrame *fp)
void void
DebugScopes::onPopStrictEvalScope(StackFrame *fp) DebugScopes::onPopStrictEvalScope(StackFrame *fp)
{ {
/*
* The StackFrame may be observed before the prologue has created the
* CallObject. See ScopeIter::settle.
*/
if (fp->hasCallObj())
liveScopes.remove(&fp->scopeChain()->asCall()); liveScopes.remove(&fp->scopeChain()->asCall());
} }
@ -1993,13 +1766,11 @@ GetDebugScopeForMissing(JSContext *cx, ScopeIter si)
if (callobj->enclosingScope().isDeclEnv()) { if (callobj->enclosingScope().isDeclEnv()) {
JS_ASSERT(CallObjectLambdaName(callobj->getCalleeFunction())); JS_ASSERT(CallObjectLambdaName(callobj->getCalleeFunction()));
DeclEnvObject &declenv = callobj->enclosingScope().asDeclEnv(); DeclEnvObject &declenv = callobj->enclosingScope().asDeclEnv();
declenv.setStackFrame(NULL);
enclosingDebug = DebugScopeObject::create(cx, declenv, *enclosingDebug); enclosingDebug = DebugScopeObject::create(cx, declenv, *enclosingDebug);
if (!enclosingDebug) if (!enclosingDebug)
return NULL; return NULL;
} }
callobj->setStackFrame(NULL);
debugScope = DebugScopeObject::create(cx, *callobj, *enclosingDebug); debugScope = DebugScopeObject::create(cx, *callobj, *enclosingDebug);
break; break;
} }
@ -2009,7 +1780,6 @@ GetDebugScopeForMissing(JSContext *cx, ScopeIter si)
if (!block) if (!block)
return NULL; return NULL;
block->setStackFrame(NULL);
debugScope = DebugScopeObject::create(cx, *block, *enclosingDebug); debugScope = DebugScopeObject::create(cx, *block, *enclosingDebug);
break; break;
} }


@ -9,7 +9,6 @@
#define ScopeObject_h___ #define ScopeObject_h___
#include "jscntxt.h" #include "jscntxt.h"
#include "jsiter.h"
#include "jsobj.h" #include "jsobj.h"
#include "jsweakmap.h" #include "jsweakmap.h"
@ -29,10 +28,7 @@ namespace js {
struct ScopeCoordinate struct ScopeCoordinate
{ {
uint16_t hops; uint16_t hops;
uint16_t binding; uint16_t slot;
/* XXX this will be removed with the last patch of bug 659577. */
uint16_t frameBinding;
inline ScopeCoordinate(jsbytecode *pc); inline ScopeCoordinate(jsbytecode *pc);
inline ScopeCoordinate() {} inline ScopeCoordinate() {}
@ -44,7 +40,7 @@ ScopeCoordinateBlockChain(JSScript *script, jsbytecode *pc);
/* Return the name being accessed by the given ALIASEDVAR op. */ /* Return the name being accessed by the given ALIASEDVAR op. */
extern PropertyName * extern PropertyName *
ScopeCoordinateName(JSScript *script, jsbytecode *pc); ScopeCoordinateName(JSRuntime *rt, JSScript *script, jsbytecode *pc);
/*****************************************************************************/ /*****************************************************************************/
@ -88,13 +84,13 @@ ScopeCoordinateName(JSScript *script, jsbytecode *pc);
class ScopeObject : public JSObject class ScopeObject : public JSObject
{ {
/* Use maybeStackFrame() instead. */
void *getPrivate() const;
protected: protected:
static const uint32_t SCOPE_CHAIN_SLOT = 0; static const uint32_t SCOPE_CHAIN_SLOT = 0;
public: public:
/* Number of reserved slots for both CallObject and BlockObject. */
static const uint32_t CALL_BLOCK_RESERVED_SLOTS = 2;
/* /*
* Since every scope chain terminates with a global object and GlobalObject * Since every scope chain terminates with a global object and GlobalObject
* does not derive ScopeObject (it has a completely different layout), the * does not derive ScopeObject (it has a completely different layout), the
@ -112,14 +108,6 @@ class ScopeObject : public JSObject
inline const Value &aliasedVar(ScopeCoordinate sc); inline const Value &aliasedVar(ScopeCoordinate sc);
inline void setAliasedVar(ScopeCoordinate sc, const Value &v); inline void setAliasedVar(ScopeCoordinate sc, const Value &v);
/*
* The stack frame for this scope object, if the frame is still active.
* Note: these members may not be called for a StaticBlockObject or
* WithObject.
*/
inline StackFrame *maybeStackFrame() const;
inline void setStackFrame(StackFrame *frame);
/* For jit access. */ /* For jit access. */
static inline size_t offsetOfEnclosingScope(); static inline size_t offsetOfEnclosingScope();
}; };
@ -129,10 +117,10 @@ class CallObject : public ScopeObject
static const uint32_t CALLEE_SLOT = 1; static const uint32_t CALLEE_SLOT = 1;
static CallObject * static CallObject *
create(JSContext *cx, JSScript *script, HandleObject enclosing, HandleObject callee); create(JSContext *cx, JSScript *script, HandleObject enclosing, HandleFunction callee);
public: public:
static const uint32_t RESERVED_SLOTS = 3; static const uint32_t RESERVED_SLOTS = CALL_BLOCK_RESERVED_SLOTS;
static CallObject *createForFunction(JSContext *cx, StackFrame *fp); static CallObject *createForFunction(JSContext *cx, StackFrame *fp);
static CallObject *createForStrictEval(JSContext *cx, StackFrame *fp); static CallObject *createForStrictEval(JSContext *cx, StackFrame *fp);
@ -149,14 +137,12 @@ class CallObject : public ScopeObject
inline void setCallee(JSObject *callee); inline void setCallee(JSObject *callee);
/* Returns the formal argument at the given index. */ /* Returns the formal argument at the given index. */
inline const Value &arg(unsigned i) const; inline const Value &arg(unsigned i, MaybeCheckAliasing = CHECK_ALIASING) const;
inline void setArg(unsigned i, const Value &v); inline void setArg(unsigned i, const Value &v, MaybeCheckAliasing = CHECK_ALIASING);
inline void initArgUnchecked(unsigned i, const Value &v);
/* Returns the variable at the given index. */ /* Returns the variable at the given index. */
inline const Value &var(unsigned i) const; inline const Value &var(unsigned i, MaybeCheckAliasing = CHECK_ALIASING) const;
inline void setVar(unsigned i, const Value &v); inline void setVar(unsigned i, const Value &v, MaybeCheckAliasing = CHECK_ALIASING);
inline void initVarUnchecked(unsigned i, const Value &v);
/* /*
* Get the actual arrays of arguments and variables. Only call if type * Get the actual arrays of arguments and variables. Only call if type
@ -166,16 +152,9 @@ class CallObject : public ScopeObject
inline HeapSlotArray argArray(); inline HeapSlotArray argArray();
inline HeapSlotArray varArray(); inline HeapSlotArray varArray();
inline void copyValues(unsigned nargs, Value *argv, unsigned nvars, Value *slots);
static JSBool getArgOp(JSContext *cx, HandleObject obj, HandleId id, Value *vp);
static JSBool getVarOp(JSContext *cx, HandleObject obj, HandleId id, Value *vp);
static JSBool setArgOp(JSContext *cx, HandleObject obj, HandleId id, JSBool strict, Value *vp); static JSBool setArgOp(JSContext *cx, HandleObject obj, HandleId id, JSBool strict, Value *vp);
static JSBool setVarOp(JSContext *cx, HandleObject obj, HandleId id, JSBool strict, Value *vp); static JSBool setVarOp(JSContext *cx, HandleObject obj, HandleId id, JSBool strict, Value *vp);
/* Return whether this environment contains 'name' and, if so, its value. */
bool containsVarOrArg(PropertyName *name, Value *vp, JSContext *cx);
/* Copy in all the unaliased formals and locals. */ /* Copy in all the unaliased formals and locals. */
void copyUnaliasedValues(StackFrame *fp); void copyUnaliasedValues(StackFrame *fp);
}; };
@ -202,10 +181,6 @@ class NestedScopeObject : public ScopeObject
class WithObject : public NestedScopeObject class WithObject : public NestedScopeObject
{ {
/* These ScopeObject operations are not valid on a with object. */
js::StackFrame *maybeStackFrame() const;
void setStackFrame(StackFrame *frame);
static const unsigned THIS_SLOT = 2; static const unsigned THIS_SLOT = 2;
/* Use WithObject::object() instead. */ /* Use WithObject::object() instead. */
@ -213,7 +188,11 @@ class WithObject : public NestedScopeObject
public: public:
static const unsigned RESERVED_SLOTS = 3; static const unsigned RESERVED_SLOTS = 3;
#ifdef JS_THREADSAFE
static const gc::AllocKind FINALIZE_KIND = gc::FINALIZE_OBJECT4_BACKGROUND;
#else
static const gc::AllocKind FINALIZE_KIND = gc::FINALIZE_OBJECT4; static const gc::AllocKind FINALIZE_KIND = gc::FINALIZE_OBJECT4;
#endif
static WithObject * static WithObject *
create(JSContext *cx, HandleObject proto, HandleObject enclosing, uint32_t depth); create(JSContext *cx, HandleObject proto, HandleObject enclosing, uint32_t depth);
@ -228,12 +207,23 @@ class WithObject : public NestedScopeObject
class BlockObject : public NestedScopeObject class BlockObject : public NestedScopeObject
{ {
public: public:
static const unsigned RESERVED_SLOTS = 2; static const unsigned RESERVED_SLOTS = CALL_BLOCK_RESERVED_SLOTS;
#ifdef JS_THREADSAFE
static const gc::AllocKind FINALIZE_KIND = gc::FINALIZE_OBJECT4_BACKGROUND;
#else
static const gc::AllocKind FINALIZE_KIND = gc::FINALIZE_OBJECT4; static const gc::AllocKind FINALIZE_KIND = gc::FINALIZE_OBJECT4;
#endif
/* Return the number of variables associated with this block. */ /* Return the number of variables associated with this block. */
inline uint32_t slotCount() const; inline uint32_t slotCount() const;
/*
* Return the local corresponding to the ith binding where i is in the
 * range [0, slotCount()) and the returned local index is in the range
* [script->nfixed, script->nfixed + script->nslots).
*/
unsigned slotToFrameLocal(JSScript *script, unsigned i);
protected: protected:
/* Blocks contain an object slot for each slot i: 0 <= i < slotCount. */ /* Blocks contain an object slot for each slot i: 0 <= i < slotCount. */
inline const Value &slotValue(unsigned i); inline const Value &slotValue(unsigned i);
@ -242,10 +232,6 @@ class BlockObject : public NestedScopeObject
class StaticBlockObject : public BlockObject class StaticBlockObject : public BlockObject
{ {
/* These ScopeObject operations are not valid on a static block object. */
StackFrame *maybeStackFrame() const;
void setStackFrame(StackFrame *frame);
public: public:
static StaticBlockObject *create(JSContext *cx); static StaticBlockObject *create(JSContext *cx);
@ -273,7 +259,7 @@ class StaticBlockObject : public BlockObject
* A static block object is cloned (when entering the block) iff some * A static block object is cloned (when entering the block) iff some
* variable of the block isAliased. * variable of the block isAliased.
*/ */
bool needsClone() const; bool needsClone();
const Shape *addVar(JSContext *cx, jsid id, int index, bool *redeclared); const Shape *addVar(JSContext *cx, jsid id, int index, bool *redeclared);
}; };
@ -287,18 +273,9 @@ class ClonedBlockObject : public BlockObject
/* The static block from which this block was cloned. */ /* The static block from which this block was cloned. */
StaticBlockObject &staticBlock() const; StaticBlockObject &staticBlock() const;
/*
* When this block's stack slots are about to be popped, 'put' must be
* called to copy the slot values into this block's object slots.
*/
void put(StackFrame *fp);
/* Assuming 'put' has been called, return the value of the ith let var. */ /* Assuming 'put' has been called, return the value of the ith let var. */
const Value &var(unsigned i); const Value &var(unsigned i, MaybeCheckAliasing = CHECK_ALIASING);
void setVar(unsigned i, const Value &v); void setVar(unsigned i, const Value &v, MaybeCheckAliasing = CHECK_ALIASING);
/* Return whether this environment contains 'name' and, if so, its value. */
bool containsVar(PropertyName *name, Value *vp, JSContext *cx);
/* Copy in all the unaliased formals and locals. */ /* Copy in all the unaliased formals and locals. */
void copyUnaliasedValues(StackFrame *fp); void copyUnaliasedValues(StackFrame *fp);
@ -470,7 +447,7 @@ class DebugScopes
bool init(); bool init();
void mark(JSTracer *trc); void mark(JSTracer *trc);
void sweep(); void sweep(JSRuntime *rt);
DebugScopeObject *hasDebugScope(JSContext *cx, ScopeObject &scope) const; DebugScopeObject *hasDebugScope(JSContext *cx, ScopeObject &scope) const;
bool addDebugScope(JSContext *cx, ScopeObject &scope, DebugScopeObject &debugScope); bool addDebugScope(JSContext *cx, ScopeObject &scope, DebugScopeObject &debugScope);


@ -92,7 +92,7 @@ StackFrame::initPrev(JSContext *cx)
prev_ = NULL; prev_ = NULL;
#ifdef DEBUG #ifdef DEBUG
prevpc_ = (jsbytecode *)0xbadc; prevpc_ = (jsbytecode *)0xbadc;
prevInline_ = (JSInlinedSite *)0xbadc; prevInline_ = (InlinedSite *)0xbadc;
#endif #endif
} }
} }
@ -147,9 +147,8 @@ StackFrame::initCallFrame(JSContext *cx, JSFunction &callee,
JS_ASSERT(!hasBlockChain()); JS_ASSERT(!hasBlockChain());
JS_ASSERT(!hasHookData()); JS_ASSERT(!hasHookData());
JS_ASSERT(annotation() == NULL); JS_ASSERT(annotation() == NULL);
JS_ASSERT(!hasCallObj());
SetValueRangeToUndefined(slots(), script->nfixed); initVarsToUndefined();
} }
/* /*
@ -171,85 +170,137 @@ StackFrame::initFixupFrame(StackFrame *prev, StackFrame::Flags flags, void *ncod
u.nactual = nactual; u.nactual = nactual;
} }
inline bool
StackFrame::jitHeavyweightFunctionPrologue(JSContext *cx)
{
JS_ASSERT(isNonEvalFunctionFrame());
JS_ASSERT(fun()->isHeavyweight());
CallObject *callobj = CallObject::createForFunction(cx, this);
if (!callobj)
return false;
pushOnScopeChain(*callobj);
flags_ |= HAS_CALL_OBJ;
if (script()->nesting()) {
types::NestingPrologue(cx, this);
flags_ |= HAS_NESTING;
}
return true;
}
inline void
StackFrame::jitTypeNestingPrologue(JSContext *cx)
{
types::NestingPrologue(cx, this);
flags_ |= HAS_NESTING;
}
inline void
StackFrame::initVarsToUndefined()
{
SetValueRangeToUndefined(slots(), script()->nfixed);
}
inline JSObject * inline JSObject *
StackFrame::createRestParameter(JSContext *cx) StackFrame::createRestParameter(JSContext *cx)
{ {
JS_ASSERT(fun()->hasRest()); JS_ASSERT(fun()->hasRest());
unsigned nformal = fun()->nargs - 1, nactual = numActualArgs(); unsigned nformal = fun()->nargs - 1, nactual = numActualArgs();
unsigned nrest = (nactual > nformal) ? nactual - nformal : 0; unsigned nrest = (nactual > nformal) ? nactual - nformal : 0;
return NewDenseCopiedArray(cx, nrest, actualArgs() + nformal); return NewDenseCopiedArray(cx, nrest, actuals() + nformal);
} }
inline Value & inline Value &
StackFrame::canonicalActualArg(unsigned i) const StackFrame::unaliasedVar(unsigned i, MaybeCheckAliasing checkAliasing)
{ {
if (i < numFormalArgs()) JS_ASSERT_IF(checkAliasing, !script()->varIsAliased(i));
return formalArg(i); JS_ASSERT(i < script()->nfixed);
JS_ASSERT(i < numActualArgs()); return slots()[i];
return actualArgs()[i];
} }
template <class Op> inline Value &
inline bool StackFrame::unaliasedLocal(unsigned i, MaybeCheckAliasing checkAliasing)
StackFrame::forEachCanonicalActualArg(Op op, unsigned start /* = 0 */, unsigned count /* = unsigned(-1) */)
{ {
unsigned nformal = fun()->nargs; #ifdef DEBUG
JS_ASSERT(start <= nformal); if (checkAliasing) {
JS_ASSERT(i < script()->nslots);
Value *formals = formalArgsEnd() - nformal; if (i < script()->nfixed) {
unsigned nactual = numActualArgs(); JS_ASSERT(!script()->varIsAliased(i));
if (count == unsigned(-1))
count = nactual - start;
unsigned end = start + count;
JS_ASSERT(end >= start);
JS_ASSERT(end <= nactual);
if (end <= nformal) {
Value *p = formals + start;
for (; start < end; ++p, ++start) {
if (!op(start, p))
return false;
}
} else { } else {
for (Value *p = formals + start; start < nformal; ++p, ++start) { unsigned depth = i - script()->nfixed;
if (!op(start, p)) for (StaticBlockObject *b = maybeBlockChain(); b; b = b->enclosingBlock()) {
return false; if (b->containsVarAtDepth(depth)) {
} JS_ASSERT(!b->isAliased(depth - b->stackDepth()));
JS_ASSERT(start >= nformal); break;
Value *actuals = formals - (nactual + 2) + start;
for (Value *p = actuals; start < end; ++p, ++start) {
if (!op(start, p))
return false;
} }
} }
return true; }
}
#endif
return slots()[i];
}
inline Value &
StackFrame::unaliasedFormal(unsigned i, MaybeCheckAliasing checkAliasing)
{
JS_ASSERT(i < numFormalArgs());
JS_ASSERT_IF(checkAliasing, !script()->formalIsAliased(i));
return formals()[i];
}
inline Value &
StackFrame::unaliasedActual(unsigned i)
{
JS_ASSERT(i < numActualArgs());
JS_ASSERT(!script()->formalIsAliased(i));
return i < numFormalArgs() ? formals()[i] : actuals()[i];
} }
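
The unaliasedVar/unaliasedLocal/unaliasedFormal accessors enforce, in checked builds, that direct frame-slot access is only used for bindings not aliased by a scope object, with an explicit opt-out for debugger-style access. A standalone sketch of that checked accessor; the types are invented and a plain double stands in for Value.

// Sketch: debug-checked direct access to frame slots.
#include <cassert>
#include <vector>
#include <iostream>

struct CheckedFrame {
    std::vector<double> slots;
    std::vector<bool> varIsAliased;

    double &unaliasedVar(unsigned i, bool checkAliasing = true) {
        if (checkAliasing)
            assert(!varIsAliased[i] && "aliased vars must go through the scope object");
        return slots[i];
    }
};

int main()
{
    CheckedFrame fp{{1.5, 2.5}, {false, true}};
    fp.unaliasedVar(0) = 3.5;                       // fine: var 0 is unaliased
    std::cout << fp.unaliasedVar(0) << '\n';        // 3.5
    // fp.unaliasedVar(1) would assert: var 1 is aliased.
    std::cout << fp.unaliasedVar(1, /* checkAliasing = */ false) << '\n';  // DONT_CHECK_ALIASING-style bypass
}
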
template <class Op> template <class Op>
inline bool inline void
StackFrame::forEachFormalArg(Op op) StackFrame::forEachUnaliasedActual(Op op)
{ {
Value *formals = formalArgsEnd() - fun()->nargs; JS_ASSERT(script()->numClosedArgs() == 0);
Value *formalsEnd = formalArgsEnd(); JS_ASSERT(!script()->needsArgsObj());
unsigned i = 0;
for (Value *p = formals; p != formalsEnd; ++p, ++i) { unsigned nformal = numFormalArgs();
if (!op(i, p)) unsigned nactual = numActualArgs();
return false;
const Value *formalsEnd = (const Value *)this;
const Value *formals = formalsEnd - nformal;
if (nactual <= nformal) {
const Value *actualsEnd = formals + nactual;
for (const Value *p = formals; p < actualsEnd; ++p)
op(*p);
} else {
for (const Value *p = formals; p < formalsEnd; ++p)
op(*p);
const Value *actualsEnd = formals - 2;
const Value *actuals = actualsEnd - nactual;
for (const Value *p = actuals + nformal; p < actualsEnd; ++p)
op(*p);
} }
return true;
} }
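
forEachUnaliasedActual relies on the frame layout: the nformal formal slots sit directly below the StackFrame, and when a call passes more actuals than formals the full actuals sit further down, separated by a two-slot gap, so each actual is visited exactly once. A standalone model of that traversal, with invented names and a plain double standing in for Value.

#include <vector>
#include <iostream>

struct ModelArgLayout {
    std::vector<double> stack;   // grows upward; the frame would sit at stack.end()
    unsigned nformal;
    unsigned nactual;

    // formals occupy the last nformal entries; overflow actuals occupy
    // nactual entries ending 2 slots below the formals.
    template <class Op>
    void forEachActual(Op op) const {
        const double *formalsEnd = stack.data() + stack.size();
        const double *formals = formalsEnd - nformal;
        if (nactual <= nformal) {
            for (const double *p = formals; p < formals + nactual; ++p)
                op(*p);
        } else {
            for (const double *p = formals; p < formalsEnd; ++p)
                op(*p);                                   // the copied-up first nformal actuals
            const double *actualsEnd = formals - 2;
            const double *actuals = actualsEnd - nactual;
            for (const double *p = actuals + nformal; p < actualsEnd; ++p)
                op(*p);                                   // the overflow actuals
        }
    }
};

int main()
{
    // Call with 3 actuals into a function of 2 formals:
    // [a0 a1 a2][gap gap][a0 a1] <- frame would start here.
    ModelArgLayout layout{{10, 20, 30, 0, 0, 10, 20}, 2, 3};
    layout.forEachActual([](double v) { std::cout << v << ' '; });  // prints: 10 20 30
    std::cout << '\n';
}
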
struct CopyTo struct CopyTo
{ {
Value *dst; Value *dst;
CopyTo(Value *dst) : dst(dst) {} CopyTo(Value *dst) : dst(dst) {}
bool operator()(unsigned, Value *src) { void operator()(const Value &src) { *dst++ = src; }
*dst++ = *src;
return true;
}
}; };
inline unsigned
StackFrame::numFormalArgs() const
{
JS_ASSERT(hasArgs());
return fun()->nargs;
}
inline unsigned inline unsigned
StackFrame::numActualArgs() const StackFrame::numActualArgs() const
{ {
@ -267,23 +318,20 @@ StackFrame::numActualArgs() const
return numFormalArgs(); return numFormalArgs();
} }
inline Value * inline ArgumentsObject &
StackFrame::actualArgs() const StackFrame::argsObj() const
{ {
JS_ASSERT(hasArgs()); JS_ASSERT(script()->needsArgsObj());
Value *argv = formalArgs(); JS_ASSERT(flags_ & HAS_ARGS_OBJ);
if (JS_UNLIKELY(flags_ & OVERFLOW_ARGS)) return *argsObj_;
return argv - (2 + u.nactual);
return argv;
} }
inline Value * inline void
StackFrame::actualArgsEnd() const StackFrame::initArgsObj(ArgumentsObject &argsobj)
{ {
JS_ASSERT(hasArgs()); JS_ASSERT(script()->needsArgsObj());
if (JS_UNLIKELY(flags_ & OVERFLOW_ARGS)) flags_ |= HAS_ARGS_OBJ;
return formalArgs() - 2; argsObj_ = &argsobj;
return formalArgs() + numActualArgs();
} }
inline ScopeObject & inline ScopeObject &
@ -292,54 +340,29 @@ StackFrame::aliasedVarScope(ScopeCoordinate sc) const
JSObject *scope = &scopeChain()->asScope(); JSObject *scope = &scopeChain()->asScope();
for (unsigned i = sc.hops; i; i--) for (unsigned i = sc.hops; i; i--)
scope = &scope->asScope().enclosingScope(); scope = &scope->asScope().enclosingScope();
#ifdef DEBUG
if (scope->isCall()) {
JS_ASSERT(scope->asCall() == callObj());
JS_ASSERT(scope->asCall().maybeStackFrame() == this);
} else {
StaticBlockObject &target = scope->asClonedBlock().staticBlock();
StaticBlockObject *b = &blockChain();
while (b != &target)
b = b->enclosingBlock();
}
#endif
return scope->asScope(); return scope->asScope();
} }
inline void inline void
StackFrame::setScopeChain(JSObject &obj) StackFrame::pushOnScopeChain(ScopeObject &scope)
{ {
#ifdef DEBUG JS_ASSERT(*scopeChain() == scope.enclosingScope() ||
JS_ASSERT(&obj != NULL); *scopeChain() == scope.asCall().enclosingScope().asDeclEnv().enclosingScope());
if (hasCallObj()) { scopeChain_ = &scope;
JSObject *pobj = &obj;
while (pobj && !pobj->isWith() && pobj->asScope().maybeStackFrame() != this)
pobj = pobj->enclosingScope();
JS_ASSERT(pobj);
} else {
for (JSObject *pobj = &obj; pobj->isScope() && !pobj->isWith(); pobj = pobj->enclosingScope())
JS_ASSERT_IF(pobj->isCall(), pobj->asScope().maybeStackFrame() != this);
}
#endif
scopeChain_ = &obj;
flags_ |= HAS_SCOPECHAIN; flags_ |= HAS_SCOPECHAIN;
} }
inline void inline void
StackFrame::initScopeChain(CallObject &obj) StackFrame::popOffScopeChain()
{ {
JS_ASSERT(&obj != NULL); JS_ASSERT(flags_ & HAS_SCOPECHAIN);
JS_ASSERT(!hasCallObj() && obj.maybeStackFrame() == this); scopeChain_ = &scopeChain_->asScope().enclosingScope();
scopeChain_ = &obj;
flags_ |= HAS_SCOPECHAIN | HAS_CALL_OBJ;
} }
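
pushOnScopeChain and popOffScopeChain treat the scope chain as a stack of environments linked through their enclosing scope. A minimal sketch of that discipline with invented types; the named-lambda case, where the call object's enclosing DeclEnv sits between the new scope and the old chain head, is omitted for brevity.

#include <cassert>
#include <string>
#include <iostream>

struct ModelEnv {
    std::string name;
    ModelEnv *enclosing;
};

struct ChainFrame {
    ModelEnv *scopeChain;

    void push(ModelEnv &env) {
        // A new environment must sit directly on top of the current chain.
        assert(env.enclosing == scopeChain);
        scopeChain = &env;
    }
    void pop() {
        scopeChain = scopeChain->enclosing;
    }
};

int main()
{
    ModelEnv global{"global", nullptr};
    ModelEnv call{"call", &global};
    ModelEnv block{"block", &call};

    ChainFrame fp{&global};
    fp.push(call);
    fp.push(block);
    fp.pop();
    std::cout << fp.scopeChain->name << '\n';  // prints: call
}
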
inline CallObject & inline CallObject &
StackFrame::callObj() const StackFrame::callObj() const
{ {
JS_ASSERT_IF(isNonEvalFunctionFrame() || isStrictEvalFrame(), hasCallObj()); JS_ASSERT(fun()->isHeavyweight());
JSObject *pobj = scopeChain(); JSObject *pobj = scopeChain();
while (JS_UNLIKELY(!pobj->isCall())) while (JS_UNLIKELY(!pobj->isCall()))
@ -347,89 +370,6 @@ StackFrame::callObj() const
return pobj->asCall(); return pobj->asCall();
} }
inline bool
StackFrame::maintainNestingState() const
{
/*
* Whether to invoke the nesting epilogue/prologue to maintain active
* frame counts and check for reentrant outer functions.
*/
return isNonEvalFunctionFrame() && !isGeneratorFrame() && script()->nesting();
}
inline bool
StackFrame::functionPrologue(JSContext *cx)
{
JS_ASSERT(isNonEvalFunctionFrame());
JS_ASSERT(!isGeneratorFrame());
if (fun()->isHeavyweight()) {
CallObject *callobj = CallObject::createForFunction(cx, this);
if (!callobj)
return false;
initScopeChain(*callobj);
} else {
/* Force instantiation of the scope chain, for JIT frames. */
scopeChain();
}
if (script()->nesting()) {
JS_ASSERT(maintainNestingState());
types::NestingPrologue(cx, this);
}
return true;
}
inline void
StackFrame::functionEpilogue(JSContext *cx)
{
JS_ASSERT(isNonEvalFunctionFrame());
if (cx->compartment->debugMode())
cx->runtime->debugScopes->onPopCall(this);
if (flags_ & (HAS_ARGS_OBJ | HAS_CALL_OBJ)) {
if (hasCallObj())
js_PutCallObject(this, scopeChain_->asCall());
if (hasArgsObj())
js_PutArgsObject(this);
}
if (maintainNestingState())
types::NestingEpilogue(this);
}
inline void
StackFrame::updateEpilogueFlags()
{
if (flags_ & (HAS_ARGS_OBJ | HAS_CALL_OBJ)) {
if (hasArgsObj() && !argsObj().maybeStackFrame())
flags_ &= ~HAS_ARGS_OBJ;
if (hasCallObj() && !callObj().maybeStackFrame()) {
/*
* For function frames, the call object may or may not have have an
* enclosing DeclEnv object, so we use the callee's parent, since
* it was the initial scope chain. For global (strict) eval frames,
* there is no callee, but the call object's parent is the initial
* scope chain.
*/
scopeChain_ = isFunctionFrame()
? callee().environment()
: &scopeChain_->asScope().enclosingScope();
flags_ &= ~HAS_CALL_OBJ;
}
}
/*
* For outer/inner function frames, undo the active frame balancing so that
* when we redo it in the epilogue we get the right final value. The other
* nesting epilogue changes (update active args/vars) are idempotent.
*/
if (maintainNestingState())
script()->nesting()->activeFrames++;
}
/*****************************************************************************/ /*****************************************************************************/
STATIC_POSTCONDITION(!return || ubound(from) >= nvals) STATIC_POSTCONDITION(!return || ubound(from) >= nvals)
@ -451,7 +391,7 @@ inline Value *
StackSpace::getStackLimit(JSContext *cx, MaybeReportError report) StackSpace::getStackLimit(JSContext *cx, MaybeReportError report)
{ {
FrameRegs &regs = cx->regs(); FrameRegs &regs = cx->regs();
unsigned nvals = regs.fp()->numSlots() + STACK_JIT_EXTRA; unsigned nvals = regs.fp()->script()->nslots + STACK_JIT_EXTRA;
return ensureSpace(cx, report, regs.sp, nvals) return ensureSpace(cx, report, regs.sp, nvals)
? conservativeEnd_ ? conservativeEnd_
: NULL; : NULL;
@ -472,7 +412,7 @@ ContextStack::getCallFrame(JSContext *cx, MaybeReportError report, const CallArg
/* Include extra space to satisfy the method-jit stackLimit invariant. */ /* Include extra space to satisfy the method-jit stackLimit invariant. */
unsigned nvals = VALUES_PER_STACK_FRAME + script->nslots + StackSpace::STACK_JIT_EXTRA; unsigned nvals = VALUES_PER_STACK_FRAME + script->nslots + StackSpace::STACK_JIT_EXTRA;
/* Maintain layout invariant: &formalArgs[0] == ((Value *)fp) - nformal. */ /* Maintain layout invariant: &formals[0] == ((Value *)fp) - nformal. */
if (args.length() == nformal) { if (args.length() == nformal) {
if (!space().ensureSpace(cx, report, firstUnused, nvals)) if (!space().ensureSpace(cx, report, firstUnused, nvals))
@ -564,9 +504,7 @@ ContextStack::popInlineFrame(FrameRegs &regs)
JS_ASSERT(&regs == &seg_->regs()); JS_ASSERT(&regs == &seg_->regs());
StackFrame *fp = regs.fp(); StackFrame *fp = regs.fp();
fp->functionEpilogue(cx_); Value *newsp = fp->actuals() - 1;
Value *newsp = fp->actualArgs() - 1;
JS_ASSERT(newsp >= fp->prev()->base()); JS_ASSERT(newsp >= fp->prev()->base());
newsp[-1] = fp->returnValue(); newsp[-1] = fp->returnValue();
@ -579,7 +517,7 @@ ContextStack::popFrameAfterOverflow()
/* Restore the regs to what they were on entry to JSOP_CALL. */ /* Restore the regs to what they were on entry to JSOP_CALL. */
FrameRegs &regs = seg_->regs(); FrameRegs &regs = seg_->regs();
StackFrame *fp = regs.fp(); StackFrame *fp = regs.fp();
regs.popFrame(fp->actualArgsEnd()); regs.popFrame(fp->actuals() + fp->numActualArgs());
} }
inline JSScript * inline JSScript *


@ -90,22 +90,22 @@ StackFrame::initDummyFrame(JSContext *cx, JSObject &chain)
flags_ = DUMMY | HAS_PREVPC | HAS_SCOPECHAIN; flags_ = DUMMY | HAS_PREVPC | HAS_SCOPECHAIN;
initPrev(cx); initPrev(cx);
JS_ASSERT(chain.isGlobal()); JS_ASSERT(chain.isGlobal());
setScopeChain(chain); scopeChain_ = &chain;
} }
template <class T, class U, StackFrame::TriggerPostBarriers doPostBarrier> template <class T, class U, StackFrame::TriggerPostBarriers doPostBarrier>
void void
StackFrame::stealFrameAndSlots(JSContext *cx, StackFrame *fp, T *vp, StackFrame::copyFrameAndValues(JSContext *cx, StackFrame *fp, T *vp,
StackFrame *otherfp, U *othervp, Value *othersp) StackFrame *otherfp, U *othervp, Value *othersp)
{ {
JS_ASSERT((U *)vp == (U *)this - ((U *)otherfp - othervp)); JS_ASSERT((U *)vp == (U *)this - ((U *)otherfp - othervp));
JS_ASSERT((Value *)othervp == otherfp->actualArgs() - 2); JS_ASSERT((Value *)othervp == otherfp->generatorArgsSnapshotBegin());
JS_ASSERT(othersp >= otherfp->slots()); JS_ASSERT(othersp >= otherfp->slots());
JS_ASSERT(othersp <= otherfp->base() + otherfp->numSlots()); JS_ASSERT(othersp <= otherfp->generatorSlotsSnapshotBegin() + otherfp->script()->nslots);
JS_ASSERT((T *)fp - vp == (U *)otherfp - othervp); JS_ASSERT((T *)fp - vp == (U *)otherfp - othervp);
/* Copy args, StackFrame, and slots. */ /* Copy args, StackFrame, and slots. */
U *srcend = (U *)otherfp->formalArgsEnd(); U *srcend = (U *)otherfp->generatorArgsSnapshotEnd();
T *dst = vp; T *dst = vp;
for (U *src = othervp; src < srcend; src++, dst++) for (U *src = othervp; src < srcend; src++, dst++)
*dst = *src; *dst = *src;
@ -119,39 +119,15 @@ StackFrame::stealFrameAndSlots(JSContext *cx, StackFrame *fp, T *vp,
for (U *src = (U *)otherfp->slots(); src < srcend; src++, dst++) for (U *src = (U *)otherfp->slots(); src < srcend; src++, dst++)
*dst = *src; *dst = *src;
/*
* Repoint Call, Arguments, Block and With objects to the new live frame.
* Call and Arguments are done directly because we have pointers to them.
* Block and With objects are done indirectly through 'liveFrame'. See
* js_LiveFrameToFloating comment in jsiter.h.
*/
if (hasCallObj()) {
CallObject &obj = callObj();
obj.setStackFrame(this);
otherfp->flags_ &= ~HAS_CALL_OBJ;
if (js_IsNamedLambda(fun())) {
DeclEnvObject &env = obj.enclosingScope().asDeclEnv();
env.setStackFrame(this);
}
}
if (hasArgsObj()) {
ArgumentsObject &argsobj = argsObj();
if (argsobj.isNormalArguments())
argsobj.setStackFrame(this);
else
JS_ASSERT(!argsobj.maybeStackFrame());
otherfp->flags_ &= ~HAS_ARGS_OBJ;
}
if (cx->compartment->debugMode()) if (cx->compartment->debugMode())
cx->runtime->debugScopes->onGeneratorFrameChange(otherfp, this); cx->runtime->debugScopes->onGeneratorFrameChange(otherfp, this);
} }
/* Note: explicit instantiation for js_NewGenerator located in jsiter.cpp. */ /* Note: explicit instantiation for js_NewGenerator located in jsiter.cpp. */
template void StackFrame::stealFrameAndSlots<Value, HeapValue, StackFrame::NoPostBarrier>( template void StackFrame::copyFrameAndValues<Value, HeapValue, StackFrame::NoPostBarrier>(
JSContext *, StackFrame *, Value *, JSContext *, StackFrame *, Value *,
StackFrame *, HeapValue *, Value *); StackFrame *, HeapValue *, Value *);
template void StackFrame::stealFrameAndSlots<HeapValue, Value, StackFrame::DoPostBarrier>( template void StackFrame::copyFrameAndValues<HeapValue, Value, StackFrame::DoPostBarrier>(
JSContext *, StackFrame *, HeapValue *, JSContext *, StackFrame *, HeapValue *,
StackFrame *, Value *, Value *); StackFrame *, Value *, Value *);
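
copyFrameAndValues moves a generator's args, frame, and expression slots as contiguous ranges between the heap snapshot and the live stack, threading a post-barrier policy through as a template parameter. A standalone sketch of that shape, with invented types and a callback standing in for the barrier; it models the pattern, not the engine's frame layout.

#include <vector>
#include <iostream>

struct FrameSnapshot {
    std::vector<double> values;   // args + frame words + slots, flattened
};

template <class PostBarrier>
static void copyFrameAndValues(const FrameSnapshot &src, FrameSnapshot &dst, PostBarrier barrier)
{
    dst.values = src.values;              // copy args, frame, and slots as one range
    for (double &v : dst.values)
        barrier(&v);                      // e.g. record new locations for the GC
}

int main()
{
    FrameSnapshot heapCopy{{1, 2, 3, 4}};
    FrameSnapshot stackCopy;
    unsigned barriered = 0;
    copyFrameAndValues(heapCopy, stackCopy, [&](double *) { barriered++; });
    std::cout << stackCopy.values.size() << ' ' << barriered << '\n';  // prints: 4 4
}
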
@ -163,7 +139,7 @@ StackFrame::writeBarrierPost()
JSObject::writeBarrierPost(scopeChain_, (void *)&scopeChain_); JSObject::writeBarrierPost(scopeChain_, (void *)&scopeChain_);
if (isDummyFrame()) if (isDummyFrame())
return; return;
if (hasArgsObj()) if (flags_ & HAS_ARGS_OBJ)
JSObject::writeBarrierPost(argsObj_, (void *)&argsObj_); JSObject::writeBarrierPost(argsObj_, (void *)&argsObj_);
if (isScriptFrame()) { if (isScriptFrame()) {
if (isFunctionFrame()) { if (isFunctionFrame()) {
@ -178,8 +154,29 @@ StackFrame::writeBarrierPost()
HeapValue::writeBarrierPost(rval_, &rval_); HeapValue::writeBarrierPost(rval_, &rval_);
} }
JSGenerator *
StackFrame::maybeSuspendedGenerator(JSRuntime *rt)
{
/*
* A suspended generator's frame is embedded inside the JSGenerator object
* instead of on the contiguous stack like all active frames.
*/
if (!isGeneratorFrame() || rt->stackSpace.containsFast(this))
return NULL;
/*
* Once we know we have a suspended generator frame, there is a static
 * offset from the frame's snapshot to the beginning of the JSGenerator.
*/
char *vp = reinterpret_cast<char *>(generatorArgsSnapshotBegin());
char *p = vp - offsetof(JSGenerator, stackSnapshot);
JSGenerator *gen = reinterpret_cast<JSGenerator *>(p);
JS_ASSERT(gen->fp == this);
return gen;
}
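
maybeSuspendedGenerator recovers the owning JSGenerator from an interior pointer by subtracting the member's offset, the classic container_of pattern. A self-contained sketch of the same trick on invented standard-layout types.

#include <cstddef>
#include <cassert>

struct Snapshot { double values[4]; };

struct Generator {
    int state;
    Snapshot stackSnapshot;   // interior object handed out elsewhere
};

// Subtract the member's offset from the member's address to get back to the
// enclosing object. Valid for standard-layout types like these.
static Generator *generatorFromSnapshot(Snapshot *snap)
{
    char *p = reinterpret_cast<char *>(snap) - offsetof(Generator, stackSnapshot);
    return reinterpret_cast<Generator *>(p);
}

int main()
{
    Generator gen{1, {{0, 1, 2, 3}}};
    assert(generatorFromSnapshot(&gen.stackSnapshot) == &gen);
}
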
jsbytecode * jsbytecode *
StackFrame::prevpcSlow(JSInlinedSite **pinlined) StackFrame::prevpcSlow(InlinedSite **pinlined)
{ {
JS_ASSERT(!(flags_ & HAS_PREVPC)); JS_ASSERT(!(flags_ & HAS_PREVPC));
#if defined(JS_METHODJIT) && defined(JS_MONOIC) #if defined(JS_METHODJIT) && defined(JS_MONOIC)
@ -197,7 +194,7 @@ StackFrame::prevpcSlow(JSInlinedSite **pinlined)
} }
jsbytecode * jsbytecode *
StackFrame::pcQuadratic(const ContextStack &stack, StackFrame *next, JSInlinedSite **pinlined) StackFrame::pcQuadratic(const ContextStack &stack, StackFrame *next, InlinedSite **pinlined)
{ {
JS_ASSERT_IF(next, next->prev() == this); JS_ASSERT_IF(next, next->prev() == this);
@ -219,6 +216,116 @@ StackFrame::pcQuadratic(const ContextStack &stack, StackFrame *next, JSInlinedSi
return next->prevpc(pinlined); return next->prevpc(pinlined);
} }
bool
StackFrame::prologue(JSContext *cx, bool newType)
{
JS_ASSERT(!isDummyFrame());
JS_ASSERT(!isGeneratorFrame());
JS_ASSERT(cx->regs().pc == script()->code);
if (isEvalFrame()) {
if (script()->strictModeCode) {
CallObject *callobj = CallObject::createForStrictEval(cx, this);
if (!callobj)
return false;
pushOnScopeChain(*callobj);
flags_ |= HAS_CALL_OBJ;
}
return true;
}
if (isGlobalFrame())
return true;
JS_ASSERT(isNonEvalFunctionFrame());
if (fun()->isHeavyweight()) {
CallObject *callobj = CallObject::createForFunction(cx, this);
if (!callobj)
return false;
pushOnScopeChain(*callobj);
flags_ |= HAS_CALL_OBJ;
}
if (script()->nesting()) {
types::NestingPrologue(cx, this);
flags_ |= HAS_NESTING;
}
if (isConstructing()) {
RootedObject callee(cx, &this->callee());
JSObject *obj = js_CreateThisForFunction(cx, callee, newType);
if (!obj)
return false;
functionThis() = ObjectValue(*obj);
}
Probes::enterJSFun(cx, fun(), script());
return true;
}
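
The new prologue centralizes the per-frame-kind setup that used to be scattered across functionPrologue and the jit paths. A reduced standalone sketch of just the "does this frame get a call object?" decision; the types are invented, and the nesting-prologue and this-object steps are deliberately left out.

#include <iostream>

enum class FrameKind { GlobalFrame, EvalFrame, FunctionFrame };

struct FrameModel {
    FrameKind kind;
    bool strictCode;       // for eval frames
    bool heavyweight;      // for function frames (closed-over or dynamically accessed names)
};

// Returns whether a call object (variables environment) must be pushed.
static bool prologuePushesCallObject(const FrameModel &fp)
{
    if (fp.kind == FrameKind::EvalFrame)
        return fp.strictCode;            // strict eval gets its own var environment
    if (fp.kind == FrameKind::GlobalFrame)
        return false;                    // global code uses the global object
    return fp.heavyweight;               // only heavyweight functions need one
}

int main()
{
    FrameModel lightweight{FrameKind::FunctionFrame, false, false};
    FrameModel heavyweight{FrameKind::FunctionFrame, false, true};
    std::cout << prologuePushesCallObject(lightweight) << ' '
              << prologuePushesCallObject(heavyweight) << '\n';   // prints: 0 1
}
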
void
StackFrame::epilogue(JSContext *cx)
{
JS_ASSERT(!isDummyFrame());
JS_ASSERT(!isYielding());
JS_ASSERT(!hasBlockChain());
if (isEvalFrame()) {
if (isStrictEvalFrame()) {
JS_ASSERT_IF(hasCallObj(), scopeChain()->asCall().isForEval());
if (cx->compartment->debugMode())
cx->runtime->debugScopes->onPopStrictEvalScope(this);
} else if (isDirectEvalFrame()) {
if (isDebuggerFrame())
JS_ASSERT(!scopeChain()->isScope());
else
JS_ASSERT(scopeChain() == prev()->scopeChain());
} else {
JS_ASSERT(scopeChain()->isGlobal());
}
return;
}
if (isGlobalFrame()) {
JS_ASSERT(!scopeChain()->isScope());
return;
}
JS_ASSERT(isNonEvalFunctionFrame());
if (fun()->isHeavyweight()) {
JS_ASSERT_IF(hasCallObj(),
scopeChain()->asCall().getCalleeFunction()->script() == script());
} else {
JS_ASSERT(!scopeChain()->isCall() || scopeChain()->asCall().isForEval() ||
scopeChain()->asCall().getCalleeFunction()->script() != script());
}
if (cx->compartment->debugMode())
cx->runtime->debugScopes->onPopCall(this);
Probes::exitJSFun(cx, fun(), script());
if (script()->nesting() && (flags_ & HAS_NESTING))
types::NestingEpilogue(this);
if (isConstructing() && returnValue().isPrimitive())
setReturnValue(ObjectValue(constructorThis()));
}
bool
StackFrame::jitStrictEvalPrologue(JSContext *cx)
{
JS_ASSERT(isStrictEvalFrame());
CallObject *callobj = CallObject::createForStrictEval(cx, this);
if (!callobj)
return false;
pushOnScopeChain(*callobj);
flags_ |= HAS_CALL_OBJ;
return true;
}
bool
StackFrame::pushBlock(JSContext *cx, StaticBlockObject &block)
{
@@ -230,7 +337,7 @@ StackFrame::pushBlock(JSContext *cx, StaticBlockObject &block)
if (!clone)
return false;
-scopeChain_ = clone;
+pushOnScopeChain(*clone);
}
flags_ |= HAS_BLOCKCHAIN;
@@ -247,10 +354,8 @@ StackFrame::popBlock(JSContext *cx)
cx->runtime->debugScopes->onPopBlock(cx, this);
if (blockChain_->needsClone()) {
-ClonedBlockObject &clone = scopeChain()->asClonedBlock();
-JS_ASSERT(clone.staticBlock() == *blockChain_);
-clone.put(cx->fp());
-scopeChain_ = &clone.enclosingScope();
+JS_ASSERT(scopeChain_->asClonedBlock().staticBlock() == *blockChain_);
+popOffScopeChain();
}
blockChain_ = blockChain_->enclosingBlock();
@@ -262,7 +367,8 @@ StackFrame::popWith(JSContext *cx)
if (cx->compartment->debugMode())
cx->runtime->debugScopes->onPopWith(this);
-setScopeChain(scopeChain()->asWith().enclosingScope());
+JS_ASSERT(scopeChain()->isWith());
+popOffScopeChain();
}
void
@@ -277,7 +383,7 @@ StackFrame::mark(JSTracer *trc)
gc::MarkObjectUnbarriered(trc, &scopeChain_, "scope chain");
if (isDummyFrame())
return;
-if (hasArgsObj())
+if (flags_ & HAS_ARGS_OBJ)
gc::MarkObjectUnbarriered(trc, &argsObj_, "arguments");
if (isFunctionFrame()) {
gc::MarkObjectUnbarriered(trc, &exec.fun, "fun");
@@ -462,7 +568,7 @@ StackSpace::containingSegment(const StackFrame *target) const
}
void
-StackSpace::markFrameSlots(JSTracer *trc, StackFrame *fp, Value *slotsEnd, jsbytecode *pc)
+StackSpace::markFrameValues(JSTracer *trc, StackFrame *fp, Value *slotsEnd, jsbytecode *pc)
{
Value *slotsBegin = fp->slots();
@@ -536,12 +642,12 @@ StackSpace::mark(JSTracer *trc)
jsbytecode *pc = seg->maybepc();
for (StackFrame *fp = seg->maybefp(); (Value *)fp > (Value *)seg; fp = fp->prev()) {
/* Mark from fp->slots() to slotsEnd. */
-markFrameSlots(trc, fp, slotsEnd, pc);
+markFrameValues(trc, fp, slotsEnd, pc);
fp->mark(trc);
slotsEnd = (Value *)fp;
-JSInlinedSite *site;
+InlinedSite *site;
pc = fp->prevpc(&site);
JS_ASSERT_IF(fp->prev(), !site);
}
@@ -701,7 +807,7 @@ ContextStack::ensureOnTop(JSContext *cx, MaybeReportError report, unsigned nvars
*/
if (FrameRegs *regs = cx->maybeRegs()) {
JSFunction *fun = NULL;
-if (JSInlinedSite *site = regs->inlined()) {
+if (InlinedSite *site = regs->inlined()) {
mjit::JITChunk *chunk = regs->fp()->jit()->chunk(regs->pc);
fun = chunk->inlineFrames()[site->inlineIndex].fun;
} else {
@@ -852,7 +958,7 @@ ContextStack::pushExecuteFrame(JSContext *cx, JSScript *script, const Value &thi
StackFrame *prev = evalInFrame ? evalInFrame : maybefp();
StackFrame *fp = reinterpret_cast<StackFrame *>(firstUnused + 2);
fp->initExecuteFrame(script, prev, seg_->maybeRegs(), thisv, scopeChain, type);
-SetValueRangeToUndefined(fp->slots(), script->nfixed);
+fp->initVarsToUndefined();
efg->regs_.prepareToRun(*fp, script);
/* pushRegs() below links the prev-frame; manually link the prev-call. */
@@ -894,9 +1000,6 @@ ContextStack::popFrame(const FrameGuard &fg)
JS_ASSERT(space().firstUnused() == fg.regs_.sp);
JS_ASSERT(&fg.regs_ == &seg_->regs());
-if (fg.regs_.fp()->isNonEvalFunctionFrame())
-fg.regs_.fp()->functionEpilogue(cx_);
seg_->popRegs(fg.prevRegs_);
if (fg.pushedSeg_)
popSegment();
@@ -912,11 +1015,11 @@ ContextStack::popFrame(const FrameGuard &fg)
bool
ContextStack::pushGeneratorFrame(JSContext *cx, JSGenerator *gen, GeneratorFrameGuard *gfg)
{
-StackFrame *genfp = gen->floatingFrame();
-HeapValue *genvp = gen->floatingStack;
-unsigned vplen = (HeapValue *)genfp - genvp;
-unsigned nvars = vplen + VALUES_PER_STACK_FRAME + genfp->numSlots();
+HeapValue *genvp = gen->stackSnapshot;
+JS_ASSERT(genvp == HeapValueify(gen->fp->generatorArgsSnapshotBegin()));
+unsigned vplen = HeapValueify(gen->fp->generatorArgsSnapshotEnd()) - genvp;
+unsigned nvars = vplen + VALUES_PER_STACK_FRAME + gen->fp->script()->nslots;
Value *firstUnused = ensureOnTop(cx, REPORT_ERROR, nvars, CAN_EXTEND, &gfg->pushedSeg_);
if (!firstUnused)
return false;
@@ -935,15 +1038,13 @@ ContextStack::pushGeneratorFrame(JSContext *cx, JSGenerator *gen, GeneratorFrame
* We don't need to worry about generational barriers as the generator
* object has a trace hook and cannot be nursery allocated.
*/
-JSObject *genobj = js_FloatingFrameToGenerator(genfp)->obj;
-JS_ASSERT(genobj->getClass()->trace);
-JSObject::writeBarrierPre(genobj);
+JS_ASSERT(gen->obj->getClass()->trace);
+JSObject::writeBarrierPre(gen->obj);
/* Copy from the generator's floating frame to the stack. */
-stackfp->stealFrameAndSlots<Value, HeapValue, StackFrame::NoPostBarrier>(
-cx, stackfp, stackvp, genfp, genvp, gen->regs.sp);
+stackfp->copyFrameAndValues<Value, HeapValue, StackFrame::NoPostBarrier>(
+cx, stackfp, stackvp, gen->fp, genvp, gen->regs.sp);
stackfp->resetGeneratorPrev(cx);
-stackfp->unsetFloatingGenerator();
gfg->regs_.rebaseFromTo(gen->regs, *stackfp);
gfg->prevRegs_ = seg_->pushRegs(gfg->regs_);
@@ -956,18 +1057,19 @@ void
ContextStack::popGeneratorFrame(const GeneratorFrameGuard &gfg)
{
JSGenerator *gen = gfg.gen_;
-StackFrame *genfp = gen->floatingFrame();
-HeapValue *genvp = gen->floatingStack;
+HeapValue *genvp = gen->stackSnapshot;
+JS_ASSERT(genvp == HeapValueify(gen->fp->generatorArgsSnapshotBegin()));
const FrameRegs &stackRegs = gfg.regs_;
StackFrame *stackfp = stackRegs.fp();
Value *stackvp = gfg.stackvp_;
/* Copy from the stack to the generator's floating frame. */
-gen->regs.rebaseFromTo(stackRegs, *genfp);
-genfp->stealFrameAndSlots<HeapValue, Value, StackFrame::DoPostBarrier>(
-cx_, genfp, genvp, stackfp, stackvp, stackRegs.sp);
-genfp->setFloatingGenerator();
+if (stackfp->isYielding()) {
+gen->regs.rebaseFromTo(stackRegs, *gen->fp);
+gen->fp->copyFrameAndValues<HeapValue, Value, StackFrame::DoPostBarrier>(
+cx_, gen->fp, genvp, stackfp, stackvp, stackRegs.sp);
+}
/* ~FrameGuard/popFrame will finish the popping. */
JS_ASSERT(ImplicitCast<const FrameGuard>(gfg).pushed());
@@ -1016,7 +1118,7 @@ StackIter::popFrame()
JS_ASSERT(seg_->contains(oldfp));
fp_ = fp_->prev();
if (seg_->contains(fp_)) {
-JSInlinedSite *inline_;
+InlinedSite *inline_;
pc_ = oldfp->prevpc(&inline_);
JS_ASSERT(!inline_);
@@ -1029,7 +1131,7 @@ StackIter::popFrame()
*/
if (oldfp->isGeneratorFrame()) {
/* Generator's args do not overlap with the caller's expr stack. */
-sp_ = (Value *)oldfp->actualArgs() - 2;
+sp_ = oldfp->generatorArgsSnapshotBegin();
} else if (oldfp->isNonEvalFunctionFrame()) {
/*
* When Invoke is called from a native, there will be an enclosing
@@ -1039,7 +1141,7 @@ StackIter::popFrame()
* cases, the actual arguments of the callee should be included in
* the caller's expr stack.
*/
-sp_ = oldfp->actualArgsEnd();
+sp_ = oldfp->actuals() + oldfp->numActualArgs();
} else if (oldfp->isFramePushedByExecute()) {
/* pushExecuteFrame pushes exactly (callee, this) before frame. */
sp_ = (Value *)oldfp - 2;
@@ -1094,7 +1196,8 @@ StackIter::startOnSegment(StackSegment *seg)
static void JS_NEVER_INLINE
CrashIfInvalidSlot(StackFrame *fp, Value *vp)
{
-if (vp < fp->slots() || vp >= fp->slots() + fp->script()->nslots) {
+Value *slots = (Value *)(fp + 1);
+if (vp < slots || vp >= slots + fp->script()->nslots) {
JS_ASSERT(false && "About to dereference invalid slot");
*(int *)0xbad = 0; // show up nicely in crash-stats
MOZ_Assert("About to dereference invalid slot", __FILE__, __LINE__);

View file

@@ -14,14 +14,7 @@
struct JSContext;
struct JSCompartment;
-#ifdef JS_METHODJIT
-namespace js { namespace mjit { struct CallSite; }}
-typedef js::mjit::CallSite JSInlinedSite;
-#else
-struct JSInlinedSite {};
-#endif
-typedef /* js::mjit::RejoinState */ size_t JSRejoinState;
+extern void js_DumpStackFrame(JSContext *, js::StackFrame *);
namespace js {
@@ -49,10 +42,18 @@ class StaticBlockObject;
#ifdef JS_METHODJIT
namespace mjit {
+class CallCompiler;
+class GetPropCompiler;
+struct CallSite;
struct JITScript;
jsbytecode *NativeToPC(JITScript *jit, void *ncode, CallSite **pinline);
+namespace ic { struct GetElementIC; }
}
+typedef mjit::CallSite InlinedSite;
+#else
+struct InlinedSite {};
#endif
+typedef size_t FrameRejoinState;
namespace detail {
struct OOMCheck;
@@ -63,10 +64,9 @@ namespace detail {
/*
* VM stack layout
*
-* SpiderMonkey uses a per-thread stack to store the activation records,
+* SpiderMonkey uses a per-runtime stack to store the activation records,
* parameters, locals, and expression temporaries for the stack of actively
-* executing scripts, functions and generators. The stack is owned by the
-* StackSpace object stored in the runtime.
+* executing scripts, functions and generators.
*
* The stack is subdivided into contiguous segments of memory which
* have a memory layout invariant that allows fixed offsets to be used for stack
@@ -78,13 +78,13 @@ namespace detail {
* A sample memory layout of a segment looks like:
*
*                          regs
-*       .---------------------------------------------.
+*       .------------------------------------------------.
*       |                                               V
*       |                                    fp .--FrameRegs--. sp
*       |                                       V             V
-* |StackSegment| slots |StackFrame| slots |StackFrame| slots |
+* |StackSegment| values |StackFrame| values |StackFrame| values |
*                         |      ^            |
-*           ? <----------'      `-----------'
+*           ? <-----------'      `------------'
*                            prev               prev
*
* A segment starts with a fixed-size header (js::StackSegment) which logically
@@ -92,14 +92,14 @@ namespace detail {
* end of the stack.
*
* Each script activation (global or function code) is given a fixed-size header
-* (js::StackFrame) which is associated with the values (called "slots") before
-* and after it. The frame contains bookkeeping information about the activation
-* and links to the previous frame.
+* (js::StackFrame) which is associated with the values before and after it.
+* The frame contains bookkeeping information about the activation and links to
+* the previous frame.
*
-* The slots preceding a (function) StackFrame in memory are the arguments of
-* the call. The slots after a StackFrame in memory are its locals followed by
+* The value preceding a (function) StackFrame in memory are the arguments of
+* the call. The values after a StackFrame in memory are its locals followed by
* its expression stack. There is no clean line between the arguments of a
-* frame and the expression stack of the previous frame since the top slots of
+* frame and the expression stack of the previous frame since the top values of
* the expression become the arguments of a call. There are also layout
* invariants concerning the arguments and StackFrame; see "Arguments" comment
* in StackFrame for more details.
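Aside: to make the layout above concrete, here is a small standalone sketch. ToyFrame, ToyActivation, and the numbers in it are invented for illustration (they are not the real js::StackFrame or js::Value); only the pointer arithmetic mirrors the formals()/slots()/base() accessors this patch gives StackFrame.

#include <cassert>
#include <cstdio>

typedef double Value;                       // stand-in for js::Value

// Toy frame header: the values of a call are laid out as
//   ... callee, this, formals | header | fixed slots, expression stack ...
struct ToyFrame {
    unsigned nargs;                         // number of formal arguments
    unsigned nfixed;                        // number of fixed (local) slots

    Value *formals() { return reinterpret_cast<Value *>(this) - nargs; }
    Value *slots()   { return reinterpret_cast<Value *>(this + 1); }
    Value *base()    { return slots() + nfixed; }
};

// One contiguous allocation playing the role of a stack segment slice.
// Assumes no padding between members (true on common ABIs).
struct ToyActivation {
    Value before[4];                        // callee, this, arg0, arg1
    ToyFrame frame;
    Value after[4];                         // locals, then expression stack
};

int main() {
    ToyActivation a = {};
    a.frame.nargs = 2;
    a.frame.nfixed = 2;
    a.before[2] = 1.0;                      // arg0
    a.frame.slots()[0] = 3.0;               // local0

    assert(a.frame.formals() == a.before + 2);      // formals end right at the header
    assert(a.frame.base() == a.frame.slots() + 2);  // expr stack starts after the locals
    printf("arg0=%g local0=%g\n", a.frame.formals()[0], a.frame.slots()[0]);
    return 0;
}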
@@ -115,19 +115,19 @@ namespace detail {
* A call to a native (C++) function does not push a frame. Instead, an array
* of values is passed to the native. The layout of this array is abstracted by
* js::CallArgs. With respect to the StackSegment layout above, the args to a
-* native call are inserted anywhere there can be slots. A sample memory layout
+* native call are inserted anywhere there can be values. A sample memory layout
* looks like:
*
*                                  regs
-*       .----------------------------------------.
+*       .------------------------------------------.
*       |                                          V
*       |                               fp .--FrameRegs--. sp
*       |                                  V             V
-* |StackSegment| native call | slots |StackFrame| slots | native call |
+* |StackSegment| native call | values |StackFrame| values | native call |
*       |     vp <--argc--> end                  vp <--argc--> end
*       |         CallArgs <------------------------------ CallArgs
*       |                               prev                         ^
-*       `-----------------------------------------------------'
+*       `-------------------------------------------------------'
*                                      calls
*
* Here there are two native calls on the stack. The start of each native arg
@@ -295,10 +295,14 @@ CallArgsListFromVp(unsigned argc, Value *vp, CallArgsList *prev)
/*****************************************************************************/
+enum MaybeCheckAliasing { CHECK_ALIASING = true, DONT_CHECK_ALIASING = false };
+/*****************************************************************************/
/* Flags specified for a frame as it is constructed. */
enum InitialFrameFlags {
INITIAL_NONE = 0,
-INITIAL_CONSTRUCT = 0x80, /* == StackFrame::CONSTRUCTING, asserted below */
+INITIAL_CONSTRUCT = 0x40, /* == StackFrame::CONSTRUCTING, asserted below */
INITIAL_LOWERED = 0x200000 /* == StackFrame::LOWERED_CALL_APPLY, asserted below */
};
@@ -324,20 +328,22 @@ class StackFrame
EVAL = 0x8, /* frame pushed for eval() or debugger eval */
DEBUGGER = 0x10, /* frame pushed for debugger eval */
GENERATOR = 0x20, /* frame is associated with a generator */
-FLOATING_GENERATOR = 0x40, /* frame is is in generator obj, not on stack */
-CONSTRUCTING = 0x80, /* frame is for a constructor invocation */
+CONSTRUCTING = 0x40, /* frame is for a constructor invocation */
/* Temporary frame states */
-YIELDING = 0x100, /* js::Interpret dispatched JSOP_YIELD */
-FINISHED_IN_INTERP = 0x200, /* set if frame finished in Interpret() */
+YIELDING = 0x80, /* Interpret dispatched JSOP_YIELD */
+FINISHED_IN_INTERP = 0x100, /* set if frame finished in Interpret() */
/* Function arguments */
-OVERFLOW_ARGS = 0x400, /* numActualArgs > numFormalArgs */
-UNDERFLOW_ARGS = 0x800, /* numActualArgs < numFormalArgs */
+OVERFLOW_ARGS = 0x200, /* numActualArgs > numFormalArgs */
+UNDERFLOW_ARGS = 0x400, /* numActualArgs < numFormalArgs */
+/* Function prologue state */
+HAS_CALL_OBJ = 0x800, /* CallObject created for heavyweight fun */
+HAS_ARGS_OBJ = 0x1000, /* ArgumentsObject created for needsArgsObj script */
+HAS_NESTING = 0x2000, /* NestingPrologue called for frame */
/* Lazy frame initialization */
-HAS_CALL_OBJ = 0x1000, /* frame has a callobj reachable from scopeChain_ */
-HAS_ARGS_OBJ = 0x2000, /* frame has an argsobj in StackFrame::args */
HAS_HOOK_DATA = 0x4000, /* frame has hookData_ set */
HAS_ANNOTATION = 0x8000, /* frame has annotation_ set */
HAS_RVAL = 0x10000, /* frame has rval_ set */
@@ -363,19 +369,17 @@ class StackFrame
unsigned nactual; /* for non-eval frames */
JSScript *evalScript; /* the script of an eval-in-function */
} u;
-mutable JSObject *scopeChain_; /* current scope chain */
-StackFrame *prev_; /* previous cx->regs->fp */
-void *ncode_; /* return address for method JIT */
-/* Lazily initialized */
-Value rval_; /* return value of the frame */
-StaticBlockObject *blockChain_; /* innermost let block */
-ArgumentsObject *argsObj_; /* if has HAS_ARGS_OBJ */
-jsbytecode *prevpc_; /* pc of previous frame*/
-JSInlinedSite *prevInline_; /* inlined site in previous frame */
-void *hookData_; /* closure returned by call hook */
-void *annotation_; /* perhaps remove with bug 546848 */
-JSRejoinState rejoin_; /* If rejoining into the interpreter
+mutable JSObject *scopeChain_; /* if HAS_SCOPECHAIN, current scope chain */
+StackFrame *prev_; /* if HAS_PREVPC, previous cx->regs->fp */
+void *ncode_; /* for a jit frame, return address for method JIT */
+Value rval_; /* if HAS_RVAL, return value of the frame */
+StaticBlockObject *blockChain_; /* if HAS_BLOCKCHAIN, innermost let block */
+ArgumentsObject *argsObj_; /* if HAS_ARGS_OBJ, the call's arguments object */
+jsbytecode *prevpc_; /* if HAS_PREVPC, pc of previous frame*/
+InlinedSite *prevInline_; /* for a jit frame, inlined site in previous frame */
+void *hookData_; /* if HAS_HOOK_DATA, closure returned by call hook */
+void *annotation_; /* if HAS_ANNOTATION, perhaps remove with bug 546848 */
+FrameRejoinState rejoin_; /* for a jit frame rejoining the interpreter
* from JIT code, state at rejoin. */
static void staticAsserts() {
@@ -384,15 +388,39 @@ class StackFrame
}
inline void initPrev(JSContext *cx);
-jsbytecode *prevpcSlow(JSInlinedSite **pinlined);
-public:
+jsbytecode *prevpcSlow(InlinedSite **pinlined);
+void writeBarrierPost();
/*
-* Frame initialization
-*
-* After acquiring a pointer to an uninitialized stack frame on the VM
-* stack from StackSpace, these members are used to initialize the stack
-* frame before officially pushing the frame into the context.
+* These utilities provide raw access to the values associated with a
+* StackFrame (see "VM stack layout" comment). The utilities are private
+* since they are not able to assert that only unaliased vars/formals are
+* accessed. Normal code should prefer the StackFrame::unaliased* members
+* (or FrameRegs::stackDepth for the usual "depth is at least" assertions).
+*/
+Value *slots() const { return (Value *)(this + 1); }
+Value *base() const { return slots() + script()->nfixed; }
+Value *formals() const { return (Value *)this - fun()->nargs; }
+Value *actuals() const { return formals() - (flags_ & OVERFLOW_ARGS ? 2 + u.nactual : 0); }
+friend class FrameRegs;
+friend class ContextStack;
+friend class StackSpace;
+friend class StackIter;
+friend class CallObject;
+friend class ClonedBlockObject;
+friend class ArgumentsObject;
+friend void ::js_DumpStackFrame(JSContext *, StackFrame *);
+friend void ::js_ReportIsNotFunction(JSContext *, const js::Value *, unsigned);
+#ifdef JS_METHODJIT
+friend class mjit::CallCompiler;
+friend class mjit::GetPropCompiler;
+friend class mjit::ic::GetElementIC;
+#endif
+/*
+* Frame initialization, called by ContextStack operations after acquiring
+* the raw memory for the frame:
*/
/* Used for Invoke, Interpret, trace-jit LeaveTree, and method-jit stubs. */
@@ -406,19 +434,43 @@ class StackFrame
void initExecuteFrame(JSScript *script, StackFrame *prev, FrameRegs *regs,
const Value &thisv, JSObject &scopeChain, ExecuteType type);
-/* Used when activating generators. */
-enum TriggerPostBarriers {
-DoPostBarrier = true,
-NoPostBarrier = false
-};
-template <class T, class U, TriggerPostBarriers doPostBarrier>
-void stealFrameAndSlots(JSContext *cx, StackFrame *fp, T *vp,
-StackFrame *otherfp, U *othervp, Value *othersp);
-void writeBarrierPost();
/* Perhaps one fine day we will remove dummy frames. */
void initDummyFrame(JSContext *cx, JSObject &chain);
+public:
+/*
+* Frame prologue/epilogue
+*
+* Every stack frame must have 'prologue' called before executing the
+* first op and 'epilogue' called after executing the last op and before
+* popping the frame (whether the exit is exceptional or not).
+*
+* For inline JS calls/returns, it is easy to call the prologue/epilogue
+* exactly once. When calling JS from C++, Invoke/Execute push the stack
+* frame but do *not* call the prologue/epilogue. That means Interpret
+* must call the prologue/epilogue for the entry frame. This scheme
+* simplifies jit compilation.
+*
+* An important corner case is what happens when an error occurs (OOM,
+* over-recursed) after pushing the stack frame but before 'prologue' is
+* called or completes fully. To simplify usage, 'epilogue' does not assume
+* 'prologue' has completed and handles all the intermediate state details.
+*
+* The 'newType' option indicates whether the constructed 'this' value (if
+* there is one) should be given a new singleton type.
+*/
+bool prologue(JSContext *cx, bool newType);
+void epilogue(JSContext *cx);
+/* Subsets of 'prologue' called from jit code. */
+inline bool jitHeavyweightFunctionPrologue(JSContext *cx);
+inline void jitTypeNestingPrologue(JSContext *cx);
+bool jitStrictEvalPrologue(JSContext *cx);
+/* Initialize local variables of newly-pushed frame. */
+void initVarsToUndefined();
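Aside: a minimal sketch of the prologue/epilogue discipline described in the comment above. ToyContext, ToyFrame, and runFrame are hypothetical stand-ins (not the real JSContext/StackFrame API); the point is only the ordering and the corner case that epilogue must not assume prologue completed.

#include <cstdio>

struct ToyContext { bool outOfMemory; };   // stand-in for JSContext

struct ToyFrame {
    bool hasCallObj;
    bool prologue(ToyContext *cx) {
        if (cx->outOfMemory)               // e.g. scope-object allocation failed
            return false;
        hasCallObj = true;
        return true;
    }
    void epilogue(ToyContext *) {
        // Must not assume prologue() completed: only tear down what exists.
        if (hasCallObj)
            hasCallObj = false;
    }
};

// Caller pattern: once the frame is pushed, epilogue runs on every exit path.
bool runFrame(ToyContext *cx, ToyFrame *fp) {
    bool ok = fp->prologue(cx);
    if (ok) {
        /* ... execute ops ... */
    }
    fp->epilogue(cx);                      // runs whether or not prologue succeeded
    return ok;
}

int main() {
    ToyContext cx = { false };
    ToyFrame fp = { false };
    printf("ok=%d\n", runFrame(&cx, &fp));
    return 0;
}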
/*
* Stack frame type
*
@@ -507,39 +559,124 @@ class StackFrame
}
inline void resetGeneratorPrev(JSContext *cx);
-inline void resetInlinePrev(StackFrame *prevfp, jsbytecode *prevpc);
-inline void initInlineFrame(JSFunction *fun, StackFrame *prevfp, jsbytecode *prevpc);
+/*
+* (Unaliased) locals and arguments
+*
+* Only non-eval function frames have arguments. The arguments pushed by
+* the caller are the 'actual' arguments. The declared arguments of the
+* callee are the 'formal' arguments. When the caller passes less or equal
+* actual arguments, the actual and formal arguments are the same array
+* (but with different extents). When the caller passes too many arguments,
+* the formal subset of the actual arguments is copied onto the top of the
+* stack. This allows the engine to maintain a jit-time constant offset of
+* arguments from the frame pointer. Since the formal subset of the actual
+* arguments is potentially on the stack twice, it is important for all
+* reads/writes to refer to the same canonical memory location. This is
+* abstracted by the unaliased{Formal,Actual} methods.
+*
+* When a local/formal variable is "aliased" (accessed by nested closures,
+* dynamic scope operations, or 'arguments), the canonical location for
+* that value is the slot of an activation object (scope or arguments).
+* Currently, all variables are given slots in *both* the stack frame and
+* heap objects, even though, as just described, only one should ever be
+* accessed. Thus, it is up to the code performing an access to access the
+* correct value. These functions assert that accesses to stack values are
+* unaliased. For more about canonical values locations.
+*/
+inline Value &unaliasedVar(unsigned i, MaybeCheckAliasing = CHECK_ALIASING);
+inline Value &unaliasedLocal(unsigned i, MaybeCheckAliasing = CHECK_ALIASING);
+bool hasArgs() const { return isNonEvalFunctionFrame(); }
+inline Value &unaliasedFormal(unsigned i, MaybeCheckAliasing = CHECK_ALIASING);
+inline Value &unaliasedActual(unsigned i);
+template <class Op> inline void forEachUnaliasedActual(Op op);
+inline unsigned numFormalArgs() const;
+inline unsigned numActualArgs() const;
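Aside: a small sketch of the overflow-args layout the comment above describes. Every name and size here is invented; the only part taken from the patch is the offset computation, which mirrors actuals() == formals() - (2 + u.nactual) when OVERFLOW_ARGS is set.

#include <cassert>
#include <cstdio>

typedef double Value;   // stand-in for js::Value

// Toy model of the overflow-args layout:
//   [actual args ...][callee][this][formal copy ...][frame header ...]
// The copied formals sit at a fixed offset below the frame, while the
// caller-pushed actuals (the canonical location) sit 2 + nactual values lower.
int main() {
    const unsigned nformal = 2, nactual = 4;
    Value stack[nactual + 2 + nformal];       // pretend the frame header starts right after

    Value *frameStart = stack + nactual + 2 + nformal;
    Value *formals = frameStart - nformal;    // the copied subset next to the frame
    Value *actuals = formals - (2 + nactual); // mirrors: formals() - (2 + u.nactual)

    assert(actuals == stack);
    actuals[0] = 42.0;                        // write through the canonical location
    formals[0] = 0.0;                         // the copy is a different memory cell
    printf("actual[0]=%g copy[0]=%g\n", actuals[0], formals[0]);
    return 0;
}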
+/*
+* Arguments object
+*
+* If a non-eval function has script->needsArgsObj, an arguments object is
+* created in the prologue and stored in the local variable for the
+* 'arguments' binding (script->argumentsLocal). Since this local is
+* mutable, the arguments object can be overwritten and we can "lose" the
+* arguments object. Thus, StackFrame keeps an explicit argsObj_ field so
+* that the original arguments object is always available.
+*/
+ArgumentsObject &argsObj() const;
+void initArgsObj(ArgumentsObject &argsobj);
inline JSObject *createRestParameter(JSContext *cx);
/*
-* Frame slots
-*
-* A frame's 'slots' are the fixed slots associated with the frame (like
-* local variables) followed by an expression stack holding temporary
-* values. A frame's 'base' is the base of the expression stack.
+* Scope chain
+*
+* In theory, the scope chain would contain an object for every lexical
+* scope. However, only objects that are required for dynamic lookup are
+* actually created.
+*
+* Given that a (non-dummy) StackFrame corresponds roughly to a ES5
+* Execution Context (ES5 10.3), StackFrame::varObj corresponds to the
+* VariableEnvironment component of a Exection Context. Intuitively, the
+* variables object is where new bindings (variables and functions) are
+* stored. One might expect that this is either the Call object or
+* scopeChain.globalObj for function or global code, respectively, however
+* the JSAPI allows calls of Execute to specify a variables object on the
+* scope chain other than the call/global object. This allows embeddings to
+* run multiple scripts under the same global, each time using a new
+* variables object to collect and discard the script's global variables.
*/
-Value *slots() const {
-return (Value *)(this + 1);
+inline HandleObject scopeChain() const;
+inline ScopeObject &aliasedVarScope(ScopeCoordinate sc) const;
+inline GlobalObject &global() const;
+inline CallObject &callObj() const;
+inline JSObject &varObj();
+inline void pushOnScopeChain(ScopeObject &scope);
+inline void popOffScopeChain();
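Aside: a toy model of pushOnScopeChain/popOffScopeChain. ToyScope and ToyFrame are invented stand-ins, and the linking detail is simplified (the sketch sets the enclosing pointer itself, whereas a real ScopeObject already knows its enclosing scope); it only illustrates that push/pop move the frame's scope-chain head.

#include <cassert>
#include <cstdio>

// Toy scope objects forming a singly-linked chain, standing in for
// scope objects and the frame's scopeChain_ pointer.
struct ToyScope {
    const char *name;
    ToyScope *enclosing;
};

struct ToyFrame {
    ToyScope *scopeChain;
    void pushOnScopeChain(ToyScope &scope) {
        scope.enclosing = scopeChain;   // new scope links to the current chain
        scopeChain = &scope;
    }
    void popOffScopeChain() {
        scopeChain = scopeChain->enclosing;
    }
};

int main() {
    ToyScope global = { "global", NULL };
    ToyScope call = { "call", NULL };
    ToyFrame fp = { &global };

    fp.pushOnScopeChain(call);          // e.g. entering the function's call scope
    assert(fp.scopeChain == &call && call.enclosing == &global);
    fp.popOffScopeChain();              // leaving it restores the old head
    assert(fp.scopeChain == &global);
    printf("head=%s\n", fp.scopeChain->name);
    return 0;
}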
+/*
+* Block chain
+*
+* Entering/leaving a let (or exception) block may do 1 or 2 things: First,
+* a static block object (created at compiled time and stored in the
+* script) is pushed on StackFrame::blockChain. Second, if the static block
+* may be cloned to hold the dynamic values if this is needed for dynamic
+* scope access. A clone is created for a static block iff
+* StaticBlockObject::needsClone.
+*/
+bool hasBlockChain() const {
+return (flags_ & HAS_BLOCKCHAIN) && blockChain_;
+}
-Value *base() const {
-return slots() + script()->nfixed;
+StaticBlockObject *maybeBlockChain() {
+return (flags_ & HAS_BLOCKCHAIN) ? blockChain_ : NULL;
}
-Value &varSlot(unsigned i) {
-JS_ASSERT(i < script()->nfixed);
-JS_ASSERT_IF(maybeFun(), i < script()->bindings.numVars());
-return slots()[i];
+StaticBlockObject &blockChain() const {
+JS_ASSERT(hasBlockChain());
+return *blockChain_;
}
-Value &localSlot(unsigned i) {
-/* Let variables can be above script->nfixed. */
-JS_ASSERT(i < script()->nslots);
-return slots()[i];
-}
+bool pushBlock(JSContext *cx, StaticBlockObject &block);
+void popBlock(JSContext *cx);
+/*
+* With
+*
+* Entering/leaving a with (or E4X filter) block pushes/pops an object
+* on the scope chain. Pushing uses pushOnScopeChain, popping should use
+* popWith.
+*/
+void popWith(JSContext *cx);
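Aside: a toy sketch of the block-chain rule stated above: every let block is recorded on the (static) block chain, but only blocks that need a clone get a runtime object on the scope chain. ToyStaticBlock and ToyFrame are invented stand-ins, not the real StaticBlockObject/StackFrame.

#include <cstdio>
#include <vector>

struct ToyStaticBlock {
    const char *label;
    bool needsClone;       // true if some binding is accessed dynamically
};

struct ToyFrame {
    std::vector<const ToyStaticBlock *> blockChain;  // static blocks, innermost last
    std::vector<const ToyStaticBlock *> scopeChain;  // cloned blocks only

    void pushBlock(const ToyStaticBlock &block) {
        blockChain.push_back(&block);
        if (block.needsClone)
            scopeChain.push_back(&block);            // stands in for cloning onto the scope chain
    }
    void popBlock() {
        if (blockChain.back()->needsClone)
            scopeChain.pop_back();
        blockChain.pop_back();
    }
};

int main() {
    ToyStaticBlock outer = { "outer", false };       // nothing dynamic: no clone
    ToyStaticBlock inner = { "inner", true };        // dynamically accessed: cloned
    ToyFrame fp;
    fp.pushBlock(outer);
    fp.pushBlock(inner);
    printf("blocks=%zu clones=%zu\n", fp.blockChain.size(), fp.scopeChain.size());
    fp.popBlock();
    fp.popBlock();
    return 0;
}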
/*
* Script
@@ -556,6 +693,17 @@ class StackFrame
* the same VMFrame. Other calls force expansion of the inlined frames.
*/
+JSScript *script() const {
+JS_ASSERT(isScriptFrame());
+return isFunctionFrame()
+? isEvalFrame() ? u.evalScript : fun()->script()
+: exec.script;
+}
+JSScript *maybeScript() const {
+return isScriptFrame() ? script() : NULL;
+}
/*
* Get the frame's current bytecode, assuming |this| is in |cx|. next is
* frame whose prev == this, NULL if not known or if this == cx->fp().
@@ -571,15 +719,12 @@ class StackFrame
*
* Using next can avoid this, but in most cases prefer ScriptFrameIter;
* it is amortized O(1).
-*
-* When I get to the bottom I go back to the top of the stack
-* Where I stop and I turn and I go right back
-* Till I get to the bottom and I see you again...
*/
-jsbytecode *pcQuadratic(const ContextStack &stack, StackFrame *next = NULL,
-JSInlinedSite **pinlined = NULL);
-jsbytecode *prevpc(JSInlinedSite **pinlined) {
+jsbytecode *pcQuadratic(const ContextStack &stack, StackFrame *next = NULL,
+InlinedSite **pinlined = NULL);
+jsbytecode *prevpc(InlinedSite **pinlined) {
if (flags_ & HAS_PREVPC) {
if (pinlined)
*pinlined = prevInline_;
@@ -588,45 +733,11 @@ class StackFrame
return prevpcSlow(pinlined);
}
-JSInlinedSite *prevInline() {
+InlinedSite *prevInline() {
JS_ASSERT(flags_ & HAS_PREVPC);
return prevInline_;
}
-JSScript *script() const {
-JS_ASSERT(isScriptFrame());
-return isFunctionFrame()
-? isEvalFrame() ? u.evalScript : fun()->script()
-: exec.script;
-}
-JSScript *functionScript() const {
-JS_ASSERT(isFunctionFrame());
-return isEvalFrame() ? u.evalScript : fun()->script();
-}
-JSScript *globalScript() const {
-JS_ASSERT(isGlobalFrame());
-return exec.script;
-}
-JSScript *maybeScript() const {
-return isScriptFrame() ? script() : NULL;
-}
-size_t numFixed() const {
-return script()->nfixed;
-}
-size_t numSlots() const {
-return script()->nslots;
-}
-size_t numGlobalVars() const {
-JS_ASSERT(isGlobalFrame());
-return exec.script->nfixed;
-}
/*
* Function
*
@@ -654,94 +765,6 @@ class StackFrame
return fp->script()->function();
}
-/*
-* Arguments
-*
-* Only non-eval function frames have arguments. A frame follows its
-* arguments contiguously in memory. The arguments pushed by the caller are
-* the 'actual' arguments. The declared arguments of the callee are the
-* 'formal' arguments. When the caller passes less or equal actual
-* arguments, the actual and formal arguments are the same array (but with
-* different extents). When the caller passes too many arguments, the
-* formal subset of the actual arguments is copied onto the top of the
-* stack. This allows the engine to maintain a jit-time constant offset of
-* arguments from the frame pointer. Since the formal subset of the actual
-* arguments is potentially on the stack twice, it is important for all
-* reads/writes to refer to the same canonical memory location.
-*
-* An arguments object (the object returned by the 'arguments' keyword) is
-* lazily created, so a given function frame may or may not have one.
-*/
-/* True if this frame has arguments. Contrast with hasArgsObj. */
-bool hasArgs() const {
-return isNonEvalFunctionFrame();
-}
-unsigned numFormalArgs() const {
-JS_ASSERT(hasArgs());
-return fun()->nargs;
-}
-Value &formalArg(unsigned i) const {
-JS_ASSERT(i < numFormalArgs());
-return formalArgs()[i];
-}
-Value *formalArgs() const {
-JS_ASSERT(hasArgs());
-return (Value *)this - numFormalArgs();
-}
-Value *formalArgsEnd() const {
-JS_ASSERT(hasArgs());
-return (Value *)this;
-}
-Value *maybeFormalArgs() const {
-return (flags_ & (FUNCTION | EVAL)) == FUNCTION
-? formalArgs()
-: NULL;
-}
-inline unsigned numActualArgs() const;
-inline Value *actualArgs() const;
-inline Value *actualArgsEnd() const;
-inline Value &canonicalActualArg(unsigned i) const;
-template <class Op>
-inline bool forEachCanonicalActualArg(Op op, unsigned start = 0, unsigned count = unsigned(-1));
-template <class Op> inline bool forEachFormalArg(Op op);
-/* XXX: all these argsObj functions will be removed with bug 659577. */
-bool hasArgsObj() const {
-/*
-* HAS_ARGS_OBJ is still technically not equivalent to
-* script()->needsArgsObj() during functionPrologue (where GC can
-* observe a frame that needsArgsObj but has not yet been given the
-* args). This can be fixed by creating and rooting the args/call
-* object before pushing the frame, which should be done eventually.
-*/
-return !!(flags_ & HAS_ARGS_OBJ);
-}
-ArgumentsObject &argsObj() const {
-JS_ASSERT(hasArgsObj());
-return *argsObj_;
-}
-ArgumentsObject *maybeArgsObj() const {
-return hasArgsObj() ? &argsObj() : NULL;
-}
-void initArgsObj(ArgumentsObject &argsObj) {
-JS_ASSERT(script()->needsArgsObj());
-JS_ASSERT(!hasArgsObj());
-argsObj_ = &argsObj;
-flags_ |= HAS_ARGS_OBJ;
-}
/*
* This value
*
@@ -752,31 +775,25 @@ class StackFrame
* frames and directly before the frame for global frames. The *Args
* members assert !isEvalFrame(), so we implement specialized inline
* methods for accessing 'this'. When the caller has static knowledge that
-* a frame is a function or global frame, 'functionThis' and 'globalThis',
-* respectively, allow more efficient access.
+* a frame is a function, 'functionThis' allows more efficient access.
*/
Value &functionThis() const {
JS_ASSERT(isFunctionFrame());
if (isEvalFrame())
return ((Value *)this)[-1];
-return formalArgs()[-1];
+return formals()[-1];
}
JSObject &constructorThis() const {
JS_ASSERT(hasArgs());
-return formalArgs()[-1].toObject();
+return formals()[-1].toObject();
}
-Value &globalThis() const {
-JS_ASSERT(isGlobalFrame());
-return ((Value *)this)[-1];
-}
Value &thisValue() const {
if (flags_ & (EVAL | GLOBAL))
return ((Value *)this)[-1];
-return formalArgs()[-1];
+return formals()[-1];
}
/*
@@ -802,7 +819,7 @@ class StackFrame
JS_ASSERT(isScriptFrame());
Value &calleev = flags_ & (EVAL | GLOBAL)
? ((Value *)this)[-2]
-: formalArgs()[-2];
+: formals()[-2];
JS_ASSERT(calleev.isObjectOrNull());
return calleev;
}
@@ -811,118 +828,13 @@ class StackFrame
JS_ASSERT(isFunctionFrame());
if (isEvalFrame())
return ((Value *)this)[-2];
-return formalArgs()[-2];
+return formals()[-2];
}
CallReceiver callReceiver() const {
-return CallReceiverFromArgv(formalArgs());
+return CallReceiverFromArgv(formals());
}
-/*
-* Scope chain
-*
-* Every frame has a scopeChain which, when traversed via the 'parent' link
-* to the root, indicates the current global object. A 'call object' is a
-* node on a scope chain representing a function's activation record. A
-* call object is used for dynamically-scoped name lookup and lexically-
-* scoped upvar access. The call object holds the values of locals and
-* arguments when a function returns (and its stack frame is popped). For
-* performance reasons, call objects are created lazily for 'lightweight'
-* functions, i.e., functions which are not statically known to require a
-* call object. Thus, a given function frame may or may not have a call
-* object. When a function does have a call object, it is found by walking
-* up the scope chain until the first call object. Thus, it is important,
-* when setting the scope chain, to indicate whether the new scope chain
-* contains a new call object and thus changes the 'hasCallObj' state.
-*
-* The method JIT requires that HAS_SCOPECHAIN be set for all frames which
-* use NAME or related opcodes that can access the scope chain (so it does
-* not have to test the bit). To ensure this, we always initialize the
-* scope chain when pushing frames in the VM, and only initialize it when
-* pushing frames in JIT code when the above situation applies.
-*
-* NB: 'fp->hasCallObj()' implies that fp->callObj() needs to be 'put' when
-* the frame is popped. Since the scope chain of a non-strict eval frame
-* contains the call object of the parent (function) frame, it is possible
-* to have:
-* !fp->hasCall() && fp->scopeChain().isCall()
-*/
-inline HandleObject scopeChain() const;
-inline ScopeObject &aliasedVarScope(ScopeCoordinate sc) const;
-inline GlobalObject &global() const;
-bool hasCallObj() const {
-bool ret = !!(flags_ & HAS_CALL_OBJ);
-JS_ASSERT_IF(ret, !isNonStrictEvalFrame());
-return ret;
-}
-inline CallObject &callObj() const;
-inline void initScopeChain(CallObject &callobj);
-inline void setScopeChain(JSObject &obj);
-/*
-* Variables object
-*
-* Given that a (non-dummy) StackFrame corresponds roughly to a ES5
-* Execution Context (ES5 10.3), StackFrame::varObj corresponds to the
-* VariableEnvironment component of a Exection Context. Intuitively, the
-* variables object is where new bindings (variables and functions) are
-* stored. One might expect that this is either the callObj or
-* scopeChain.globalObj for function or global code, respectively, however
-* the JSAPI allows calls of Execute to specify a variables object on the
-* scope chain other than the call/global object. This allows embeddings to
-* run multiple scripts under the same global, each time using a new
-* variables object to collect and discard the script's global variables.
-*/
-inline JSObject &varObj();
-/* Block chain */
-bool hasBlockChain() const {
-return (flags_ & HAS_BLOCKCHAIN) && blockChain_;
-}
-StaticBlockObject *maybeBlockChain() {
-return (flags_ & HAS_BLOCKCHAIN) ? blockChain_ : NULL;
-}
-StaticBlockObject &blockChain() const {
-JS_ASSERT(hasBlockChain());
-return *blockChain_;
-}
-/* Enter/exit execution of a lexical block. */
-bool pushBlock(JSContext *cx, StaticBlockObject &block);
-void popBlock(JSContext *cx);
-/* Exits (via execution or exception) a with block. */
-void popWith(JSContext *cx);
-/*
-* Prologue for function frames: make a call object for heavyweight
-* functions, and maintain type nesting invariants.
-*/
-inline bool functionPrologue(JSContext *cx);
-/*
-* Epilogue for function frames: put any args or call object for the frame
-* which may still be live, and maintain type nesting invariants. Note:
-* this does mark the epilogue as having been completed, since the frame is
-* about to be popped. Use updateEpilogueFlags for this.
-*/
-inline void functionEpilogue(JSContext *cx);
-/*
-* If callObj() or argsObj() have already been put, update our flags
-* accordingly. This call must be followed by a later functionEpilogue.
-*/
-inline void updateEpilogueFlags();
-inline bool maintainNestingState() const;
/*
* Frame compartment
*
@@ -945,11 +857,11 @@ class StackFrame
/* JIT rejoin state */
-JSRejoinState rejoin() const {
+FrameRejoinState rejoin() const {
return rejoin_;
}
-void setRejoin(JSRejoinState state) {
+void setRejoin(FrameRejoinState state) {
rejoin_ = state;
}
@@ -1024,35 +936,53 @@ class StackFrame
}
/*
-* Generator-specific members
-*
-* A non-eval function frame may optionally be the activation of a
-* generator. For the most part, generator frames act like ordinary frames.
-* For exceptions, see js_FloatingFrameIfGenerator.
+* A "generator" frame is a function frame associated with a generator.
+* Since generators are not executed LIFO, the VM copies a single abstract
+* generator frame back and forth between the LIFO VM stack (when the
+* generator is active) and a snapshot stored in JSGenerator (when the
+* generator is inactive). A generator frame is comprised of a StackFrame
+* structure and the values that make up the arguments, locals, and
+* expression stack. The layout in the JSGenerator snapshot matches the
+* layout on the stack (see the "VM stack layout" comment above).
*/
bool isGeneratorFrame() const {
-return !!(flags_ & GENERATOR);
+bool ret = flags_ & GENERATOR;
+JS_ASSERT_IF(ret, isNonEvalFunctionFrame());
+return ret;
}
-bool isFloatingGenerator() const {
-JS_ASSERT_IF(flags_ & FLOATING_GENERATOR, isGeneratorFrame());
-return !!(flags_ & FLOATING_GENERATOR);
+void initGeneratorFrame() const {
+JS_ASSERT(!isGeneratorFrame());
+JS_ASSERT(isNonEvalFunctionFrame());
+flags_ |= GENERATOR;
}
-void initFloatingGenerator() {
-JS_ASSERT(!(flags_ & GENERATOR));
-flags_ |= (GENERATOR | FLOATING_GENERATOR);
+Value *generatorArgsSnapshotBegin() const {
+JS_ASSERT(isGeneratorFrame());
+return actuals() - 2;
}
-void unsetFloatingGenerator() {
-flags_ &= ~FLOATING_GENERATOR;
+Value *generatorArgsSnapshotEnd() const {
+JS_ASSERT(isGeneratorFrame());
+return (Value *)this;
}
-void setFloatingGenerator() {
-flags_ |= FLOATING_GENERATOR;
+Value *generatorSlotsSnapshotBegin() const {
+JS_ASSERT(isGeneratorFrame());
+return (Value *)(this + 1);
}
+enum TriggerPostBarriers {
+DoPostBarrier = true,
+NoPostBarrier = false
+};
+template <class T, class U, TriggerPostBarriers doPostBarrier>
+void copyFrameAndValues(JSContext *cx, StackFrame *fp, T *vp,
+StackFrame *otherfp, U *othervp, Value *othersp);
+JSGenerator *maybeSuspendedGenerator(JSRuntime *rt);
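Aside: a toy sketch of the copying that the comment above describes for generators: a contiguous range of values is copied from the VM stack into heap storage on suspend and back on resume. The real copyFrameAndValues also rewrites internal frame pointers and applies GC barriers; ToyGenerator here is only an illustration.

#include <algorithm>
#include <cassert>
#include <cstdio>
#include <vector>

typedef double Value;   // stand-in for js::Value (and HeapValue on the heap side)

// A suspended generator keeps a snapshot of the contiguous range
// [argsBegin, sp): actuals, callee/this, frame data, locals, expression stack.
struct ToyGenerator {
    std::vector<Value> snapshot;

    void suspend(const Value *argsBegin, const Value *sp) {
        snapshot.assign(argsBegin, sp);                          // stack -> heap snapshot
    }
    void resume(Value *argsBegin) const {
        std::copy(snapshot.begin(), snapshot.end(), argsBegin);  // heap -> stack
    }
};

int main() {
    Value stack[8] = { 1, 2, 3, 4, 5, 6, 7, 8 };   // pretend live generator frame
    ToyGenerator gen;
    gen.suspend(stack, stack + 8);

    Value newStack[8] = { 0 };                     // a later, different stack location
    gen.resume(newStack);
    assert(newStack[5] == 6);
    printf("resumed value: %g\n", newStack[5]);
    return 0;
}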
/*
* js::Execute pushes both global and function frames (since eval() in a
* function pushes a frame with isFunctionFrame() && isEvalFrame()). Most
@@ -1081,6 +1011,11 @@ class StackFrame
return !!(flags_ & CONSTRUCTING);
}
+bool hasCallObj() const {
+JS_ASSERT(isStrictEvalFrame() || fun()->isHeavyweight());
+return flags_ & HAS_CALL_OBJ;
+}
/*
* The method JIT call/apply optimization can erase Function.{call,apply}
* invocations from the stack and push the callee frame directly. The base
@@ -1103,10 +1038,6 @@ class StackFrame
flags_ |= PREV_UP_TO_DATE;
}
-bool hasOverflowArgs() const {
-return !!(flags_ & OVERFLOW_ARGS);
-}
bool isYielding() {
return !!(flags_ & YIELDING);
}
@@ -1130,6 +1061,9 @@ class StackFrame
public:
/* Public, but only for JIT use: */
+inline void resetInlinePrev(StackFrame *prevfp, jsbytecode *prevpc);
+inline void initInlineFrame(JSFunction *fun, StackFrame *prevfp, jsbytecode *prevpc);
static size_t offsetOfFlags() {
return offsetof(StackFrame, flags_);
}
@@ -1154,14 +1088,14 @@ class StackFrame
return offsetof(StackFrame, rval_);
}
-static size_t offsetOfArgsObj() {
-return offsetof(StackFrame, argsObj_);
-}
static ptrdiff_t offsetOfNcode() {
return offsetof(StackFrame, ncode_);
}
+static ptrdiff_t offsetOfArgsObj() {
+return offsetof(StackFrame, argsObj_);
+}
static ptrdiff_t offsetOfCallee(JSFunction *fun) {
JS_ASSERT(fun != NULL);
return -(fun->nargs + 2) * sizeof(Value);
@@ -1235,11 +1169,11 @@ class FrameRegs
Value *sp;
jsbytecode *pc;
private:
-JSInlinedSite *inlined_;
+InlinedSite *inlined_;
StackFrame *fp_;
public:
StackFrame *fp() const { return fp_; }
-JSInlinedSite *inlined() const { return inlined_; }
+InlinedSite *inlined() const { return inlined_; }
/* For jit use (need constant): */
static const size_t offsetOfFp = 3 * sizeof(void *);
@@ -1250,6 +1184,16 @@ class FrameRegs
}
void clearInlined() { inlined_ = NULL; }
+unsigned stackDepth() const {
+JS_ASSERT(sp >= fp_->base());
+return sp - fp_->base();
+}
+Value *spForStackDepth(unsigned depth) const {
+JS_ASSERT(fp_->script()->nfixed + depth <= fp_->script()->nslots);
+return fp_->base() + depth;
+}
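Aside: a tiny sketch of what stackDepth() computes. The helper and the numbers are invented; only the sp minus base arithmetic comes from the code above, and it is the form used for "depth is at least N" assertions.

#include <cassert>

typedef double Value;   // stand-in for js::Value

// Depth of the expression stack is simply sp minus the base of the stack.
static unsigned stackDepth(const Value *base, const Value *sp) {
    assert(sp >= base);
    return static_cast<unsigned>(sp - base);
}

int main() {
    Value slots[16];
    Value *base = slots;                 // first expression-stack slot
    Value *sp = slots + 3;               // three values currently pushed
    assert(stackDepth(base, sp) >= 2);   // e.g. an opcode that pops two operands
    return 0;
}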
/* For generator: */
void rebaseFromTo(const FrameRegs &from, StackFrame &to) {
fp_ = &to;
@@ -1496,6 +1440,10 @@ class StackSpace
StackSegment &findContainingSegment(const StackFrame *target) const;
+bool containsFast(StackFrame *fp) {
+return (Value *)fp >= base_ && (Value *)fp <= trustedEnd_;
+}
public:
StackSpace();
bool init();
@@ -1548,7 +1496,7 @@ class StackSpace
/* Called during GC: mark segments, frames, and slots under firstUnused. */
void mark(JSTracer *trc);
-void markFrameSlots(JSTracer *trc, StackFrame *fp, Value *slotsEnd, jsbytecode *pc);
+void markFrameValues(JSTracer *trc, StackFrame *fp, Value *slotsEnd, jsbytecode *pc);
/* Called during GC: sets active flag on compartments with active frames. */
void markActiveCompartments();