Mirror of https://github.com/mozilla/gecko-dev.git
Back out ee940e4debd0:7636c7036e2e (bug 659577) for asserting in testBug550743.js
Parent: d58996023e
Commit: c5f55bf631
@@ -805,7 +805,7 @@ static bool
EmitObjectOp(JSContext *cx, ObjectBox *objbox, JSOp op, BytecodeEmitter *bce)
{
JS_ASSERT(JOF_OPTYPE(op) == JOF_OBJECT);
return EmitIndex32(cx, op, bce->objectList.add(objbox), bce);
return EmitIndex32(cx, op, bce->objectList.index(objbox), bce);
}

static bool

@@ -833,81 +833,43 @@ EmitUnaliasedVarOp(JSContext *cx, JSOp op, uint16_t slot, BytecodeEmitter *bce)
}

static bool
EmitAliasedVarOp(JSContext *cx, JSOp op, ScopeCoordinate sc, BytecodeEmitter *bce)
EmitAliasedVarOp(JSContext *cx, JSOp op, uint16_t binding, JSAtom *atom, BytecodeEmitter *bce)
{
JS_ASSERT(JOF_OPTYPE(op) == JOF_SCOPECOORD);

uint32_t maybeBlockIndex = UINT32_MAX;
if (bce->sc->blockChain)
maybeBlockIndex = bce->objectList.indexOf(bce->sc->blockChain);
/*
* XXX This is temporary: bug 659577 will need to compute the number of
* cloned block objects to hop over.
*/
uint16_t hops = 0;

jsatomid atomIndex;
if (!bce->makeAtomIndex(atom, &atomIndex))
return false;

bool decomposed = js_CodeSpec[op].format & JOF_DECOMPOSE;
unsigned n = 2 * sizeof(uint16_t) + sizeof(uint32_t) + (decomposed ? 1 : 0);
JS_ASSERT(int(n) + 1 /* op */ == js_CodeSpec[op].length);

ptrdiff_t off = EmitN(cx, bce, op, n);
if (off < 0)
return false;

jsbytecode *pc = bce->code(off);
SET_UINT16(pc, sc.hops);
SET_UINT16(pc, hops);
pc += sizeof(uint16_t);
SET_UINT16(pc, sc.slot);
SET_UINT16(pc, binding);
pc += sizeof(uint16_t);
SET_UINT32_INDEX(pc, maybeBlockIndex);
SET_UINT32_INDEX(pc, atomIndex);
return true;
}

static unsigned
ClonedBlockDepth(BytecodeEmitter *bce)
{
unsigned clonedBlockDepth = 0;
for (StaticBlockObject *b = bce->sc->blockChain; b; b = b->enclosingBlock()) {
if (b->needsClone())
++clonedBlockDepth;
}

return clonedBlockDepth;
}

static bool
EmitAliasedVarOp(JSContext *cx, JSOp op, ParseNode *pn, BytecodeEmitter *bce)
{
/*
* The contents of the dynamic scope chain (fp->scopeChain) exactly reflect
* the needsClone-subset of the block chain. Use this to determine the
* number of ClonedBlockObjects on fp->scopeChain to skip to find the scope
* object containing the var to which pn is bound. ALIASEDVAR ops cannot
* reach across with scopes so ClonedBlockObjects is the only NestedScope
* on the scope chain.
*/
ScopeCoordinate sc;
if (JOF_OPTYPE(pn->getOp()) == JOF_QARG) {
JS_ASSERT(bce->sc->funIsHeavyweight());
sc.hops = ClonedBlockDepth(bce);
sc.slot = bce->sc->bindings.argToSlot(pn->pn_cookie.slot());
} else {
JS_ASSERT(JOF_OPTYPE(pn->getOp()) == JOF_LOCAL || pn->isKind(PNK_FUNCTION));
unsigned local = pn->pn_cookie.slot();
if (local < bce->sc->bindings.numVars()) {
JS_ASSERT(bce->sc->funIsHeavyweight());
sc.hops = ClonedBlockDepth(bce);
sc.slot = bce->sc->bindings.localToSlot(local);
} else {
unsigned depth = local - bce->sc->bindings.numVars();
unsigned hops = 0;
StaticBlockObject *b = bce->sc->blockChain;
while (!b->containsVarAtDepth(depth)) {
if (b->needsClone())
hops++;
b = b->enclosingBlock();
}
sc.hops = hops;
sc.slot = depth - b->stackDepth();
}
}

return EmitAliasedVarOp(cx, op, sc, bce);
uint16_t binding = JOF_OPTYPE(pn->getOp()) == JOF_QARG
? bce->sc->bindings.argToBinding(pn->pn_cookie.slot())
: bce->sc->bindings.localToBinding(pn->pn_cookie.slot());
return EmitAliasedVarOp(cx, op, binding, pn->atom(), bce);
}

static bool

@@ -1070,6 +1032,7 @@ EmitEnterBlock(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn, JSOp op)

/* Beware the empty destructuring dummy. */
if (!dn) {
JS_ASSERT(i + 1 <= blockObj->slotCount());
blockObj->setAliased(i, bce->sc->bindingsAccessedDynamically());
continue;
}

@@ -2628,13 +2591,12 @@ frontend::EmitFunctionScript(JSContext *cx, BytecodeEmitter *bce, ParseNode *bod
if (Emit1(cx, bce, JSOP_ARGUMENTS) < 0)
return false;
if (bce->sc->bindingsAccessedDynamically()) {
ScopeCoordinate sc;
sc.hops = 0;
sc.slot = bce->sc->bindings.localToSlot(bce->sc->argumentsLocal());
if (!EmitAliasedVarOp(cx, JSOP_SETALIASEDVAR, sc, bce))
JSAtom *atom = cx->runtime->atomState.argumentsAtom;
uint16_t binding = bce->sc->bindings.localToBinding(bce->sc->argumentsLocalSlot());
if (!EmitAliasedVarOp(cx, JSOP_SETALIASEDVAR, binding, atom, bce))
return false;
} else {
if (!EmitUnaliasedVarOp(cx, JSOP_SETLOCAL, bce->sc->argumentsLocal(), bce))
if (!EmitUnaliasedVarOp(cx, JSOP_SETLOCAL, bce->sc->argumentsLocalSlot(), bce))
return false;
}
if (Emit1(cx, bce, JSOP_POP) < 0)

@@ -4870,7 +4832,7 @@ EmitFunc(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn)
}

/* Make the function object a literal in the outer script's pool. */
unsigned index = bce->objectList.add(pn->pn_funbox);
unsigned index = bce->objectList.index(pn->pn_funbox);

/* Emit a bytecode pointing to the closure object in its immediate. */
if (pn->getOp() != JSOP_NOP) {

@@ -5803,7 +5765,7 @@ EmitObject(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn)
ObjectBox *objbox = bce->parser->newObjectBox(obj);
if (!objbox)
return false;
unsigned index = bce->objectList.add(objbox);
unsigned index = bce->objectList.index(objbox);
MOZ_STATIC_ASSERT(JSOP_NEWINIT_LENGTH == JSOP_NEWOBJECT_LENGTH,
"newinit and newobject must have equal length to edit in-place");
EMIT_UINT32_IN_PLACE(offset, JSOP_NEWOBJECT, uint32_t(index));

@@ -6435,7 +6397,7 @@ frontend::EmitTree(JSContext *cx, BytecodeEmitter *bce, ParseNode *pn)

case PNK_REGEXP:
JS_ASSERT(pn->isOp(JSOP_REGEXP));
ok = EmitRegExp(cx, bce->regexpList.add(pn->pn_objbox), bce);
ok = EmitRegExp(cx, bce->regexpList.index(pn->pn_objbox), bce);
break;

#if JS_HAS_XML_SUPPORT

@@ -6935,7 +6897,7 @@ frontend::FinishTakingTryNotes(BytecodeEmitter *bce, TryNoteArray *array)
* the pre-compilation prototype, a pigeon-hole problem for instanceof tests.
*/
unsigned
CGObjectList::add(ObjectBox *objbox)
CGObjectList::index(ObjectBox *objbox)
{
JS_ASSERT(!objbox->emitLink);
objbox->emitLink = lastbox;

@@ -6943,16 +6905,6 @@ CGObjectList::add(ObjectBox *objbox)
return length++;
}

unsigned
CGObjectList::indexOf(JSObject *obj)
{
JS_ASSERT(length > 0);
unsigned index = length - 1;
for (ObjectBox *box = lastbox; box->object != obj; box = box->emitLink)
index--;
return index;
}

void
CGObjectList::finish(ObjectArray *array)
{
@@ -49,8 +49,7 @@ struct CGObjectList {

CGObjectList() : length(0), lastbox(NULL) {}

unsigned add(ObjectBox *objbox);
unsigned indexOf(JSObject *obj);
unsigned index(ObjectBox *objbox);
void finish(ObjectArray *array);
};
@@ -54,8 +54,7 @@ SharedContext::needStrictChecks() {
}

inline unsigned
SharedContext::argumentsLocal() const
{
SharedContext::argumentsLocalSlot() const {
PropertyName *arguments = context->runtime->atomState.argumentsAtom;
unsigned slot;
DebugOnly<BindingKind> kind = bindings.lookup(context, arguments, &slot);
@@ -140,7 +140,9 @@ struct SharedContext {
StmtInfo *topStmt; /* top of statement info stack */
StmtInfo *topScopeStmt; /* top lexical scope statement */
Rooted<StaticBlockObject *> blockChain;
/* compile time block scope chain */
/* compile time block scope chain (NB: one
deeper than the topScopeStmt/downScope
chain when in head of let block/expr) */

private:
RootedFunction fun_; /* function to store argument and variable

@@ -186,7 +188,7 @@ struct SharedContext {
void setFunDefinitelyNeedsArgsObj() { JS_ASSERT(cxFlags.funArgumentsHasLocalBinding);
cxFlags.funDefinitelyNeedsArgsObj = true; }

unsigned argumentsLocal() const;
unsigned argumentsLocalSlot() const;

JSFunction *fun() const {
JS_ASSERT(inFunction);
@@ -338,7 +338,6 @@ class EncapsulatedValue
bool isDouble() const { return value.isDouble(); }
bool isString() const { return value.isString(); }
bool isObject() const { return value.isObject(); }
bool isMagic() const { return value.isMagic(); }
bool isMagic(JSWhyMagic why) const { return value.isMagic(why); }
bool isGCThing() const { return value.isGCThing(); }
bool isMarkable() const { return value.isMarkable(); }

@@ -464,14 +463,6 @@ Valueify(const EncapsulatedValue *array)
return (const Value *)array;
}

static inline HeapValue *
HeapValueify(Value *v)
{
JS_STATIC_ASSERT(sizeof(HeapValue) == sizeof(Value));
JS_STATIC_ASSERT(sizeof(HeapSlot) == sizeof(Value));
return (HeapValue *)v;
}

class HeapSlotArray
{
HeapSlot *array;
@@ -129,12 +129,6 @@ MarkValue(JSTracer *trc, EncapsulatedValue *v, const char *name);
void
MarkValueRange(JSTracer *trc, size_t len, EncapsulatedValue *vec, const char *name);

inline void
MarkValueRange(JSTracer *trc, HeapValue *begin, HeapValue *end, const char *name)
{
return MarkValueRange(trc, end - begin, begin, name);
}

void
MarkValueRoot(JSTracer *trc, Value *v, const char *name);
@@ -1,11 +0,0 @@
function f() {
let (x, y, z) {
eval('x = 1; y = 2; z = 3');
for (var i = 0; i < 10000; ++i) {
assertEq(x, 1);
assertEq(y, 2);
assertEq(z, 3);
}
}
}
f();
@@ -1,10 +0,0 @@
function g(x,y) {
return x + y;
}

function f(x) {
return g.apply(null, arguments);
}

for (var i = 0; i < 100; ++i)
assertEq(f(i, 1), i+1);
@@ -1,16 +0,0 @@
var g = newGlobal('new-compartment');
var dbg = new Debugger(g);


var hits = 0;
dbg.onDebuggerStatement = function(frame) {
++hits;
frame.older.eval("escaped = function() { return y }");
}

g.escaped = undefined;
g.eval("function h() { debugger }");
g.eval("(function () { var y = 42; h(); yield })().next();");
assertEq(g.eval("escaped()"), 42);
gc();
assertEq(g.eval("escaped()"), 42);
@@ -1,25 +0,0 @@
// Test the corner case of accessing an unaliased variable of a block
// while the block is not live.

var g = newGlobal('new-compartment');
g.eval("function h() { debugger }");
g.eval("function f() { let (x = 1, y) { (function() { y = 0 })(); h() } }");
g.eval("var surprise = null");

var dbg = new Debugger(g);
dbg.onDebuggerStatement = function(hFrame) {
var fFrame = hFrame.older;
assertEq(fFrame.environment.getVariable('x'), 1);
assertEq(fFrame.environment.getVariable('y'), 0);
fFrame.eval("surprise = function() { return ++x }");
assertEq(g.surprise(), 2);
}
g.f();
assertEq(g.surprise !== null, true);

// Either succeed or throw an error about 'x' not being live
try {
assertEq(g.surprise(), 3);
} catch (e) {
assertEq(e+'', 'Error: x is not live');
}
@@ -225,7 +225,7 @@ MSG_DEF(JSMSG_BAD_XML_CHARACTER, 171, 0, JSEXN_SYNTAXERR, "illegal XML char
MSG_DEF(JSMSG_BAD_DEFAULT_XML_NAMESPACE,172,0,JSEXN_SYNTAXERR, "invalid default XML namespace")
MSG_DEF(JSMSG_BAD_XML_NAME_SYNTAX, 173, 0, JSEXN_SYNTAXERR, "invalid XML name")
MSG_DEF(JSMSG_BRACKET_AFTER_ATTR_EXPR,174, 0, JSEXN_SYNTAXERR, "missing ] after attribute expression")
MSG_DEF(JSMSG_NESTING_GENERATOR, 175, 0, JSEXN_TYPEERR, "already executing generator")
MSG_DEF(JSMSG_NESTING_GENERATOR, 175, 1, JSEXN_TYPEERR, "already executing generator {0}")
MSG_DEF(JSMSG_CURLY_IN_XML_EXPR, 176, 0, JSEXN_SYNTAXERR, "missing } in XML expression")
MSG_DEF(JSMSG_BAD_XML_NAMESPACE, 177, 1, JSEXN_TYPEERR, "invalid XML namespace {0}")
MSG_DEF(JSMSG_BAD_XML_ATTR_NAME, 178, 1, JSEXN_TYPEERR, "invalid XML attribute name {0}")
@@ -320,11 +320,13 @@ ScriptAnalysis::analyzeBytecode(JSContext *cx)

case JSOP_GETALIASEDVAR:
case JSOP_CALLALIASEDVAR:
case JSOP_SETALIASEDVAR: {
case JSOP_SETALIASEDVAR:
JS_ASSERT(!isInlineable);
usesScopeChain_ = true;
/* XXX: this can be removed after bug 659577. */
if (ScopeCoordinate(pc).binding >= script->nfixed)
localsAliasStack_ = true;
break;
}

case JSOP_DEFFUN:
case JSOP_DEFVAR:
@@ -1916,15 +1918,15 @@ ScriptAnalysis::needsArgsObj(NeedsArgsObjState &state, SSAUseChain *use)
if (op == JSOP_POP || op == JSOP_POPN)
return false;

#ifdef JS_METHODJIT
/* SplatApplyArgs can read fp->canonicalActualArg(i) directly. */
if (state.canOptimizeApply && op == JSOP_FUNAPPLY && GET_ARGC(pc) == 2 && use->u.which == 0) {
#ifdef JS_METHODJIT
JS_ASSERT(mjit::IsLowerableFunCallOrApply(pc));
#endif
state.haveOptimizedApply = true;
state.canOptimizeApply = false;
return false;
}
#endif

/* arguments[i] can read fp->canonicalActualArg(i) directly. */
if (!state.haveOptimizedApply && op == JSOP_GETELEM && use->u.which == 1) {
@@ -1964,11 +1966,8 @@ ScriptAnalysis::needsArgsObj(JSContext *cx)
* soundly perform this analysis in their presence. Also, debuggers may
* want to see 'arguments', so assume every arguments object escapes.
*/
if (script->bindingsAccessedDynamically || script->numClosedArgs() > 0 ||
localsAliasStack() || cx->compartment->debugMode())
{
if (script->bindingsAccessedDynamically || localsAliasStack() || cx->compartment->debugMode())
return true;
}

unsigned pcOff = script->argumentsBytecode() - script->code;
@@ -363,14 +363,13 @@ static inline uint32_t GetBytecodeSlot(JSScript *script, jsbytecode *pc)
case JSOP_CALLALIASEDVAR:
case JSOP_SETALIASEDVAR:
{
ScopeCoordinate sc(pc);
if (StaticBlockObject *block = ScopeCoordinateBlockChain(script, pc))
return LocalSlot(script, block->slotToFrameLocal(script, sc.slot));
if (script->bindings.slotIsArg(sc.slot))
return ArgSlot(script->bindings.slotToArg(sc.slot));
return LocalSlot(script, script->bindings.slotToLocal(sc.slot));
ScopeCoordinate sc = ScopeCoordinate(pc);
return script->bindings.bindingIsArg(sc.binding)
? ArgSlot(script->bindings.bindingToArg(sc.binding))
: LocalSlot(script, script->bindings.bindingToLocal(sc.binding));
}


case JSOP_THIS:
return ThisSlot();
@ -97,7 +97,7 @@ ExhaustiveTest(const char funcode[])
|
|||
for (size_t i = 0; i <= ArgCount; i++) {
|
||||
for (size_t j = 0; j <= ArgCount - i; j++) {
|
||||
ClearElements(elems);
|
||||
CHECK(argsobj.maybeGetElements(i, j, elems));
|
||||
CHECK(argsobj.getElements(i, j, elems));
|
||||
for (size_t k = 0; k < j; k++)
|
||||
CHECK_SAME(elems[k], INT_TO_JSVAL(i + k));
|
||||
for (size_t k = j; k < MAX_ELEMS - 1; k++)
|
||||
|
|
|
@ -399,7 +399,7 @@ GetElement(JSContext *cx, JSObject *obj, IndexType index, JSBool *hole, Value *v
|
|||
return JS_TRUE;
|
||||
}
|
||||
if (obj->isArguments()) {
|
||||
if (obj->asArguments().maybeGetElement(uint32_t(index), vp)) {
|
||||
if (obj->asArguments().getElement(uint32_t(index), vp)) {
|
||||
*hole = JS_FALSE;
|
||||
return true;
|
||||
}
|
||||
|
@ -438,7 +438,7 @@ GetElements(JSContext *cx, HandleObject aobj, uint32_t length, Value *vp)
|
|||
if (aobj->isArguments()) {
|
||||
ArgumentsObject &argsobj = aobj->asArguments();
|
||||
if (!argsobj.hasOverriddenLength()) {
|
||||
if (argsobj.maybeGetElements(0, length, vp))
|
||||
if (argsobj.getElements(0, length, vp))
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
|
|
@ -994,9 +994,9 @@ JSContext::JSContext(JSRuntime *rt)
|
|||
functionCallback(NULL),
|
||||
#endif
|
||||
enumerators(NULL),
|
||||
innermostGenerator_(NULL),
|
||||
#ifdef DEBUG
|
||||
stackIterAssertionEnabled(true),
|
||||
okToAccessUnaliasedBindings(0),
|
||||
#endif
|
||||
activeCompilations(0)
|
||||
{
|
||||
|
@ -1080,23 +1080,25 @@ JSContext::wrapPendingException()
|
|||
setPendingException(v);
|
||||
}
|
||||
|
||||
|
||||
void
|
||||
JSContext::enterGenerator(JSGenerator *gen)
|
||||
JSGenerator *
|
||||
JSContext::generatorFor(StackFrame *fp) const
|
||||
{
|
||||
JS_ASSERT(!gen->prevGenerator);
|
||||
gen->prevGenerator = innermostGenerator_;
|
||||
innermostGenerator_ = gen;
|
||||
}
|
||||
JS_ASSERT(stack.containsSlow(fp));
|
||||
JS_ASSERT(fp->isGeneratorFrame());
|
||||
JS_ASSERT(!fp->isFloatingGenerator());
|
||||
JS_ASSERT(!genStack.empty());
|
||||
|
||||
void
|
||||
JSContext::leaveGenerator(JSGenerator *gen)
|
||||
{
|
||||
JS_ASSERT(innermostGenerator_ == gen);
|
||||
innermostGenerator_ = innermostGenerator_->prevGenerator;
|
||||
gen->prevGenerator = NULL;
|
||||
}
|
||||
if (JS_LIKELY(fp == genStack.back()->liveFrame()))
|
||||
return genStack.back();
|
||||
|
||||
/* General case; should only be needed for debug APIs. */
|
||||
for (size_t i = 0; i < genStack.length(); ++i) {
|
||||
if (genStack[i]->liveFrame() == fp)
|
||||
return genStack[i];
|
||||
}
|
||||
JS_NOT_REACHED("no matching generator");
|
||||
return NULL;
|
||||
}
|
||||
|
||||
bool
|
||||
JSContext::runningWithTrustedPrincipals() const
|
||||
|
|
|
@ -1242,12 +1242,29 @@ struct JSContext : js::ContextFriendFields
|
|||
JSObject *enumerators;
|
||||
|
||||
private:
|
||||
/* Innermost-executing generator or null if no generator are executing. */
|
||||
JSGenerator *innermostGenerator_;
|
||||
/*
|
||||
* To go from a live generator frame (on the stack) to its generator object
|
||||
* (see comment js_FloatingFrameIfGenerator), we maintain a stack of active
|
||||
* generators, pushing and popping when entering and leaving generator
|
||||
* frames, respectively.
|
||||
*/
|
||||
js::Vector<JSGenerator *, 2, js::SystemAllocPolicy> genStack;
|
||||
|
||||
public:
|
||||
JSGenerator *innermostGenerator() const { return innermostGenerator_; }
|
||||
void enterGenerator(JSGenerator *gen);
|
||||
void leaveGenerator(JSGenerator *gen);
|
||||
/* Return the generator object for the given generator frame. */
|
||||
JSGenerator *generatorFor(js::StackFrame *fp) const;
|
||||
|
||||
/* Early OOM-check. */
|
||||
inline bool ensureGeneratorStackSpace();
|
||||
|
||||
bool enterGenerator(JSGenerator *gen) {
|
||||
return genStack.append(gen);
|
||||
}
|
||||
|
||||
void leaveGenerator(JSGenerator *gen) {
|
||||
JS_ASSERT(genStack.back() == gen);
|
||||
genStack.popBack();
|
||||
}
|
||||
|
||||
inline void* malloc_(size_t bytes) {
|
||||
return runtime->malloc_(bytes, this);
|
||||
|
@ -1279,6 +1296,9 @@ struct JSContext : js::ContextFriendFields
|
|||
|
||||
void purge();
|
||||
|
||||
/* For DEBUG. */
|
||||
inline void assertValidStackDepth(unsigned depth);
|
||||
|
||||
bool isExceptionPending() {
|
||||
return throwing;
|
||||
}
|
||||
|
@ -1301,6 +1321,12 @@ struct JSContext : js::ContextFriendFields
|
|||
* stack iteration; defaults to true.
|
||||
*/
|
||||
bool stackIterAssertionEnabled;
|
||||
|
||||
/*
|
||||
* When greather than zero, it is ok to accessed non-aliased fields of
|
||||
* ScopeObjects because the accesses are coming from the DebugScopeProxy.
|
||||
*/
|
||||
unsigned okToAccessUnaliasedBindings;
|
||||
#endif
|
||||
|
||||
/*
|
||||
|
@ -1337,6 +1363,23 @@ struct JSContext : js::ContextFriendFields
|
|||
|
||||
namespace js {
|
||||
|
||||
class AutoAllowUnaliasedVarAccess
|
||||
{
|
||||
JSContext *cx;
|
||||
public:
|
||||
AutoAllowUnaliasedVarAccess(JSContext *cx) : cx(cx) {
|
||||
#ifdef DEBUG
|
||||
cx->okToAccessUnaliasedBindings++;
|
||||
#endif
|
||||
}
|
||||
~AutoAllowUnaliasedVarAccess() {
|
||||
#ifdef DEBUG
|
||||
JS_ASSERT(cx->okToAccessUnaliasedBindings);
|
||||
cx->okToAccessUnaliasedBindings--;
|
||||
#endif
|
||||
}
|
||||
};
|
||||
|
||||
struct AutoResolving {
|
||||
public:
|
||||
enum Kind {
|
||||
|
|
|
@ -541,6 +541,14 @@ JSContext::setCompileOptions(unsigned newcopts)
|
|||
maybeOverrideVersion(newVersion);
|
||||
}
|
||||
|
||||
inline void
|
||||
JSContext::assertValidStackDepth(unsigned depth)
|
||||
{
|
||||
#ifdef DEBUG
|
||||
JS_ASSERT(0 <= regs().sp - fp()->base());
|
||||
JS_ASSERT(depth <= uintptr_t(regs().sp - fp()->base()));
|
||||
#endif
|
||||
}
|
||||
|
||||
inline js::LifoAlloc &
|
||||
JSContext::typeLifoAlloc()
|
||||
|
@ -548,6 +556,15 @@ JSContext::typeLifoAlloc()
|
|||
return compartment->typeLifoAlloc;
|
||||
}
|
||||
|
||||
inline bool
|
||||
JSContext::ensureGeneratorStackSpace()
|
||||
{
|
||||
bool ok = genStack.reserve(genStack.length() + 1);
|
||||
if (!ok)
|
||||
js_ReportOutOfMemory(this);
|
||||
return ok;
|
||||
}
|
||||
|
||||
inline void
|
||||
JSContext::setPendingException(js::Value v) {
|
||||
JS_ASSERT(!IsPoisonedValue(v));
|
||||
|
|
|
@ -565,8 +565,11 @@ JS_GetFrameCallObject(JSContext *cx, JSStackFrame *fpArg)
|
|||
*/
|
||||
while (o) {
|
||||
ScopeObject &scope = o->asDebugScope().scope();
|
||||
if (scope.isCall())
|
||||
if (scope.isCall()) {
|
||||
JS_ASSERT_IF(cx->compartment->debugMode() && fp->isNonEvalFunctionFrame(),
|
||||
fp == scope.asCall().maybeStackFrame());
|
||||
return o;
|
||||
}
|
||||
o = o->enclosingScope();
|
||||
}
|
||||
return NULL;
|
||||
|
@ -802,10 +805,10 @@ GetPropertyDesc(JSContext *cx, JSObject *obj_, Shape *shape, JSPropertyDesc *pd)
|
|||
| (!shape->writable() ? JSPD_READONLY : 0)
|
||||
| (!shape->configurable() ? JSPD_PERMANENT : 0);
|
||||
pd->spare = 0;
|
||||
if (shape->setter() == CallObject::setArgOp) {
|
||||
if (shape->getter() == CallObject::getArgOp) {
|
||||
pd->slot = shape->shortid();
|
||||
pd->flags |= JSPD_ARGUMENT;
|
||||
} else if (shape->setter() == CallObject::setVarOp) {
|
||||
} else if (shape->getter() == CallObject::getVarOp) {
|
||||
pd->slot = shape->shortid();
|
||||
pd->flags |= JSPD_VARIABLE;
|
||||
} else {
|
||||
|
|
|
@ -127,7 +127,7 @@ fun_getProperty(JSContext *cx, HandleObject obj_, HandleId id, Value *vp)
|
|||
* innermost function as uninlineable to expand its frame and allow us
|
||||
* to recover its callee object.
|
||||
*/
|
||||
InlinedSite *inlined;
|
||||
JSInlinedSite *inlined;
|
||||
jsbytecode *prevpc = fp->prev()->pcQuadratic(cx->stack, fp, &inlined);
|
||||
if (inlined) {
|
||||
mjit::JITChunk *chunk = fp->prev()->jit()->chunk(prevpc);
|
||||
|
@ -696,7 +696,7 @@ js_fun_apply(JSContext *cx, unsigned argc, Value *vp)
|
|||
args.thisv() = vp[2];
|
||||
|
||||
/* Steps 7-8. */
|
||||
cx->fp()->forEachUnaliasedActual(CopyTo(args.array()));
|
||||
cx->fp()->forEachCanonicalActualArg(CopyTo(args.array()));
|
||||
} else {
|
||||
/* Step 3. */
|
||||
if (!vp[3].isObject()) {
|
||||
|
|
|
@ -244,6 +244,9 @@ js_ValueToCallableObject(JSContext *cx, js::Value *vp, unsigned flags);
|
|||
extern void
|
||||
js_ReportIsNotFunction(JSContext *cx, const js::Value *vp, unsigned flags);
|
||||
|
||||
extern void
|
||||
js_PutCallObject(js::StackFrame *fp, js::CallObject &callobj);
|
||||
|
||||
namespace js {
|
||||
|
||||
/*
|
||||
|
@ -275,6 +278,9 @@ JSFunction::toExtended() const
|
|||
return static_cast<const js::FunctionExtended *>(this);
|
||||
}
|
||||
|
||||
extern void
|
||||
js_PutArgsObject(js::StackFrame *fp);
|
||||
|
||||
inline bool
|
||||
js_IsNamedLambda(JSFunction *fun) { return (fun->flags & JSFUN_LAMBDA) && fun->atom; }
|
||||
|
||||
|
|
|
@ -3241,7 +3241,7 @@ SweepPhase(JSRuntime *rt, JSGCInvocationKind gckind, bool *startBackgroundSweep)
|
|||
|
||||
/* Finalize unreachable (key,value) pairs in all weak maps. */
|
||||
WeakMapBase::sweepAll(&rt->gcMarker);
|
||||
rt->debugScopes->sweep(rt);
|
||||
rt->debugScopes->sweep();
|
||||
|
||||
SweepAtomState(rt);
|
||||
|
||||
|
|
|
@ -5192,8 +5192,8 @@ NestingPrologue(JSContext *cx, StackFrame *fp)
|
|||
}
|
||||
|
||||
nesting->activeCall = &fp->callObj();
|
||||
nesting->argArray = Valueify(nesting->activeCall->argArray());
|
||||
nesting->varArray = Valueify(nesting->activeCall->varArray());
|
||||
nesting->argArray = fp->formalArgs();
|
||||
nesting->varArray = fp->slots();
|
||||
}
|
||||
|
||||
/* Maintain stack frame count for the function. */
|
||||
|
|
|
@ -23,9 +23,6 @@ struct TypeInferenceSizes;
|
|||
}
|
||||
|
||||
namespace js {
|
||||
|
||||
class CallObject;
|
||||
|
||||
namespace types {
|
||||
|
||||
/* Type set entry for either a JSObject with singleton type or a non-singleton TypeObject. */
|
||||
|
@ -964,7 +961,7 @@ struct TypeScriptNesting
|
|||
JSScript *next;
|
||||
|
||||
/* If this is an outer function, the most recent activation. */
|
||||
CallObject *activeCall;
|
||||
JSObject *activeCall;
|
||||
|
||||
/*
|
||||
* If this is an outer function, pointers to the most recent activation's
|
||||
|
|
|
@ -223,7 +223,6 @@ js::RunScript(JSContext *cx, JSScript *script, StackFrame *fp)
|
|||
JS_ASSERT(fp == cx->fp());
|
||||
JS_ASSERT(fp->script() == script);
|
||||
JS_ASSERT_IF(!fp->isGeneratorFrame(), cx->regs().pc == script->code);
|
||||
JS_ASSERT_IF(fp->isEvalFrame(), script->isActiveEval);
|
||||
#ifdef JS_METHODJIT_SPEW
|
||||
JMCheckLogging();
|
||||
#endif
|
||||
|
@ -318,11 +317,16 @@ js::InvokeKernel(JSContext *cx, CallArgs args, MaybeConstruct construct)
|
|||
if (!cx->stack.pushInvokeFrame(cx, args, initial, &ifg))
|
||||
return false;
|
||||
|
||||
/* Now that the new frame is rooted, maybe create a call object. */
|
||||
StackFrame *fp = ifg.fp();
|
||||
if (!fp->functionPrologue(cx))
|
||||
return false;
|
||||
|
||||
/* Run function until JSOP_STOP, JSOP_RETURN or error. */
|
||||
JSBool ok = RunScript(cx, fun->script(), ifg.fp());
|
||||
JSBool ok = RunScript(cx, fun->script(), fp);
|
||||
|
||||
/* Propagate the return value out. */
|
||||
args.rval() = ifg.fp()->returnValue();
|
||||
args.rval() = fp->returnValue();
|
||||
JS_ASSERT_IF(ok && construct, !args.rval().isPrimitive());
|
||||
return ok;
|
||||
}
|
||||
|
@ -449,15 +453,26 @@ js::ExecuteKernel(JSContext *cx, JSScript *script_, JSObject &scopeChain, const
|
|||
|
||||
if (!script->ensureRanAnalysis(cx, &scopeChain))
|
||||
return false;
|
||||
TypeScript::SetThis(cx, script, efg.fp()->thisValue());
|
||||
|
||||
/* Give strict mode eval its own fresh lexical environment. */
|
||||
StackFrame *fp = efg.fp();
|
||||
if (fp->isStrictEvalFrame() && !CallObject::createForStrictEval(cx, fp))
|
||||
return false;
|
||||
|
||||
Probes::startExecution(cx, script);
|
||||
bool ok = RunScript(cx, script, efg.fp());
|
||||
|
||||
TypeScript::SetThis(cx, script, fp->thisValue());
|
||||
|
||||
bool ok = RunScript(cx, script, fp);
|
||||
|
||||
if (fp->isStrictEvalFrame())
|
||||
js_PutCallObject(fp, fp->callObj());
|
||||
|
||||
Probes::stopExecution(cx, script);
|
||||
|
||||
/* Propgate the return value out. */
|
||||
if (result)
|
||||
*result = efg.fp()->returnValue();
|
||||
*result = fp->returnValue();
|
||||
return ok;
|
||||
}
|
||||
|
||||
|
@ -680,7 +695,7 @@ EnterWith(JSContext *cx, int stackIndex)
|
|||
StackFrame *fp = cx->fp();
|
||||
Value *sp = cx->regs().sp;
|
||||
JS_ASSERT(stackIndex < 0);
|
||||
JS_ASSERT(int(cx->regs().stackDepth()) + stackIndex >= 0);
|
||||
JS_ASSERT(fp->base() <= sp + stackIndex);
|
||||
|
||||
RootedObject obj(cx);
|
||||
if (sp[-1].isObject()) {
|
||||
|
@ -688,17 +703,17 @@ EnterWith(JSContext *cx, int stackIndex)
|
|||
} else {
|
||||
obj = js_ValueToNonNullObject(cx, sp[-1]);
|
||||
if (!obj)
|
||||
return false;
|
||||
return JS_FALSE;
|
||||
sp[-1].setObject(*obj);
|
||||
}
|
||||
|
||||
WithObject *withobj = WithObject::create(cx, obj, fp->scopeChain(),
|
||||
cx->regs().stackDepth() + stackIndex);
|
||||
JSObject *withobj = WithObject::create(cx, obj, fp->scopeChain(),
|
||||
sp + stackIndex - fp->base());
|
||||
if (!withobj)
|
||||
return false;
|
||||
return JS_FALSE;
|
||||
|
||||
fp->pushOnScopeChain(*withobj);
|
||||
return true;
|
||||
fp->setScopeChain(*withobj);
|
||||
return JS_TRUE;
|
||||
}
|
||||
|
||||
/* Unwind block and scope chains to match the given depth. */
|
||||
|
@ -706,7 +721,7 @@ void
|
|||
js::UnwindScope(JSContext *cx, uint32_t stackDepth)
|
||||
{
|
||||
StackFrame *fp = cx->fp();
|
||||
JS_ASSERT(stackDepth <= cx->regs().stackDepth());
|
||||
JS_ASSERT(fp->base() + stackDepth <= cx->regs().sp);
|
||||
|
||||
for (ScopeIter si(fp); !si.done(); si = si.enclosing()) {
|
||||
switch (si.type()) {
|
||||
|
@ -735,7 +750,7 @@ js::UnwindForUncatchableException(JSContext *cx, const FrameRegs ®s)
|
|||
for (TryNoteIter tni(regs); !tni.done(); ++tni) {
|
||||
JSTryNote *tn = *tni;
|
||||
if (tn->kind == JSTRY_ITER) {
|
||||
Value *sp = regs.spForStackDepth(tn->stackDepth);
|
||||
Value *sp = regs.fp()->base() + tn->stackDepth;
|
||||
UnwindIteratorForUncatchableException(cx, &sp[-1].toObject());
|
||||
}
|
||||
}
|
||||
|
@ -795,7 +810,7 @@ TryNoteIter::settle()
|
|||
* depth exceeding the current one and this condition is what we use to
|
||||
* filter them out.
|
||||
*/
|
||||
if (tn->stackDepth <= regs.stackDepth())
|
||||
if (tn->stackDepth <= regs.sp - regs.fp()->base())
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
@ -832,6 +847,56 @@ DoIncDec(JSContext *cx, JSScript *script, jsbytecode *pc, const Value &v, Value
|
|||
return true;
|
||||
}
|
||||
|
||||
static inline void
|
||||
CheckLocalAccess(StackFrame *fp, unsigned index, bool aliased = false)
|
||||
{
|
||||
#ifdef DEBUG
|
||||
if (index < fp->numFixed()) {
|
||||
JS_ASSERT(fp->script()->varIsAliased(index) == aliased);
|
||||
} else {
|
||||
unsigned depth = index - fp->numFixed();
|
||||
for (StaticBlockObject *b = fp->maybeBlockChain(); b; b = b->enclosingBlock()) {
|
||||
if (b->containsVarAtDepth(depth)) {
|
||||
JS_ASSERT(b->isAliased(depth - b->stackDepth()) == aliased);
|
||||
return;
|
||||
}
|
||||
}
|
||||
/*
|
||||
* Unfortunately, strange uses of JSOP_GETLOCAL (e.g., comprehensions
|
||||
* and group assignment) access slots above script->nfixed and not in
|
||||
* any block so we cannot use JS_NOT_REACHED here.
|
||||
*/
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
static inline void
|
||||
CheckArgAccess(StackFrame *fp, unsigned index)
|
||||
{
|
||||
JS_ASSERT(fp->script()->formalLivesInArgumentsObject(index) ==
|
||||
fp->script()->argsObjAliasesFormals());
|
||||
}
|
||||
|
||||
/*
|
||||
* This function is temporary. Bug 659577 will change all ALIASEDVAR
|
||||
* access to use the scope chain instead.
|
||||
*/
|
||||
static inline Value &
|
||||
AliasedVar(StackFrame *fp, ScopeCoordinate sc)
|
||||
{
|
||||
JSScript *script = fp->script();
|
||||
#ifdef DEBUG
|
||||
JS_ASSERT(sc.hops == 0); /* Temporary */
|
||||
if (script->bindings.bindingIsArg(sc.binding))
|
||||
JS_ASSERT(script->formalLivesInCallObject(script->bindings.bindingToArg(sc.binding)));
|
||||
else
|
||||
CheckLocalAccess(fp, script->bindings.bindingToLocal(sc.binding), true);
|
||||
#endif
|
||||
return script->bindings.bindingIsArg(sc.binding)
|
||||
? fp->formalArg(script->bindings.bindingToArg(sc.binding))
|
||||
: fp->localSlot(script->bindings.bindingToLocal(sc.binding));
|
||||
}
|
||||
|
||||
#define PUSH_COPY(v) do { *regs.sp++ = v; assertSameCompartment(cx, regs.sp[-1]); } while (0)
|
||||
#define PUSH_COPY_SKIP_CHECK(v) *regs.sp++ = v
|
||||
#define PUSH_NULL() regs.sp++->setNull()
|
||||
|
@ -1110,8 +1175,6 @@ js::Interpret(JSContext *cx, StackFrame *entryFrame, InterpMode interpMode)
|
|||
# define END_CASE_LEN8 len = 8; goto advance_pc;
|
||||
# define END_CASE_LEN9 len = 9; goto advance_pc;
|
||||
# define END_CASE_LEN10 len = 10; goto advance_pc;
|
||||
# define END_CASE_LEN11 len = 11; goto advance_pc;
|
||||
# define END_CASE_LEN12 len = 12; goto advance_pc;
|
||||
# define END_VARLEN_CASE goto advance_pc;
|
||||
# define ADD_EMPTY_CASE(OP) BEGIN_CASE(OP)
|
||||
# define END_EMPTY_CASES goto advance_pc_by_one;
|
||||
|
@ -1174,6 +1237,7 @@ js::Interpret(JSContext *cx, StackFrame *entryFrame, InterpMode interpMode)
|
|||
#define RESTORE_INTERP_VARS() \
|
||||
JS_BEGIN_MACRO \
|
||||
SET_SCRIPT(regs.fp()->script()); \
|
||||
argv = regs.fp()->maybeFormalArgs(); \
|
||||
atoms = FrameAtomBase(cx, regs.fp()); \
|
||||
JS_ASSERT(&cx->regs() == ®s); \
|
||||
JS_END_MACRO
|
||||
|
@ -1236,6 +1300,7 @@ js::Interpret(JSContext *cx, StackFrame *entryFrame, InterpMode interpMode)
|
|||
JSRuntime *const rt = cx->runtime;
|
||||
Rooted<JSScript*> script(cx);
|
||||
SET_SCRIPT(regs.fp()->script());
|
||||
Value *argv = regs.fp()->maybeFormalArgs();
|
||||
CHECK_INTERRUPT_HANDLER();
|
||||
|
||||
/*
|
||||
|
@ -1270,8 +1335,8 @@ js::Interpret(JSContext *cx, StackFrame *entryFrame, InterpMode interpMode)
|
|||
|
||||
#if JS_HAS_GENERATORS
|
||||
if (JS_UNLIKELY(regs.fp()->isGeneratorFrame())) {
|
||||
JS_ASSERT(size_t(regs.pc - script->code) <= script->length);
|
||||
JS_ASSERT(regs.stackDepth() <= script->nslots);
|
||||
JS_ASSERT((size_t) (regs.pc - script->code) <= script->length);
|
||||
JS_ASSERT((size_t) (regs.sp - regs.fp()->base()) <= StackDepth(script));
|
||||
|
||||
/*
|
||||
* To support generator_throw and to catch ignored exceptions,
|
||||
|
@ -1289,7 +1354,7 @@ js::Interpret(JSContext *cx, StackFrame *entryFrame, InterpMode interpMode)
|
|||
if (interpMode == JSINTERP_NORMAL) {
|
||||
StackFrame *fp = regs.fp();
|
||||
JS_ASSERT_IF(!fp->isGeneratorFrame(), regs.pc == script->code);
|
||||
if (!fp->isGeneratorFrame() && !fp->prologue(cx, UseNewTypeAtEntry(cx, fp)))
|
||||
if (!ScriptPrologueOrGeneratorResume(cx, fp, UseNewTypeAtEntry(cx, fp)))
|
||||
goto error;
|
||||
if (cx->compartment->debugMode()) {
|
||||
JSTrapStatus status = ScriptDebugPrologue(cx, fp);
|
||||
|
@ -1524,12 +1589,25 @@ BEGIN_CASE(JSOP_POP)
|
|||
END_CASE(JSOP_POP)
|
||||
|
||||
BEGIN_CASE(JSOP_POPN)
|
||||
JS_ASSERT(GET_UINT16(regs.pc) <= regs.stackDepth());
|
||||
{
|
||||
regs.sp -= GET_UINT16(regs.pc);
|
||||
#ifdef DEBUG
|
||||
if (StaticBlockObject *block = regs.fp()->maybeBlockChain())
|
||||
JS_ASSERT(regs.stackDepth() >= block->stackDepth() + block->slotCount());
|
||||
JS_ASSERT(regs.fp()->base() <= regs.sp);
|
||||
StaticBlockObject *block = regs.fp()->maybeBlockChain();
|
||||
JS_ASSERT_IF(block,
|
||||
block->stackDepth() + block->slotCount()
|
||||
<= (size_t) (regs.sp - regs.fp()->base()));
|
||||
for (JSObject *obj = regs.fp()->scopeChain(); obj; obj = obj->enclosingScope()) {
|
||||
if (!obj->isBlock() || !obj->isWith())
|
||||
continue;
|
||||
if (obj->getPrivate() != js_FloatingFrameIfGenerator(cx, regs.fp()))
|
||||
break;
|
||||
JS_ASSERT(regs.fp()->base() + obj->asBlock().stackDepth()
|
||||
+ (obj->isBlock() ? obj->asBlock().slotCount() : 1)
|
||||
<= regs.sp);
|
||||
}
|
||||
#endif
|
||||
}
|
||||
END_CASE(JSOP_POPN)
|
||||
|
||||
BEGIN_CASE(JSOP_SETRVAL)
|
||||
|
@ -1576,12 +1654,14 @@ BEGIN_CASE(JSOP_STOP)
|
|||
if (entryFrame != regs.fp())
|
||||
inline_return:
|
||||
{
|
||||
AssertValidFunctionScopeChainAtExit(regs.fp());
|
||||
|
||||
if (cx->compartment->debugMode())
|
||||
interpReturnOK = ScriptDebugEpilogue(cx, regs.fp(), interpReturnOK);
|
||||
|
||||
regs.fp()->epilogue(cx);
|
||||
interpReturnOK = ScriptEpilogue(cx, regs.fp(), interpReturnOK);
|
||||
|
||||
/* The JIT inlines the epilogue. */
|
||||
/* The JIT inlines ScriptEpilogue. */
|
||||
#ifdef JS_METHODJIT
|
||||
jit_return:
|
||||
#endif
|
||||
|
@ -1614,7 +1694,7 @@ BEGIN_CASE(JSOP_STOP)
|
|||
regs.pc += JSOP_CALL_LENGTH;
|
||||
goto error;
|
||||
} else {
|
||||
JS_ASSERT(regs.stackDepth() == 0);
|
||||
JS_ASSERT(regs.sp == regs.fp()->base());
|
||||
}
|
||||
interpReturnOK = true;
|
||||
goto exit;
|
||||
|
@ -1731,7 +1811,7 @@ END_CASE(JSOP_IN)
|
|||
|
||||
BEGIN_CASE(JSOP_ITER)
|
||||
{
|
||||
JS_ASSERT(regs.stackDepth() >= 1);
|
||||
JS_ASSERT(regs.sp > regs.fp()->base());
|
||||
uint8_t flags = GET_UINT8(regs.pc);
|
||||
if (!ValueToIterator(cx, flags, ®s.sp[-1]))
|
||||
goto error;
|
||||
|
@ -1742,7 +1822,7 @@ END_CASE(JSOP_ITER)
|
|||
|
||||
BEGIN_CASE(JSOP_MOREITER)
|
||||
{
|
||||
JS_ASSERT(regs.stackDepth() >= 1);
|
||||
JS_ASSERT(regs.sp - 1 >= regs.fp()->base());
|
||||
JS_ASSERT(regs.sp[-1].isObject());
|
||||
PUSH_NULL();
|
||||
bool cond;
|
||||
|
@ -1755,8 +1835,8 @@ END_CASE(JSOP_MOREITER)
|
|||
|
||||
BEGIN_CASE(JSOP_ITERNEXT)
|
||||
{
|
||||
JS_ASSERT(regs.stackDepth() >= GET_INT8(regs.pc));
|
||||
Value *itervp = regs.sp - GET_INT8(regs.pc);
|
||||
JS_ASSERT(itervp >= regs.fp()->base());
|
||||
JS_ASSERT(itervp->isObject());
|
||||
PUSH_NULL();
|
||||
if (!IteratorNext(cx, &itervp->toObject(), ®s.sp[-1]))
|
||||
|
@ -1766,7 +1846,7 @@ END_CASE(JSOP_ITERNEXT)
|
|||
|
||||
BEGIN_CASE(JSOP_ENDITER)
|
||||
{
|
||||
JS_ASSERT(regs.stackDepth() >= 1);
|
||||
JS_ASSERT(regs.sp - 1 >= regs.fp()->base());
|
||||
bool ok = CloseIterator(cx, ®s.sp[-1].toObject());
|
||||
regs.sp--;
|
||||
if (!ok)
|
||||
|
@ -1776,7 +1856,7 @@ END_CASE(JSOP_ENDITER)
|
|||
|
||||
BEGIN_CASE(JSOP_DUP)
|
||||
{
|
||||
JS_ASSERT(regs.stackDepth() >= 1);
|
||||
JS_ASSERT(regs.sp > regs.fp()->base());
|
||||
const Value &rref = regs.sp[-1];
|
||||
PUSH_COPY(rref);
|
||||
}
|
||||
|
@ -1784,7 +1864,7 @@ END_CASE(JSOP_DUP)
|
|||
|
||||
BEGIN_CASE(JSOP_DUP2)
|
||||
{
|
||||
JS_ASSERT(regs.stackDepth() >= 2);
|
||||
JS_ASSERT(regs.sp - 2 >= regs.fp()->base());
|
||||
const Value &lref = regs.sp[-2];
|
||||
const Value &rref = regs.sp[-1];
|
||||
PUSH_COPY(lref);
|
||||
|
@ -1794,7 +1874,7 @@ END_CASE(JSOP_DUP2)
|
|||
|
||||
BEGIN_CASE(JSOP_SWAP)
|
||||
{
|
||||
JS_ASSERT(regs.stackDepth() >= 2);
|
||||
JS_ASSERT(regs.sp - 2 >= regs.fp()->base());
|
||||
Value &lref = regs.sp[-2];
|
||||
Value &rref = regs.sp[-1];
|
||||
lref.swap(rref);
|
||||
|
@ -1804,7 +1884,7 @@ END_CASE(JSOP_SWAP)
|
|||
BEGIN_CASE(JSOP_PICK)
|
||||
{
|
||||
unsigned i = GET_UINT8(regs.pc);
|
||||
JS_ASSERT(regs.stackDepth() >= i + 1);
|
||||
JS_ASSERT(regs.sp - (i + 1) >= regs.fp()->base());
|
||||
Value lval = regs.sp[-int(i + 1)];
|
||||
memmove(regs.sp - (i + 1), regs.sp - i, sizeof(Value) * i);
|
||||
regs.sp[-1] = lval;
|
||||
|
@ -2290,17 +2370,10 @@ BEGIN_CASE(JSOP_INCARG)
|
|||
BEGIN_CASE(JSOP_ARGINC)
|
||||
{
|
||||
unsigned i = GET_ARGNO(regs.pc);
|
||||
if (script->argsObjAliasesFormals()) {
|
||||
const Value &arg = regs.fp()->argsObj().arg(i);
|
||||
Value v;
|
||||
if (!DoIncDec(cx, script, regs.pc, arg, &v, ®s.sp[0]))
|
||||
goto error;
|
||||
regs.fp()->argsObj().setArg(i, v);
|
||||
} else {
|
||||
Value &arg = regs.fp()->unaliasedFormal(i);
|
||||
if (!DoIncDec(cx, script, regs.pc, arg, &arg, ®s.sp[0]))
|
||||
goto error;
|
||||
}
|
||||
CheckArgAccess(regs.fp(), i);
|
||||
Value &arg = regs.fp()->formalArg(i);
|
||||
if (!DoIncDec(cx, script, regs.pc, arg, &arg, ®s.sp[0]))
|
||||
goto error;
|
||||
regs.sp++;
|
||||
}
|
||||
END_CASE(JSOP_ARGINC);
|
||||
|
@ -2311,7 +2384,8 @@ BEGIN_CASE(JSOP_INCLOCAL)
|
|||
BEGIN_CASE(JSOP_LOCALINC)
|
||||
{
|
||||
unsigned i = GET_SLOTNO(regs.pc);
|
||||
Value &local = regs.fp()->unaliasedLocal(i);
|
||||
CheckLocalAccess(regs.fp(), i);
|
||||
Value &local = regs.fp()->localSlot(i);
|
||||
if (!DoIncDec(cx, script, regs.pc, local, &local, ®s.sp[0]))
|
||||
goto error;
|
||||
regs.sp++;
|
||||
|
@ -2422,12 +2496,13 @@ BEGIN_CASE(JSOP_NEW)
|
|||
BEGIN_CASE(JSOP_CALL)
|
||||
BEGIN_CASE(JSOP_FUNCALL)
|
||||
{
|
||||
JS_ASSERT(regs.stackDepth() >= 2 + GET_ARGC(regs.pc));
|
||||
CallArgs args = CallArgsFromSp(GET_ARGC(regs.pc), regs.sp);
|
||||
JS_ASSERT(args.base() >= regs.fp()->base());
|
||||
|
||||
bool construct = (*regs.pc == JSOP_NEW);
|
||||
|
||||
RootedFunction &fun = rootFunction0;
|
||||
|
||||
/* Don't bother trying to fast-path calls to scripted non-constructors. */
|
||||
if (!IsFunctionObject(args.calleev(), fun.address()) || !fun->isInterpretedConstructor()) {
|
||||
if (construct) {
|
||||
|
@ -2461,6 +2536,10 @@ BEGIN_CASE(JSOP_FUNCALL)
|
|||
goto error;
|
||||
|
||||
RESTORE_INTERP_VARS();
|
||||
|
||||
if (!regs.fp()->functionPrologue(cx))
|
||||
goto error;
|
||||
|
||||
RESET_USE_METHODJIT();
|
||||
|
||||
bool newType = cx->typeInferenceEnabled() && UseNewType(cx, script, regs.pc);
|
||||
|
@ -2483,7 +2562,7 @@ BEGIN_CASE(JSOP_FUNCALL)
|
|||
}
|
||||
#endif
|
||||
|
||||
if (!regs.fp()->prologue(cx, newType))
|
||||
if (!ScriptPrologue(cx, regs.fp(), newType))
|
||||
goto error;
|
||||
|
||||
if (cx->compartment->debugMode()) {
|
||||
|
@ -2740,7 +2819,7 @@ END_CASE(JSOP_ACTUALSFILLED)
|
|||
BEGIN_CASE(JSOP_ARGUMENTS)
|
||||
JS_ASSERT(!regs.fp()->fun()->hasRest());
|
||||
if (script->needsArgsObj()) {
|
||||
ArgumentsObject *obj = ArgumentsObject::createExpected(cx, regs.fp());
|
||||
ArgumentsObject *obj = ArgumentsObject::create(cx, regs.fp());
|
||||
if (!obj)
|
||||
goto error;
|
||||
PUSH_COPY(ObjectValue(*obj));
|
||||
|
@ -2764,14 +2843,16 @@ BEGIN_CASE(JSOP_CALLALIASEDVAR)
|
|||
BEGIN_CASE(JSOP_GETALIASEDVAR)
|
||||
{
|
||||
ScopeCoordinate sc = ScopeCoordinate(regs.pc);
|
||||
PUSH_COPY(regs.fp()->aliasedVarScope(sc).aliasedVar(sc));
|
||||
Value &var = AliasedVar(regs.fp(), sc);
|
||||
PUSH_COPY(var);
|
||||
}
|
||||
END_CASE(JSOP_GETALIASEDVAR)
|
||||
|
||||
BEGIN_CASE(JSOP_SETALIASEDVAR)
|
||||
{
|
||||
ScopeCoordinate sc = ScopeCoordinate(regs.pc);
|
||||
regs.fp()->aliasedVarScope(sc).setAliasedVar(sc, regs.sp[-1]);
|
||||
Value &var = AliasedVar(regs.fp(), sc);
|
||||
var = regs.sp[-1];
|
||||
}
|
||||
END_CASE(JSOP_SETALIASEDVAR)
|
||||
|
||||
|
@ -2779,20 +2860,16 @@ BEGIN_CASE(JSOP_GETARG)
|
|||
BEGIN_CASE(JSOP_CALLARG)
|
||||
{
|
||||
unsigned i = GET_ARGNO(regs.pc);
|
||||
if (script->argsObjAliasesFormals())
|
||||
PUSH_COPY(regs.fp()->argsObj().arg(i));
|
||||
else
|
||||
PUSH_COPY(regs.fp()->unaliasedFormal(i));
|
||||
CheckArgAccess(regs.fp(), i);
|
||||
PUSH_COPY(regs.fp()->formalArg(i));
|
||||
}
|
||||
END_CASE(JSOP_GETARG)
|
||||
|
||||
BEGIN_CASE(JSOP_SETARG)
|
||||
{
|
||||
unsigned i = GET_ARGNO(regs.pc);
|
||||
if (script->argsObjAliasesFormals())
|
||||
regs.fp()->argsObj().setArg(i, regs.sp[-1]);
|
||||
else
|
||||
regs.fp()->unaliasedFormal(i) = regs.sp[-1];
|
||||
CheckArgAccess(regs.fp(), i);
|
||||
regs.fp()->formalArg(i) = regs.sp[-1];
|
||||
}
|
||||
END_CASE(JSOP_SETARG)
|
||||
|
||||
|
@ -2800,7 +2877,8 @@ BEGIN_CASE(JSOP_GETLOCAL)
|
|||
BEGIN_CASE(JSOP_CALLLOCAL)
|
||||
{
|
||||
unsigned i = GET_SLOTNO(regs.pc);
|
||||
PUSH_COPY_SKIP_CHECK(regs.fp()->unaliasedLocal(i));
|
||||
CheckLocalAccess(regs.fp(), i);
|
||||
PUSH_COPY_SKIP_CHECK(regs.fp()->localSlot(i));
|
||||
|
||||
/*
|
||||
* Skip the same-compartment assertion if the local will be immediately
|
||||
|
@ -2816,7 +2894,8 @@ END_CASE(JSOP_GETLOCAL)
|
|||
BEGIN_CASE(JSOP_SETLOCAL)
|
||||
{
|
||||
unsigned i = GET_SLOTNO(regs.pc);
|
||||
regs.fp()->unaliasedLocal(i) = regs.sp[-1];
|
||||
CheckLocalAccess(regs.fp(), i);
|
||||
regs.fp()->localSlot(i) = regs.sp[-1];
|
||||
}
|
||||
END_CASE(JSOP_SETLOCAL)
|
||||
|
||||
|
@ -2963,7 +3042,7 @@ END_CASE(JSOP_LAMBDA)
|
|||
|
||||
BEGIN_CASE(JSOP_CALLEE)
|
||||
JS_ASSERT(regs.fp()->isNonEvalFunctionFrame());
|
||||
PUSH_COPY(regs.fp()->calleev());
|
||||
PUSH_COPY(argv[-2]);
|
||||
END_CASE(JSOP_CALLEE)
|
||||
|
||||
BEGIN_CASE(JSOP_GETTER)
|
||||
|
@ -2995,7 +3074,7 @@ BEGIN_CASE(JSOP_SETTER)
|
|||
|
||||
case JSOP_INITPROP:
|
||||
{
|
||||
JS_ASSERT(regs.stackDepth() >= 2);
|
||||
JS_ASSERT(regs.sp - regs.fp()->base() >= 2);
|
||||
rval = regs.sp[-1];
|
||||
i = -1;
|
||||
PropertyName *name;
|
||||
|
@ -3005,7 +3084,8 @@ BEGIN_CASE(JSOP_SETTER)
|
|||
}
|
||||
default:
|
||||
JS_ASSERT(op2 == JSOP_INITELEM);
|
||||
JS_ASSERT(regs.stackDepth() >= 3);
|
||||
|
||||
JS_ASSERT(regs.sp - regs.fp()->base() >= 3);
|
||||
rval = regs.sp[-1];
|
||||
id = JSID_VOID;
|
||||
i = -2;
|
||||
|
@ -3123,7 +3203,7 @@ END_CASE(JSOP_NEWOBJECT)
|
|||
BEGIN_CASE(JSOP_ENDINIT)
|
||||
{
|
||||
/* FIXME remove JSOP_ENDINIT bug 588522 */
|
||||
JS_ASSERT(regs.stackDepth() >= 1);
|
||||
JS_ASSERT(regs.sp - regs.fp()->base() >= 1);
|
||||
JS_ASSERT(regs.sp[-1].isObject());
|
||||
}
|
||||
END_CASE(JSOP_ENDINIT)
|
||||
|
@ -3131,7 +3211,7 @@ END_CASE(JSOP_ENDINIT)
|
|||
BEGIN_CASE(JSOP_INITPROP)
|
||||
{
|
||||
/* Load the property's initial value into rval. */
|
||||
JS_ASSERT(regs.stackDepth() >= 2);
|
||||
JS_ASSERT(regs.sp - regs.fp()->base() >= 2);
|
||||
Value rval = regs.sp[-1];
|
||||
|
||||
/* Load the object being initialized into lval/obj. */
|
||||
|
@ -3159,7 +3239,7 @@ END_CASE(JSOP_INITPROP);
|
|||
BEGIN_CASE(JSOP_INITELEM)
|
||||
{
|
||||
/* Pop the element's value into rval. */
|
||||
JS_ASSERT(regs.stackDepth() >= 3);
|
||||
JS_ASSERT(regs.sp - regs.fp()->base() >= 3);
|
||||
const Value &rref = regs.sp[-1];
|
||||
|
||||
RootedObject &obj = rootObject0;
|
||||
|
@ -3645,17 +3725,24 @@ BEGIN_CASE(JSOP_ENTERLET1)
|
|||
{
|
||||
StaticBlockObject &blockObj = script->getObject(GET_UINT32_INDEX(regs.pc))->asStaticBlock();
|
||||
|
||||
if (op == JSOP_ENTERBLOCK) {
|
||||
JS_ASSERT(regs.stackDepth() == blockObj.stackDepth());
|
||||
JS_ASSERT(regs.stackDepth() + blockObj.slotCount() <= script->nslots);
|
||||
Value *vp = regs.sp + blockObj.slotCount();
|
||||
SetValueRangeToUndefined(regs.sp, vp);
|
||||
regs.sp = vp;
|
||||
}
|
||||
|
||||
/* Clone block iff there are any closed-over variables. */
|
||||
if (!regs.fp()->pushBlock(cx, blockObj))
|
||||
goto error;
|
||||
|
||||
if (op == JSOP_ENTERBLOCK) {
|
||||
JS_ASSERT(regs.fp()->base() + blockObj.stackDepth() == regs.sp);
|
||||
Value *vp = regs.sp + blockObj.slotCount();
|
||||
JS_ASSERT(regs.sp < vp);
|
||||
JS_ASSERT(vp <= regs.fp()->slots() + script->nslots);
|
||||
SetValueRangeToUndefined(regs.sp, vp);
|
||||
regs.sp = vp;
|
||||
} else if (op == JSOP_ENTERLET0) {
|
||||
JS_ASSERT(regs.fp()->base() + blockObj.stackDepth() + blockObj.slotCount()
|
||||
== regs.sp);
|
||||
} else if (op == JSOP_ENTERLET1) {
|
||||
JS_ASSERT(regs.fp()->base() + blockObj.stackDepth() + blockObj.slotCount()
|
||||
== regs.sp - 1);
|
||||
}
|
||||
}
|
||||
END_CASE(JSOP_ENTERBLOCK)
|
||||
|
||||
|
@ -3670,12 +3757,12 @@ BEGIN_CASE(JSOP_LEAVEBLOCKEXPR)
|
|||
if (op == JSOP_LEAVEBLOCK) {
|
||||
/* Pop the block's slots. */
|
||||
regs.sp -= GET_UINT16(regs.pc);
|
||||
JS_ASSERT(regs.stackDepth() == blockDepth);
|
||||
JS_ASSERT(regs.fp()->base() + blockDepth == regs.sp);
|
||||
} else if (op == JSOP_LEAVEBLOCKEXPR) {
|
||||
/* Pop the block's slots maintaining the topmost expr. */
|
||||
Value *vp = ®s.sp[-1];
|
||||
regs.sp -= GET_UINT16(regs.pc);
|
||||
JS_ASSERT(regs.stackDepth() == blockDepth + 1);
|
||||
JS_ASSERT(regs.fp()->base() + blockDepth == regs.sp - 1);
|
||||
regs.sp[-1] = *vp;
|
||||
} else {
|
||||
/* Another op will pop; nothing to do here. */
|
||||
|
@ -3689,7 +3776,6 @@ END_CASE(JSOP_LEAVEBLOCK)
|
|||
BEGIN_CASE(JSOP_GENERATOR)
|
||||
{
|
||||
JS_ASSERT(!cx->isExceptionPending());
|
||||
regs.fp()->initGeneratorFrame();
|
||||
regs.pc += JSOP_GENERATOR_LENGTH;
|
||||
JSObject *obj = js_NewGenerator(cx);
|
||||
if (!obj)
|
||||
|
@ -3704,9 +3790,9 @@ BEGIN_CASE(JSOP_GENERATOR)
|
|||
BEGIN_CASE(JSOP_YIELD)
|
||||
JS_ASSERT(!cx->isExceptionPending());
|
||||
JS_ASSERT(regs.fp()->isNonEvalFunctionFrame());
|
||||
if (cx->innermostGenerator()->state == JSGEN_CLOSING) {
|
||||
js_ReportValueError(cx, JSMSG_BAD_GENERATOR_YIELD, JSDVG_SEARCH_STACK,
|
||||
ObjectValue(regs.fp()->callee()), NULL);
|
||||
if (cx->generatorFor(regs.fp())->state == JSGEN_CLOSING) {
|
||||
js_ReportValueError(cx, JSMSG_BAD_GENERATOR_YIELD,
|
||||
JSDVG_SEARCH_STACK, argv[-2], NULL);
|
||||
goto error;
|
||||
}
|
||||
regs.fp()->setReturnValue(regs.sp[-1]);
|
||||
|
@ -3720,8 +3806,9 @@ BEGIN_CASE(JSOP_ARRAYPUSH)
|
|||
uint32_t slot = GET_UINT16(regs.pc);
|
||||
JS_ASSERT(script->nfixed <= slot);
|
||||
JS_ASSERT(slot < script->nslots);
|
||||
CheckLocalAccess(regs.fp(), slot);
|
||||
RootedObject &obj = rootObject0;
|
||||
obj = ®s.fp()->unaliasedLocal(slot).toObject();
|
||||
obj = ®s.fp()->slots()[slot].toObject();
|
||||
if (!js_NewbornArrayPush(cx, obj, regs.sp[-1]))
|
||||
goto error;
|
||||
regs.sp--;
|
||||
|
@ -3837,7 +3924,7 @@ END_CASE(JSOP_ARRAYPUSH)
|
|||
* the for-in loop.
|
||||
*/
|
||||
regs.pc = (script)->main() + tn->start + tn->length;
|
||||
regs.sp = regs.spForStackDepth(tn->stackDepth);
|
||||
regs.sp = regs.fp()->base() + tn->stackDepth;
|
||||
|
||||
switch (tn->kind) {
|
||||
case JSTRY_CATCH:
|
||||
|
@ -3907,10 +3994,19 @@ END_CASE(JSOP_ARRAYPUSH)
|
|||
exit:
|
||||
if (cx->compartment->debugMode())
|
||||
interpReturnOK = ScriptDebugEpilogue(cx, regs.fp(), interpReturnOK);
|
||||
if (!regs.fp()->isGeneratorFrame())
|
||||
regs.fp()->epilogue(cx);
|
||||
interpReturnOK = ScriptEpilogueOrGeneratorYield(cx, regs.fp(), interpReturnOK);
|
||||
regs.fp()->setFinishedInInterpreter();
|
||||
|
||||
#ifdef DEBUG
|
||||
JS_ASSERT(entryFrame == regs.fp());
|
||||
if (regs.fp()->isFunctionFrame())
|
||||
AssertValidFunctionScopeChainAtExit(regs.fp());
|
||||
else if (regs.fp()->isEvalFrame())
|
||||
AssertValidEvalFrameScopeChainAtExit(regs.fp());
|
||||
else if (!regs.fp()->isGeneratorFrame())
|
||||
JS_ASSERT(!regs.fp()->scopeChain()->isScope());
|
||||
#endif
|
||||
|
||||
#ifdef JS_METHODJIT
|
||||
/*
|
||||
* This path is used when it's guaranteed the method can be finished
|
||||
|
|
|
@ -18,6 +18,31 @@
|
|||
|
||||
namespace js {
|
||||
|
||||
/*
|
||||
* ScriptPrologue/ScriptEpilogue must be called in pairs. ScriptPrologue
|
||||
* must be called before the script executes. ScriptEpilogue must be called
|
||||
* after the script returns or exits via exception.
|
||||
*/
|
||||
|
||||
inline bool
|
||||
ScriptPrologue(JSContext *cx, StackFrame *fp, JSScript *script);
|
||||
|
||||
inline bool
|
||||
ScriptEpilogue(JSContext *cx, StackFrame *fp, bool ok);
|
||||
|
||||
/*
|
||||
* It is not valid to call ScriptPrologue when a generator is resumed or to
|
||||
* call ScriptEpilogue when a generator yields. However, the debugger still
|
||||
* needs LIFO notification of generator start/stop. This pair of functions does
|
||||
* the right thing based on the state of 'fp'.
|
||||
*/
|
||||
|
||||
inline bool
|
||||
ScriptPrologueOrGeneratorResume(JSContext *cx, StackFrame *fp);
|
||||
|
||||
inline bool
|
||||
ScriptEpilogueOrGeneratorYield(JSContext *cx, StackFrame *fp, bool ok);
|
||||
|
||||
/* Implemented in jsdbgapi: */
|
||||
|
||||
/*
|
||||
|
@ -249,6 +274,9 @@ UnwindForUncatchableException(JSContext *cx, const FrameRegs ®s);
|
|||
extern bool
|
||||
OnUnknownMethod(JSContext *cx, HandleObject obj, Value idval, Value *vp);
|
||||
|
||||
inline void
|
||||
AssertValidFunctionScopeChainAtExit(StackFrame *fp);
|
||||
|
||||
class TryNoteIter
|
||||
{
|
||||
const FrameRegs ®s;
|
||||
|
|
|
@ -421,6 +421,70 @@ DefVarOrConstOperation(JSContext *cx, HandleObject varobj, PropertyName *dn, uns
|
|||
return true;
|
||||
}
|
||||
|
||||
inline bool
|
||||
FunctionNeedsPrologue(JSContext *cx, JSFunction *fun)
|
||||
{
|
||||
/* Heavyweight functions need call objects created. */
|
||||
if (fun->isHeavyweight())
|
||||
return true;
|
||||
|
||||
/* Outer and inner functions need to preserve nesting invariants. */
|
||||
if (cx->typeInferenceEnabled() && fun->script()->nesting())
|
||||
return true;
|
||||
|
||||
return false;
|
||||
}
|
||||
|
||||
inline bool
|
||||
ScriptPrologue(JSContext *cx, StackFrame *fp, bool newType)
|
||||
{
|
||||
JS_ASSERT_IF(fp->isNonEvalFunctionFrame() && fp->fun()->isHeavyweight(), fp->hasCallObj());
|
||||
|
||||
if (fp->isConstructing()) {
|
||||
JSObject *obj = js_CreateThisForFunction(cx, RootedObject(cx, &fp->callee()), newType);
|
||||
if (!obj)
|
||||
return false;
|
||||
fp->functionThis().setObject(*obj);
|
||||
}
|
||||
|
||||
Probes::enterJSFun(cx, fp->maybeFun(), fp->script());
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
inline bool
|
||||
ScriptEpilogue(JSContext *cx, StackFrame *fp, bool ok)
|
||||
{
|
||||
Probes::exitJSFun(cx, fp->maybeFun(), fp->script());
|
||||
|
||||
/*
|
||||
* If inline-constructing, replace primitive rval with the new object
|
||||
* passed in via |this|, and instrument this constructor invocation.
|
||||
*/
|
||||
if (fp->isConstructing() && ok) {
|
||||
if (fp->returnValue().isPrimitive())
|
||||
fp->setReturnValue(ObjectValue(fp->constructorThis()));
|
||||
}
|
||||
|
||||
return ok;
|
||||
}
|
||||
|
||||
inline bool
|
||||
ScriptPrologueOrGeneratorResume(JSContext *cx, StackFrame *fp, bool newType)
|
||||
{
|
||||
if (!fp->isGeneratorFrame())
|
||||
return ScriptPrologue(cx, fp, newType);
|
||||
return true;
|
||||
}
|
||||
|
||||
inline bool
|
||||
ScriptEpilogueOrGeneratorYield(JSContext *cx, StackFrame *fp, bool ok)
|
||||
{
|
||||
if (!fp->isYielding())
|
||||
return ScriptEpilogue(cx, fp, ok);
|
||||
return ok;
|
||||
}
|
||||
|
||||
inline void
|
||||
InterpreterFrames::enableInterruptsIfRunning(JSScript *script)
|
||||
{
|
||||
|
@ -428,6 +492,49 @@ InterpreterFrames::enableInterruptsIfRunning(JSScript *script)
|
|||
enabler.enableInterrupts();
|
||||
}
|
||||
|
||||
inline void
|
||||
AssertValidEvalFrameScopeChainAtExit(StackFrame *fp)
|
||||
{
|
||||
#ifdef DEBUG
|
||||
JS_ASSERT(fp->isEvalFrame());
|
||||
|
||||
JS_ASSERT(!fp->hasBlockChain());
|
||||
JSObject &scope = *fp->scopeChain();
|
||||
|
||||
if (fp->isStrictEvalFrame())
|
||||
JS_ASSERT(scope.asCall().maybeStackFrame() == fp);
|
||||
else if (fp->isDebuggerFrame())
|
||||
JS_ASSERT(!scope.isScope());
|
||||
else if (fp->isDirectEvalFrame())
|
||||
JS_ASSERT(scope == *fp->prev()->scopeChain());
|
||||
else
|
||||
JS_ASSERT(scope.isGlobal());
|
||||
#endif
|
||||
}
|
||||
|
||||
inline void
|
||||
AssertValidFunctionScopeChainAtExit(StackFrame *fp)
|
||||
{
|
||||
#ifdef DEBUG
|
||||
JS_ASSERT(fp->isFunctionFrame());
|
||||
if (fp->isGeneratorFrame() || fp->isYielding())
|
||||
return;
|
||||
|
||||
if (fp->isEvalFrame()) {
|
||||
AssertValidEvalFrameScopeChainAtExit(fp);
|
||||
return;
|
||||
}
|
||||
|
||||
JS_ASSERT(!fp->hasBlockChain());
|
||||
JSObject &scope = *fp->scopeChain();
|
||||
|
||||
if (fp->fun()->isHeavyweight() && fp->hasCallObj())
|
||||
JS_ASSERT(scope.asCall().maybeStackFrame() == fp);
|
||||
else if (scope.isCall() || scope.isBlock())
|
||||
JS_ASSERT(scope.asScope().maybeStackFrame() != fp);
|
||||
#endif
|
||||
}
|
||||
|
||||
static JS_ALWAYS_INLINE bool
|
||||
AddOperation(JSContext *cx, const Value &lhs, const Value &rhs, Value *res)
|
||||
{
|
||||
|
@ -615,7 +722,7 @@ GetObjectElementOperation(JSContext *cx, JSOp op, HandleObject obj, const Value
|
|||
break;
|
||||
}
|
||||
} else if (obj->isArguments()) {
|
||||
if (obj->asArguments().maybeGetElement(index, res))
|
||||
if (obj->asArguments().getElement(index, res))
|
||||
break;
|
||||
}
|
||||
if (!obj->getElement(cx, index, res))
|
||||
|
@ -781,7 +888,7 @@ GuardFunApplySpeculation(JSContext *cx, FrameRegs ®s)
|
|||
if (!IsNativeFunction(args.calleev(), js_fun_apply)) {
|
||||
if (!JSScript::applySpeculationFailed(cx, regs.fp()->script()))
|
||||
return false;
|
||||
regs.sp[-1] = ObjectValue(regs.fp()->argsObj());
|
||||
args[1] = ObjectValue(regs.fp()->argsObj());
|
||||
}
|
||||
}
|
||||
return true;
|
||||
|
|
|
@ -1324,23 +1324,32 @@ generator_finalize(FreeOp *fop, JSObject *obj)
|
|||
JS_ASSERT(gen->state == JSGEN_NEWBORN ||
|
||||
gen->state == JSGEN_CLOSED ||
|
||||
gen->state == JSGEN_OPEN);
|
||||
JS_POISON(gen->fp, JS_FREE_PATTERN, sizeof(StackFrame));
|
||||
JS_POISON(gen, JS_FREE_PATTERN, sizeof(JSGenerator));
|
||||
fop->free_(gen);
|
||||
}
|
||||
|
||||
static void
|
||||
MarkGenerator(JSTracer *trc, JSGenerator *gen)
|
||||
{
|
||||
MarkValueRange(trc,
|
||||
HeapValueify(gen->fp->generatorArgsSnapshotBegin()),
|
||||
HeapValueify(gen->fp->generatorArgsSnapshotEnd()),
|
||||
"Generator Floating Args");
|
||||
gen->fp->mark(trc);
|
||||
MarkValueRange(trc,
|
||||
HeapValueify(gen->fp->generatorSlotsSnapshotBegin()),
|
||||
HeapValueify(gen->regs.sp),
|
||||
"Generator Floating Stack");
|
||||
StackFrame *fp = gen->floatingFrame();
|
||||
|
||||
/*
|
||||
* MarkGenerator should only be called when regs is based on the floating frame.
|
||||
* See calls to RebaseRegsFromTo.
|
||||
*/
|
||||
JS_ASSERT(size_t(gen->regs.sp - fp->slots()) <= fp->numSlots());
|
||||
|
||||
/*
|
||||
* Currently, generators are not mjitted. Still, (overflow) args can be
|
||||
* pushed by the mjit and need to be conservatively marked. Technically, the
|
||||
* formal args and generator slots are safe for exact marking, but since the
|
||||
* plan is to eventually mjit generators, it makes sense to future-proof
|
||||
* this code and save someone an hour later.
|
||||
*/
|
||||
MarkValueRange(trc, (HeapValue *)fp->formalArgsEnd() - gen->floatingStack,
|
||||
gen->floatingStack, "Generator Floating Args");
|
||||
fp->mark(trc);
|
||||
MarkValueRange(trc, gen->regs.sp - fp->slots(),
|
||||
(HeapValue *)fp->slots(), "Generator Floating Stack");
|
||||
}
|
||||
|
||||
static void
|
||||
|
@ -1365,6 +1374,7 @@ generator_trace(JSTracer *trc, JSObject *obj)
|
|||
if (gen->state == JSGEN_RUNNING || gen->state == JSGEN_CLOSING)
|
||||
return;
|
||||
|
||||
JS_ASSERT(gen->liveFrame() == gen->floatingFrame());
|
||||
MarkGenerator(trc, gen);
|
||||
}
|
||||
|
||||
|
@ -1405,8 +1415,9 @@ JSObject *
|
|||
js_NewGenerator(JSContext *cx)
|
||||
{
|
||||
FrameRegs &stackRegs = cx->regs();
|
||||
JS_ASSERT(stackRegs.stackDepth() == 0);
|
||||
StackFrame *stackfp = stackRegs.fp();
|
||||
JS_ASSERT(stackfp->base() == cx->regs().sp);
|
||||
JS_ASSERT(stackfp->actualArgs() <= stackfp->formalArgs());
|
||||
|
||||
Rooted<GlobalObject*> global(cx, &stackfp->global());
|
||||
JSObject *proto = global->getOrCreateGeneratorPrototype(cx);
|
||||
|
@ -1417,15 +1428,15 @@ js_NewGenerator(JSContext *cx)
|
|||
return NULL;
|
||||
|
||||
/* Load and compute stack slot counts. */
|
||||
Value *stackvp = stackfp->generatorArgsSnapshotBegin();
|
||||
unsigned vplen = stackfp->generatorArgsSnapshotEnd() - stackvp;
|
||||
Value *stackvp = stackfp->actualArgs() - 2;
|
||||
unsigned vplen = stackfp->formalArgsEnd() - stackvp;
|
||||
|
||||
/* Compute JSGenerator size. */
|
||||
unsigned nbytes = sizeof(JSGenerator) +
|
||||
(-1 + /* one Value included in JSGenerator */
|
||||
vplen +
|
||||
VALUES_PER_STACK_FRAME +
|
||||
stackfp->script()->nslots) * sizeof(HeapValue);
|
||||
stackfp->numSlots()) * sizeof(HeapValue);
|
||||
|
||||
JS_ASSERT(nbytes % sizeof(Value) == 0);
|
||||
JS_STATIC_ASSERT(sizeof(StackFrame) % sizeof(HeapValue) == 0);
|
||||
|
@ -1436,25 +1447,35 @@ js_NewGenerator(JSContext *cx)
|
|||
SetValueRangeToUndefined((Value *)gen, nbytes / sizeof(Value));
|
||||
|
||||
/* Cut up floatingStack space. */
|
||||
HeapValue *genvp = gen->stackSnapshot;
|
||||
HeapValue *genvp = gen->floatingStack;
|
||||
StackFrame *genfp = reinterpret_cast<StackFrame *>(genvp + vplen);
|
||||
|
||||
/* Initialize JSGenerator. */
|
||||
gen->obj.init(obj);
|
||||
gen->state = JSGEN_NEWBORN;
|
||||
gen->enumerators = NULL;
|
||||
gen->fp = genfp;
|
||||
gen->prevGenerator = NULL;
|
||||
gen->floating = genfp;
|
||||
|
||||
/* Copy from the stack to the generator's floating frame. */
|
||||
gen->regs.rebaseFromTo(stackRegs, *genfp);
|
||||
genfp->copyFrameAndValues<HeapValue, Value, StackFrame::DoPostBarrier>(
|
||||
genfp->stealFrameAndSlots<HeapValue, Value, StackFrame::DoPostBarrier>(
|
||||
cx, genfp, genvp, stackfp, stackvp, stackRegs.sp);
|
||||
genfp->initFloatingGenerator();
|
||||
stackfp->setYielding(); /* XXX: to be removed */
|
||||
|
||||
obj->setPrivate(gen);
|
||||
return obj;
|
||||
}
|
||||
|
||||
JSGenerator *
|
||||
js_FloatingFrameToGenerator(StackFrame *fp)
|
||||
{
|
||||
JS_ASSERT(fp->isGeneratorFrame() && fp->isFloatingGenerator());
|
||||
char *floatingStackp = (char *)(fp->actualArgs() - 2);
|
||||
char *p = floatingStackp - offsetof(JSGenerator, floatingStack);
|
||||
return reinterpret_cast<JSGenerator *>(p);
|
||||
}
|
||||
|
||||
typedef enum JSGeneratorOp {
|
||||
JSGENOP_NEXT,
|
||||
JSGENOP_SEND,
|
||||
|
@ -1471,10 +1492,16 @@ SendToGenerator(JSContext *cx, JSGeneratorOp op, JSObject *obj,
|
|||
JSGenerator *gen, const Value &arg)
|
||||
{
|
||||
if (gen->state == JSGEN_RUNNING || gen->state == JSGEN_CLOSING) {
|
||||
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_NESTING_GENERATOR);
|
||||
js_ReportValueError(cx, JSMSG_NESTING_GENERATOR,
|
||||
JSDVG_SEARCH_STACK, ObjectOrNullValue(obj),
|
||||
JS_GetFunctionId(gen->floatingFrame()->fun()));
|
||||
return JS_FALSE;
|
||||
}
|
||||
|
||||
/* Check for OOM errors here, where we can fail easily. */
|
||||
if (!cx->ensureGeneratorStackSpace())
|
||||
return JS_FALSE;
|
||||
|
||||
/*
|
||||
* Write barrier is needed since the generator stack can be updated,
|
||||
* and it's not barriered in any other way. We need to do it before
|
||||
|
@ -1514,6 +1541,8 @@ SendToGenerator(JSContext *cx, JSGeneratorOp op, JSObject *obj,
|
|||
break;
|
||||
}
|
||||
|
||||
StackFrame *genfp = gen->floatingFrame();
|
||||
|
||||
JSBool ok;
|
||||
{
|
||||
GeneratorFrameGuard gfg;
|
||||
|
@ -1524,6 +1553,7 @@ SendToGenerator(JSContext *cx, JSGeneratorOp op, JSObject *obj,
|
|||
|
||||
StackFrame *fp = gfg.fp();
|
||||
gen->regs = cx->regs();
|
||||
JS_ASSERT(gen->liveFrame() == fp);
|
||||
|
||||
cx->enterGenerator(gen); /* OOM check above. */
|
||||
JSObject *enumerators = cx->enumerators;
|
||||
|
@ -1536,18 +1566,18 @@ SendToGenerator(JSContext *cx, JSGeneratorOp op, JSObject *obj,
|
|||
cx->leaveGenerator(gen);
|
||||
}
|
||||
|
||||
if (gen->fp->isYielding()) {
|
||||
if (gen->floatingFrame()->isYielding()) {
|
||||
/* Yield cannot fail, throw or be called on closing. */
|
||||
JS_ASSERT(ok);
|
||||
JS_ASSERT(!cx->isExceptionPending());
|
||||
JS_ASSERT(gen->state == JSGEN_RUNNING);
|
||||
JS_ASSERT(op != JSGENOP_CLOSE);
|
||||
gen->fp->clearYielding();
|
||||
genfp->clearYielding();
|
||||
gen->state = JSGEN_OPEN;
|
||||
return JS_TRUE;
|
||||
}
|
||||
|
||||
gen->fp->clearReturnValue();
|
||||
genfp->clearReturnValue();
|
||||
gen->state = JSGEN_CLOSED;
|
||||
if (ok) {
|
||||
/* Returned, explicitly or by falling off the end. */
|
||||
|
@ -1639,7 +1669,7 @@ generator_op(JSContext *cx, Native native, JSGeneratorOp op, Value *vp, unsigned
|
|||
if (!SendToGenerator(cx, op, obj, gen, undef ? args[0] : UndefinedValue()))
|
||||
return false;
|
||||
|
||||
args.rval() = gen->fp->returnValue();
|
||||
args.rval() = gen->floatingFrame()->returnValue();
|
||||
return true;
|
||||
}
|
||||
|
||||
|
|
|
@ -262,28 +262,65 @@ ForOf(JSContext *cx, const Value &iterable, Op op)
|
|||
/*
|
||||
* Generator state codes.
|
||||
*/
|
||||
enum JSGeneratorState
|
||||
{
|
||||
typedef enum JSGeneratorState {
|
||||
JSGEN_NEWBORN, /* not yet started */
|
||||
JSGEN_OPEN, /* started by a .next() or .send(undefined) call */
|
||||
JSGEN_RUNNING, /* currently executing via .next(), etc., call */
|
||||
JSGEN_CLOSING, /* close method is doing asynchronous return */
|
||||
JSGEN_CLOSED /* closed, cannot be started or closed again */
|
||||
};
|
||||
} JSGeneratorState;
|
||||
|
||||
struct JSGenerator
|
||||
{
|
||||
struct JSGenerator {
|
||||
js::HeapPtrObject obj;
|
||||
JSGeneratorState state;
|
||||
js::FrameRegs regs;
|
||||
JSObject *enumerators;
|
||||
JSGenerator *prevGenerator;
|
||||
js::StackFrame *fp;
|
||||
js::HeapValue stackSnapshot[1];
|
||||
js::StackFrame *floating;
|
||||
js::HeapValue floatingStack[1];
|
||||
|
||||
js::StackFrame *floatingFrame() {
|
||||
return floating;
|
||||
}
|
||||
|
||||
js::StackFrame *liveFrame() {
|
||||
JS_ASSERT((state == JSGEN_RUNNING || state == JSGEN_CLOSING) ==
|
||||
(regs.fp() != floatingFrame()));
|
||||
return regs.fp();
|
||||
}
|
||||
};
|
||||
|
||||
extern JSObject *
|
||||
js_NewGenerator(JSContext *cx);
|
||||
|
||||
/*
|
||||
* Generator stack frames do not have stable pointers since they get copied to
|
||||
* and from the generator object and the stack (see SendToGenerator). This is a
|
||||
* problem for Block and With objects, which need to store a pointer to the
|
||||
* enclosing stack frame. The solution is for Block and With objects to store
|
||||
* a pointer to the "floating" stack frame stored in the generator object,
|
||||
* since it is stable, and maintain, in the generator object, a pointer to the
|
||||
* "live" stack frame (either a copy on the stack or the floating frame). Thus,
|
||||
* Block and With objects must "normalize" to and from the floating/live frames
|
||||
* in the case of generators using the following functions.
|
||||
*/
|
||||
inline js::StackFrame *
|
||||
js_FloatingFrameIfGenerator(JSContext *cx, js::StackFrame *fp)
|
||||
{
|
||||
if (JS_UNLIKELY(fp->isGeneratorFrame()))
|
||||
return cx->generatorFor(fp)->floatingFrame();
|
||||
return fp;
|
||||
}
|
||||
|
||||
/* Given a floating frame, given the JSGenerator containing it. */
|
||||
extern JSGenerator *
|
||||
js_FloatingFrameToGenerator(js::StackFrame *fp);
|
||||
|
||||
inline js::StackFrame *
|
||||
js_LiveFrameIfGenerator(js::StackFrame *fp)
|
||||
{
|
||||
return fp->isGeneratorFrame() ? js_FloatingFrameToGenerator(fp)->liveFrame() : fp;
|
||||
}
|
||||
|
||||
#endif
|
||||
|
||||
extern JSObject *
|
||||
|
|
|
@ -2849,7 +2849,6 @@ js::NewObjectWithType(JSContext *cx, HandleTypeObject type, JSObject *parent, gc
|
|||
JS_ASSERT(type->proto->hasNewType(type));
|
||||
JS_ASSERT(parent);
|
||||
|
||||
JS_ASSERT(kind <= gc::FINALIZE_OBJECT_LAST);
|
||||
if (CanBeFinalizedInBackground(kind, &ObjectClass))
|
||||
kind = GetBackgroundAllocKind(kind);
|
||||
|
||||
|
@ -3872,6 +3871,14 @@ JSObject::growSlots(JSContext *cx, uint32_t oldCount, uint32_t newCount)
|
|||
JS_ASSERT(newCount >= SLOT_CAPACITY_MIN);
|
||||
JS_ASSERT(!isDenseArray());
|
||||
|
||||
/*
|
||||
* Slots are only allocated for call objects when new properties are
|
||||
* added to them, which can only happen while the call is still on the
|
||||
* stack (and an eval, DEFFUN, etc. happens). We thus do not need to
|
||||
* worry about updating any active outer function args/vars.
|
||||
*/
|
||||
JS_ASSERT_IF(isCall(), asCall().maybeStackFrame() != NULL);
|
||||
|
||||
/*
|
||||
* Slot capacities are determined by the span of allocated objects. Due to
|
||||
* the limited number of bits to store shape slots, object growth is
|
||||
|
@ -6238,9 +6245,15 @@ js_DumpStackFrame(JSContext *cx, StackFrame *start)
|
|||
}
|
||||
}
|
||||
if (fp->hasArgs()) {
|
||||
fprintf(stderr, " actuals: %p (%u) ", (void *) fp->actuals(), (unsigned) fp->numActualArgs());
|
||||
fprintf(stderr, " formals: %p (%u)\n", (void *) fp->formals(), (unsigned) fp->numFormalArgs());
|
||||
fprintf(stderr, " actuals: %p (%u) ", (void *) fp->actualArgs(), (unsigned) fp->numActualArgs());
|
||||
fprintf(stderr, " formals: %p (%u)\n", (void *) fp->formalArgs(), (unsigned) fp->numFormalArgs());
|
||||
}
|
||||
if (fp->hasCallObj()) {
|
||||
fprintf(stderr, " has call obj: ");
|
||||
dumpValue(ObjectValue(fp->callObj()));
|
||||
fprintf(stderr, "\n");
|
||||
}
|
||||
MaybeDumpObject("argsobj", fp->maybeArgsObj());
|
||||
MaybeDumpObject("blockChain", fp->maybeBlockChain());
|
||||
if (!fp->isDummyFrame()) {
|
||||
MaybeDumpValue("this", fp->thisValue());
|
||||
|
|
|
@ -514,15 +514,14 @@ js_Disassemble1(JSContext *cx, JSScript *script, jsbytecode *pc,
|
|||
}
|
||||
|
||||
case JOF_SCOPECOORD: {
|
||||
Value v = StringValue(ScopeCoordinateName(script, pc));
|
||||
JSAutoByteString bytes;
|
||||
if (!ToDisassemblySource(cx, v, &bytes))
|
||||
return 0;
|
||||
ScopeCoordinate sc(pc);
|
||||
Sprint(sp, " %s (hops = %u, slot = %u)", bytes.ptr(), sc.hops, sc.slot);
|
||||
break;
|
||||
unsigned i = GET_UINT16(pc);
|
||||
Sprint(sp, " %u", i);
|
||||
pc += sizeof(uint16_t);
|
||||
i = GET_UINT16(pc);
|
||||
Sprint(sp, " %u", i);
|
||||
pc += sizeof(uint16_t);
|
||||
/* FALL THROUGH */
|
||||
}
|
||||
|
||||
case JOF_ATOM: {
|
||||
Value v = StringValue(script->getAtom(GET_UINT32_INDEX(pc)));
|
||||
JSAutoByteString bytes;
|
||||
|
@ -1409,12 +1408,6 @@ AddParenSlop(SprintStack *ss)
|
|||
ss->sprinter.reserveAndClear(PAREN_SLOP);
|
||||
}
|
||||
|
||||
static unsigned
|
||||
StackDepth(JSScript *script)
|
||||
{
|
||||
return script->nslots - script->nfixed;
|
||||
}
|
||||
|
||||
static JSBool
|
||||
PushOff(SprintStack *ss, ptrdiff_t off, JSOp op, jsbytecode *pc = NULL)
|
||||
{
|
||||
|
@ -1850,7 +1843,7 @@ static bool
|
|||
IsVarSlot(JSPrinter *jp, jsbytecode *pc, JSAtom **varAtom, int *localSlot)
|
||||
{
|
||||
if (JOF_OPTYPE(*pc) == JOF_SCOPECOORD) {
|
||||
*varAtom = ScopeCoordinateName(jp->script, pc);
|
||||
*varAtom = ScopeCoordinateAtom(jp->script, pc);
|
||||
LOCAL_ASSERT_RV(*varAtom, NULL);
|
||||
return true;
|
||||
}
|
||||
|
@ -5713,7 +5706,7 @@ js_DecompileValueGenerator(JSContext *cx, int spindex, jsval v,
|
|||
* calculated value matching v under assumption that it is
|
||||
* it that caused exception, see bug 328664.
|
||||
*/
|
||||
Value *stackBase = cx->regs().spForStackDepth(0);
|
||||
Value *stackBase = fp->base();
|
||||
Value *sp = cx->regs().sp;
|
||||
do {
|
||||
if (sp == stackBase) {
|
||||
|
|
|
@ -60,7 +60,7 @@ typedef enum JSOp {
|
|||
#define JOF_INT8 18 /* int8_t immediate operand */
|
||||
#define JOF_ATOMOBJECT 19 /* uint16_t constant index + object index */
|
||||
#define JOF_UINT16PAIR 20 /* pair of uint16_t immediates */
|
||||
#define JOF_SCOPECOORD 21 /* pair of uint16_t immediates followed by block index */
|
||||
#define JOF_SCOPECOORD 21 /* pair of uint16_t immediates followed by atom index */
|
||||
#define JOF_TYPEMASK 0x001f /* mask for above immediate types */
|
||||
|
||||
#define JOF_NAME (1U<<5) /* name operation */
|
||||
|
|
|
@ -333,14 +333,6 @@ OPDEF(JSOP_FINALLY, 135,"finally", NULL, 1, 0, 2, 0, JOF_BYTE)
|
|||
* function, function statements that are conditionally executed, 'eval',
|
||||
* 'with', 'arguments' and E4X filters. All of these cases require creating a
|
||||
* CallObject to own the aliased variable.
|
||||
*
|
||||
* An ALIASEDVAR opcode contains the following immediates:
|
||||
* uint16 hops: the number of scope objects to skip to find the ScopeObject
|
||||
* containing the variable being accessed
|
||||
* uint16 slot: the slot containing the variable in the ScopeObject (this
|
||||
* 'slot' does not include RESERVED_SLOTS).
|
||||
* uint32 block: the index (into the script object table) of the block chain
|
||||
* at the point of the variable access.
|
||||
*/
|
||||
OPDEF(JSOP_GETALIASEDVAR, 136,"getaliasedvar",NULL, 9, 0, 1, 19, JOF_SCOPECOORD|JOF_NAME)
|
||||
OPDEF(JSOP_CALLALIASEDVAR,137,"callaliasedvar",NULL, 9, 0, 1, 19, JOF_SCOPECOORD|JOF_NAME)
|
||||
|
|
|
@ -289,7 +289,7 @@ Shape::getChildBinding(JSContext *cx, const StackShape &child)
|
|||
* the fixed slot count here, which will feed into call objects created
|
||||
* off of the bindings.
|
||||
*/
|
||||
uint32_t slots = child.slotSpan();
|
||||
uint32_t slots = child.slotSpan() + 1; /* Add one for private data. */
|
||||
gc::AllocKind kind = gc::GetGCObjectKind(slots);
|
||||
|
||||
/*
|
||||
|
@ -300,11 +300,11 @@ Shape::getChildBinding(JSContext *cx, const StackShape &child)
|
|||
*/
|
||||
uint32_t nfixed = gc::GetGCKindSlots(kind);
|
||||
if (nfixed < slots) {
|
||||
nfixed = CallObject::RESERVED_SLOTS;
|
||||
JS_ASSERT(gc::GetGCKindSlots(gc::GetGCObjectKind(nfixed)) == CallObject::RESERVED_SLOTS);
|
||||
nfixed = CallObject::RESERVED_SLOTS + 1;
|
||||
JS_ASSERT(gc::GetGCKindSlots(gc::GetGCObjectKind(nfixed)) == CallObject::RESERVED_SLOTS + 1);
|
||||
}
|
||||
|
||||
shape->setNumFixedSlots(nfixed);
|
||||
shape->setNumFixedSlots(nfixed - 1);
|
||||
}
|
||||
return shape;
|
||||
}
|
||||
|
|
|
@ -882,11 +882,6 @@ struct Shape : public js::gc::Cell
|
|||
|
||||
inline void markChildren(JSTracer *trc);
|
||||
|
||||
inline Shape *search(JSContext *cx, jsid id) {
|
||||
Shape **_;
|
||||
return search(cx, this, id, &_);
|
||||
}
|
||||
|
||||
/* For JIT usage */
|
||||
static inline size_t offsetOfBase() { return offsetof(Shape, base_); }
|
||||
|
||||
|
|
|
@ -67,7 +67,7 @@ Bindings::lookup(JSContext *cx, JSAtom *name, unsigned *indexp) const
|
|||
if (indexp)
|
||||
*indexp = shape->shortid();
|
||||
|
||||
if (shape->setter() == CallObject::setArgOp)
|
||||
if (shape->getter() == CallObject::getArgOp)
|
||||
return ARGUMENT;
|
||||
|
||||
return shape->writable() ? VARIABLE : CONSTANT;
|
||||
|
@ -102,14 +102,14 @@ Bindings::add(JSContext *cx, HandleAtom name, BindingKind kind)
|
|||
if (kind == ARGUMENT) {
|
||||
JS_ASSERT(nvars == 0);
|
||||
indexp = &nargs;
|
||||
getter = NULL;
|
||||
getter = CallObject::getArgOp;
|
||||
setter = CallObject::setArgOp;
|
||||
slot += nargs;
|
||||
} else {
|
||||
JS_ASSERT(kind == VARIABLE || kind == CONSTANT);
|
||||
|
||||
indexp = &nvars;
|
||||
getter = NULL;
|
||||
getter = CallObject::getVarOp;
|
||||
setter = CallObject::setVarOp;
|
||||
if (kind == CONSTANT)
|
||||
attrs |= JSPROP_READONLY;
|
||||
|
@ -208,7 +208,7 @@ Bindings::getLocalNameArray(JSContext *cx, BindingNames *namesp)
|
|||
const Shape &shape = r.front();
|
||||
unsigned index = uint16_t(shape.shortid());
|
||||
|
||||
if (shape.setter() == CallObject::setArgOp) {
|
||||
if (shape.getter() == CallObject::getArgOp) {
|
||||
JS_ASSERT(index < nargs);
|
||||
names[index].kind = ARGUMENT;
|
||||
} else {
|
||||
|
@ -221,7 +221,7 @@ Bindings::getLocalNameArray(JSContext *cx, BindingNames *namesp)
|
|||
names[index].maybeAtom = JSID_TO_ATOM(shape.propid());
|
||||
} else {
|
||||
JS_ASSERT(JSID_IS_INT(shape.propid()));
|
||||
JS_ASSERT(shape.setter() == CallObject::setArgOp);
|
||||
JS_ASSERT(shape.getter() == CallObject::getArgOp);
|
||||
names[index].maybeAtom = NULL;
|
||||
}
|
||||
}
|
||||
|
@ -241,7 +241,7 @@ Bindings::lastArgument() const
|
|||
|
||||
const js::Shape *shape = lastVariable();
|
||||
if (nvars > 0) {
|
||||
while (shape->previous() && shape->setter() != CallObject::setArgOp)
|
||||
while (shape->previous() && shape->getter() != CallObject::getArgOp)
|
||||
shape = shape->previous();
|
||||
}
|
||||
return shape;
|
||||
|
@ -604,10 +604,10 @@ js::XDRScript(XDRState<mode> *xdr, JSScript **scriptp, JSScript *parentScript)
|
|||
script->bindingsAccessedDynamically = true;
|
||||
if (scriptBits & (1 << ArgumentsHasLocalBinding)) {
|
||||
PropertyName *arguments = cx->runtime->atomState.argumentsAtom;
|
||||
unsigned local;
|
||||
DebugOnly<BindingKind> kind = script->bindings.lookup(cx, arguments, &local);
|
||||
unsigned slot;
|
||||
DebugOnly<BindingKind> kind = script->bindings.lookup(cx, arguments, &slot);
|
||||
JS_ASSERT(kind == VARIABLE || kind == CONSTANT);
|
||||
script->setArgumentsHasLocalBinding(local);
|
||||
script->setArgumentsHasLocalBinding(slot);
|
||||
}
|
||||
if (scriptBits & (1 << NeedsArgsObj))
|
||||
script->setNeedsArgsObj(true);
|
||||
|
@ -1309,12 +1309,17 @@ JSScript::NewScriptFromEmitter(JSContext *cx, BytecodeEmitter *bce)
|
|||
script->debugMode = true;
|
||||
#endif
|
||||
|
||||
if (bce->sc->funArgumentsHasLocalBinding()) {
|
||||
// This must precede the script->bindings.transfer() call below
|
||||
script->setArgumentsHasLocalBinding(bce->sc->argumentsLocal());
|
||||
if (bce->sc->funDefinitelyNeedsArgsObj())
|
||||
script->setNeedsArgsObj(true);
|
||||
if (bce->sc->inFunction) {
|
||||
if (bce->sc->funArgumentsHasLocalBinding()) {
|
||||
// This must precede the script->bindings.transfer() call below.
|
||||
script->setArgumentsHasLocalBinding(bce->sc->argumentsLocalSlot());
|
||||
if (bce->sc->funDefinitelyNeedsArgsObj())
|
||||
script->setNeedsArgsObj(true);
|
||||
} else {
|
||||
JS_ASSERT(!bce->sc->funDefinitelyNeedsArgsObj());
|
||||
}
|
||||
} else {
|
||||
JS_ASSERT(!bce->sc->funArgumentsHasLocalBinding());
|
||||
JS_ASSERT(!bce->sc->funDefinitelyNeedsArgsObj());
|
||||
}
|
||||
|
||||
|
@ -1801,7 +1806,7 @@ js::CloneScript(JSContext *cx, JSScript *src)
|
|||
dst->nslots = src->nslots;
|
||||
dst->staticLevel = src->staticLevel;
|
||||
if (src->argumentsHasLocalBinding()) {
|
||||
dst->setArgumentsHasLocalBinding(src->argumentsLocal());
|
||||
dst->setArgumentsHasLocalBinding(src->argumentsLocalSlot());
|
||||
if (src->analyzedArgsUsage())
|
||||
dst->setNeedsArgsObj(src->needsArgsObj());
|
||||
}
|
||||
|
@ -2129,10 +2134,10 @@ JSScript::markChildren(JSTracer *trc)
|
|||
}
|
||||
|
||||
void
|
||||
JSScript::setArgumentsHasLocalBinding(uint16_t local)
|
||||
JSScript::setArgumentsHasLocalBinding(uint16_t slot)
|
||||
{
|
||||
argsHasLocalBinding_ = true;
|
||||
argsLocal_ = local;
|
||||
argsSlot_ = slot;
|
||||
needsArgsAnalysis_ = true;
|
||||
}
|
||||
|
||||
|
@ -2164,7 +2169,7 @@ JSScript::applySpeculationFailed(JSContext *cx, JSScript *script_)
|
|||
|
||||
script->needsArgsObj_ = true;
|
||||
|
||||
const unsigned local = script->argumentsLocal();
|
||||
const unsigned slot = script->argumentsLocalSlot();
|
||||
|
||||
/*
|
||||
* By design, the apply-arguments optimization is only made when there
|
||||
|
@ -2181,20 +2186,22 @@ JSScript::applySpeculationFailed(JSContext *cx, JSScript *script_)
|
|||
for (AllFramesIter i(cx->stack.space()); !i.done(); ++i) {
|
||||
StackFrame *fp = i.fp();
|
||||
if (fp->isFunctionFrame() && fp->script() == script) {
|
||||
ArgumentsObject *argsobj = ArgumentsObject::createExpected(cx, fp);
|
||||
if (!argsobj) {
|
||||
/*
|
||||
* We can't leave stack frames with script->needsArgsObj but no
|
||||
* arguments object. It is, however, safe to leave frames with
|
||||
* an arguments object but !script->needsArgsObj.
|
||||
*/
|
||||
script->needsArgsObj_ = false;
|
||||
return false;
|
||||
}
|
||||
if (!fp->hasArgsObj()) {
|
||||
ArgumentsObject *obj = ArgumentsObject::create(cx, fp);
|
||||
if (!obj) {
|
||||
/*
|
||||
* We can't leave stack frames where script->needsArgsObj
|
||||
* and !fp->hasArgsObj. It is, however, safe to leave frames
|
||||
* where fp->hasArgsObj and !fp->script->needsArgsObj.
|
||||
*/
|
||||
script->needsArgsObj_ = false;
|
||||
return false;
|
||||
}
|
||||
|
||||
/* Note: 'arguments' may have already been overwritten. */
|
||||
if (fp->unaliasedLocal(local).isMagic(JS_OPTIMIZED_ARGUMENTS))
|
||||
fp->unaliasedLocal(local) = ObjectValue(*argsobj);
|
||||
/* Note: 'arguments' may have already been overwritten. */
|
||||
if (fp->localSlot(slot).isMagic(JS_OPTIMIZED_ARGUMENTS))
|
||||
fp->localSlot(slot) = ObjectValue(*obj);
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -2213,6 +2220,7 @@ JSScript::applySpeculationFailed(JSContext *cx, JSScript *script_)
|
|||
return true;
|
||||
}
|
||||
|
||||
#ifdef DEBUG
|
||||
bool
|
||||
JSScript::varIsAliased(unsigned varSlot)
|
||||
{
|
||||
|
@ -2256,3 +2264,4 @@ JSScript::formalLivesInCallObject(unsigned argSlot)
|
|||
|
||||
return false;
|
||||
}
|
||||
#endif
|
||||
|
|
|
@ -114,12 +114,12 @@ class Bindings
|
|||
* These functions map between argument/var indices [0, nargs/nvars) and
|
||||
* and Bindings indices [0, nargs + nvars).
|
||||
*/
|
||||
bool slotIsArg(uint16_t i) const { return i < nargs; }
|
||||
bool slotIsLocal(uint16_t i) const { return i >= nargs; }
|
||||
uint16_t argToSlot(uint16_t i) { JS_ASSERT(i < nargs); return i; }
|
||||
uint16_t localToSlot(uint16_t i) { return i + nargs; }
|
||||
uint16_t slotToArg(uint16_t i) { JS_ASSERT(slotIsArg(i)); return i; }
|
||||
uint16_t slotToLocal(uint16_t i) { JS_ASSERT(slotIsLocal(i)); return i - nargs; }
|
||||
bool bindingIsArg(uint16_t i) const { return i < nargs; }
|
||||
bool bindingIsLocal(uint16_t i) const { return i >= nargs; }
|
||||
uint16_t argToBinding(uint16_t i) { JS_ASSERT(i < nargs); return i; }
|
||||
uint16_t localToBinding(uint16_t i) { return i + nargs; }
|
||||
uint16_t bindingToArg(uint16_t i) { JS_ASSERT(bindingIsArg(i)); return i; }
|
||||
uint16_t bindingToLocal(uint16_t i) { JS_ASSERT(bindingIsLocal(i)); return i - nargs; }
|
||||
|
||||
/* Ensure these bindings have a shape lineage. */
|
||||
inline bool ensureShape(JSContext *cx);
|
||||
|
@ -493,7 +493,7 @@ struct JSScript : public js::gc::Cell
|
|||
uint16_t staticLevel;/* static level for display maintenance */
|
||||
|
||||
private:
|
||||
uint16_t argsLocal_; /* local holding 'arguments' (if argumentsHasLocalBindings) */
|
||||
uint16_t argsSlot_; /* slot holding 'arguments' (if argumentsHasLocalBindings) */
|
||||
|
||||
// 8-bit fields.
|
||||
|
||||
|
@ -588,8 +588,8 @@ struct JSScript : public js::gc::Cell
|
|||
/* See ContextFlags::funArgumentsHasLocalBinding comment. */
|
||||
bool argumentsHasLocalBinding() const { return argsHasLocalBinding_; }
|
||||
jsbytecode *argumentsBytecode() const { JS_ASSERT(code[0] == JSOP_ARGUMENTS); return code; }
|
||||
unsigned argumentsLocal() const { JS_ASSERT(argsHasLocalBinding_); return argsLocal_; }
|
||||
void setArgumentsHasLocalBinding(uint16_t local);
|
||||
unsigned argumentsLocalSlot() const { JS_ASSERT(argsHasLocalBinding_); return argsSlot_; }
|
||||
void setArgumentsHasLocalBinding(uint16_t slot);
|
||||
|
||||
/*
|
||||
* As an optimization, even when argsHasLocalBinding, the function prologue
|
||||
|
@ -861,11 +861,12 @@ struct JSScript : public js::gc::Cell
|
|||
}
|
||||
|
||||
|
||||
#ifdef DEBUG
|
||||
bool varIsAliased(unsigned varSlot);
|
||||
bool formalIsAliased(unsigned argSlot);
|
||||
bool formalLivesInArgumentsObject(unsigned argSlot);
|
||||
bool formalLivesInCallObject(unsigned argSlot);
|
||||
|
||||
#endif
|
||||
private:
|
||||
/*
|
||||
* Recompile with or without single-stepping support, as directed
|
||||
|
@ -943,6 +944,12 @@ JS_STATIC_ASSERT(sizeof(JSScript::ArrayBitsT) * 8 >= JSScript::LIMIT);
|
|||
/* If this fails, add/remove padding within JSScript. */
|
||||
JS_STATIC_ASSERT(sizeof(JSScript) % js::gc::Cell::CellSize == 0);
|
||||
|
||||
static JS_INLINE unsigned
|
||||
StackDepth(JSScript *script)
|
||||
{
|
||||
return script->nslots - script->nfixed;
|
||||
}
|
||||
|
||||
/*
|
||||
* New-script-hook calling is factored from NewScriptFromEmitter so that it
|
||||
* and callers of XDRScript can share this code. In the case of callers
|
||||
|
|
|
@ -61,8 +61,8 @@ Shape *
|
|||
Bindings::initialShape(JSContext *cx) const
|
||||
{
|
||||
/* Get an allocation kind to match an empty call object. */
|
||||
gc::AllocKind kind = gc::FINALIZE_OBJECT2_BACKGROUND;
|
||||
JS_ASSERT(gc::GetGCKindSlots(kind) == CallObject::RESERVED_SLOTS);
|
||||
gc::AllocKind kind = gc::FINALIZE_OBJECT4;
|
||||
JS_ASSERT(gc::GetGCKindSlots(kind) == CallObject::RESERVED_SLOTS + 1);
|
||||
|
||||
return EmptyShape::getInitialShape(cx, &CallClass, NULL, NULL, kind,
|
||||
BaseShape::VAROBJ);
|
||||
|
|
|
@ -2215,13 +2215,17 @@ LambdaIsGetElem(JSObject &lambda, JSContext *cx)
|
|||
* real name lookup since this can trigger observable effects.
|
||||
*/
|
||||
Value b;
|
||||
RootedObject scope(cx);
|
||||
scope = cx->stack.currentScriptedScopeChain();
|
||||
JSObject *scope = cx->stack.currentScriptedScopeChain();
|
||||
while (true) {
|
||||
if (!scope->isCall() && !scope->isBlock())
|
||||
if (scope->isCall()) {
|
||||
if (scope->asCall().containsVarOrArg(bname, &b, cx))
|
||||
break;
|
||||
} else if (scope->isBlock()) {
|
||||
if (scope->asClonedBlock().containsVar(bname, &b, cx))
|
||||
break;
|
||||
} else {
|
||||
return NULL;
|
||||
if (HasDataProperty(cx, scope, bname, &b))
|
||||
break;
|
||||
}
|
||||
scope = &scope->asScope().enclosingScope();
|
||||
}
|
||||
|
||||
|
|
|
@ -215,11 +215,10 @@ typedef enum JSWhyMagic
|
|||
JS_ARG_POISON, /* used in debug builds to catch tracing errors */
|
||||
JS_SERIALIZE_NO_NODE, /* an empty subnode in the AST serializer */
|
||||
JS_LAZY_ARGUMENTS, /* lazy arguments value on the stack */
|
||||
JS_UNASSIGNED_ARGUMENTS, /* the initial value of callobj.arguments */
|
||||
JS_OPTIMIZED_ARGUMENTS, /* optimized-away 'arguments' value */
|
||||
JS_IS_CONSTRUCTING, /* magic value passed to natives to indicate construction */
|
||||
JS_OVERWRITTEN_CALLEE, /* arguments.callee has been overwritten */
|
||||
JS_FORWARD_TO_CALL_OBJECT, /* args object element stored in call object */
|
||||
JS_BLOCK_NEEDS_CLONE, /* value of static block object slot */
|
||||
JS_GENERIC_MAGIC /* for local use */
|
||||
} JSWhyMagic;
|
||||
|
||||
|
|
|
@ -1077,22 +1077,60 @@ mjit::Compiler::generatePrologue()
|
|||
|
||||
markUndefinedLocals();
|
||||
|
||||
types::TypeScriptNesting *nesting = script->nesting();
|
||||
|
||||
/*
|
||||
* Load the scope chain into the frame if it will be needed by NAME
|
||||
* opcodes or by the nesting prologue below. The scope chain is always
|
||||
* set for global and eval frames, and will have been set by
|
||||
* HeavyweightFunctionPrologue for heavyweight function frames.
|
||||
* Run the function prologue if necessary. This is always done in a
|
||||
* stub for heavyweight functions (including nesting outer functions).
|
||||
*/
|
||||
if (!script->function()->isHeavyweight() &&
|
||||
(analysis->usesScopeChain() || script->nesting()))
|
||||
{
|
||||
RegisterID t0 = Registers::ReturnReg;
|
||||
Jump hasScope = masm.branchTest32(Assembler::NonZero,
|
||||
FrameFlagsAddress(), Imm32(StackFrame::HAS_SCOPECHAIN));
|
||||
masm.loadPayload(Address(JSFrameReg, StackFrame::offsetOfCallee(script->function())), t0);
|
||||
masm.loadPtr(Address(t0, JSFunction::offsetOfEnvironment()), t0);
|
||||
masm.storePtr(t0, Address(JSFrameReg, StackFrame::offsetOfScopeChain()));
|
||||
hasScope.linkTo(masm.label(), &masm);
|
||||
JS_ASSERT_IF(nesting && nesting->children, script->function()->isHeavyweight());
|
||||
if (script->function()->isHeavyweight()) {
|
||||
prepareStubCall(Uses(0));
|
||||
INLINE_STUBCALL(stubs::FunctionFramePrologue, REJOIN_FUNCTION_PROLOGUE);
|
||||
} else {
|
||||
/*
|
||||
* Load the scope chain into the frame if it will be needed by NAME
|
||||
* opcodes or by the nesting prologue below. The scope chain is
|
||||
* always set for global and eval frames, and will have been set by
|
||||
* CreateFunCallObject for heavyweight function frames.
|
||||
*/
|
||||
if (analysis->usesScopeChain() || nesting) {
|
||||
RegisterID t0 = Registers::ReturnReg;
|
||||
Jump hasScope = masm.branchTest32(Assembler::NonZero,
|
||||
FrameFlagsAddress(), Imm32(StackFrame::HAS_SCOPECHAIN));
|
||||
masm.loadPayload(Address(JSFrameReg, StackFrame::offsetOfCallee(script->function())), t0);
|
||||
masm.loadPtr(Address(t0, JSFunction::offsetOfEnvironment()), t0);
|
||||
masm.storePtr(t0, Address(JSFrameReg, StackFrame::offsetOfScopeChain()));
|
||||
hasScope.linkTo(masm.label(), &masm);
|
||||
}
|
||||
|
||||
if (nesting) {
|
||||
/*
|
||||
* Inline the common case for the nesting prologue: the
|
||||
* function is a non-heavyweight inner function with no
|
||||
* children of its own. We ensure during inference that the
|
||||
* outer function does not add scope objects for 'let' or
|
||||
* 'with', so that the frame's scope chain will be
|
||||
* the parent's call object, and if it differs from the
|
||||
* parent's current activation then the parent is reentrant.
|
||||
*/
|
||||
JSScript *parent = nesting->parent;
|
||||
JS_ASSERT(parent);
|
||||
JS_ASSERT_IF(parent->hasAnalysis() && parent->analysis()->ranBytecode(),
|
||||
!parent->analysis()->addsScopeObjects());
|
||||
|
||||
RegisterID t0 = Registers::ReturnReg;
|
||||
masm.move(ImmPtr(&parent->nesting()->activeCall), t0);
|
||||
masm.loadPtr(Address(t0), t0);
|
||||
|
||||
Address scopeChain(JSFrameReg, StackFrame::offsetOfScopeChain());
|
||||
Jump mismatch = masm.branchPtr(Assembler::NotEqual, t0, scopeChain);
|
||||
masm.add32(Imm32(1), AbsoluteAddress(&nesting->activeFrames));
|
||||
|
||||
stubcc.linkExitDirect(mismatch, stubcc.masm.label());
|
||||
OOL_STUBCALL(stubs::FunctionFramePrologue, REJOIN_FUNCTION_PROLOGUE);
|
||||
stubcc.crossJump(stubcc.masm.jump(), masm.label());
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
|
@ -1123,46 +1161,9 @@ mjit::Compiler::generatePrologue()
|
|||
ensureDoubleArguments();
|
||||
}
|
||||
|
||||
/* Inline StackFrame::prologue. */
|
||||
if (script->isActiveEval && script->strictModeCode) {
|
||||
prepareStubCall(Uses(0));
|
||||
INLINE_STUBCALL(stubs::StrictEvalPrologue, REJOIN_EVAL_PROLOGUE);
|
||||
} else if (script->function()) {
|
||||
if (script->function()->isHeavyweight()) {
|
||||
prepareStubCall(Uses(0));
|
||||
INLINE_STUBCALL(stubs::HeavyweightFunctionPrologue, REJOIN_FUNCTION_PROLOGUE);
|
||||
} else if (types::TypeScriptNesting *nesting = script->nesting()) {
|
||||
/*
|
||||
* Inline the common case for the nesting prologue: the
|
||||
* function is a non-heavyweight inner function with no
|
||||
* children of its own. We ensure during inference that the
|
||||
* outer function does not add scope objects for 'let' or
|
||||
* 'with', so that the frame's scope chain will be
|
||||
* the parent's call object, and if it differs from the
|
||||
* parent's current activation then the parent is reentrant.
|
||||
*/
|
||||
JSScript *parent = nesting->parent;
|
||||
JS_ASSERT(parent);
|
||||
JS_ASSERT_IF(parent->hasAnalysis() && parent->analysis()->ranBytecode(),
|
||||
!parent->analysis()->addsScopeObjects());
|
||||
|
||||
RegisterID t0 = Registers::ReturnReg;
|
||||
masm.move(ImmPtr(&parent->nesting()->activeCall), t0);
|
||||
masm.loadPtr(Address(t0), t0);
|
||||
|
||||
Address scopeChain(JSFrameReg, StackFrame::offsetOfScopeChain());
|
||||
Jump mismatch = masm.branchPtr(Assembler::NotEqual, t0, scopeChain);
|
||||
masm.add32(Imm32(1), AbsoluteAddress(&nesting->activeFrames));
|
||||
|
||||
stubcc.linkExitDirect(mismatch, stubcc.masm.label());
|
||||
OOL_STUBCALL(stubs::TypeNestingPrologue, REJOIN_FUNCTION_PROLOGUE);
|
||||
stubcc.crossJump(stubcc.masm.jump(), masm.label());
|
||||
}
|
||||
|
||||
if (isConstructing) {
|
||||
if (!constructThis())
|
||||
return Compile_Error;
|
||||
}
|
||||
if (isConstructing) {
|
||||
if (!constructThis())
|
||||
return Compile_Error;
|
||||
}
|
||||
|
||||
if (debugMode()) {
|
||||
|
@ -1208,8 +1209,8 @@ void
|
|||
mjit::Compiler::markUndefinedLocals()
|
||||
{
|
||||
/*
|
||||
* Set locals to undefined. Skip locals which aren't closed and are known
|
||||
* to be defined before used,
|
||||
* Set locals to undefined, as in initCallFrameLatePrologue.
|
||||
* Skip locals which aren't closed and are known to be defined before used,
|
||||
*/
|
||||
for (uint32_t i = 0; i < script->nfixed; i++)
|
||||
markUndefinedLocal(0, i);
|
||||
|
@ -2783,8 +2784,6 @@ mjit::Compiler::generateMethod()
|
|||
uint32_t arg = GET_SLOTNO(PC);
|
||||
if (JSObject *singleton = pushedSingleton(0))
|
||||
frame.push(ObjectValue(*singleton));
|
||||
else if (script->argsObjAliasesFormals())
|
||||
jsop_aliasedArg(arg, /* get = */ true);
|
||||
else
|
||||
frame.pushArg(arg);
|
||||
}
|
||||
|
@ -2798,13 +2797,7 @@ mjit::Compiler::generateMethod()
|
|||
{
|
||||
jsbytecode *next = &PC[JSOP_SETARG_LENGTH];
|
||||
bool pop = JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next);
|
||||
|
||||
uint32_t arg = GET_SLOTNO(PC);
|
||||
if (script->argsObjAliasesFormals())
|
||||
jsop_aliasedArg(arg, /* get = */ false, pop);
|
||||
else
|
||||
frame.storeArg(arg, pop);
|
||||
|
||||
frame.storeArg(GET_SLOTNO(PC), pop);
|
||||
updateVarType();
|
||||
|
||||
if (pop) {
|
||||
|
@ -2815,11 +2808,26 @@ mjit::Compiler::generateMethod()
|
|||
}
|
||||
END_CASE(JSOP_SETARG)
|
||||
|
||||
BEGIN_CASE(JSOP_GETLOCAL)
|
||||
BEGIN_CASE(JSOP_CALLLOCAL)
|
||||
BEGIN_CASE(JSOP_GETALIASEDVAR)
|
||||
BEGIN_CASE(JSOP_CALLALIASEDVAR)
|
||||
{
|
||||
/* This is all temporary until bug 659577. */
|
||||
if (JSObject *singleton = pushedSingleton(0)) {
|
||||
frame.push(ObjectValue(*singleton));
|
||||
} else {
|
||||
ScopeCoordinate sc = ScopeCoordinate(PC);
|
||||
if (script->bindings.bindingIsArg(sc.binding))
|
||||
frame.pushArg(script->bindings.bindingToArg(sc.binding));
|
||||
else
|
||||
frame.pushLocal(script->bindings.bindingToLocal(sc.binding));
|
||||
}
|
||||
}
|
||||
END_CASE(JSOP_GETALIASEDVAR)
|
||||
|
||||
BEGIN_CASE(JSOP_GETLOCAL)
|
||||
BEGIN_CASE(JSOP_CALLLOCAL)
|
||||
{
|
||||
|
||||
/*
|
||||
* Update the var type unless we are about to pop the variable.
|
||||
* Sync is not guaranteed for types of dead locals, and GETLOCAL
|
||||
|
@ -2828,37 +2836,46 @@ mjit::Compiler::generateMethod()
|
|||
jsbytecode *next = &PC[JSOP_GETLOCAL_LENGTH];
|
||||
if (JSOp(*next) != JSOP_POP || analysis->jumpTarget(next))
|
||||
restoreVarType();
|
||||
uint32_t slot = GET_SLOTNO(PC);
|
||||
if (JSObject *singleton = pushedSingleton(0))
|
||||
frame.push(ObjectValue(*singleton));
|
||||
else if (JOF_OPTYPE(*PC) == JOF_SCOPECOORD)
|
||||
jsop_aliasedVar(ScopeCoordinate(PC), /* get = */ true);
|
||||
else
|
||||
frame.pushLocal(GET_SLOTNO(PC));
|
||||
|
||||
PC += GetBytecodeLength(PC);
|
||||
break;
|
||||
frame.pushLocal(slot);
|
||||
}
|
||||
END_CASE(JSOP_GETLOCAL)
|
||||
|
||||
BEGIN_CASE(JSOP_SETLOCAL)
|
||||
BEGIN_CASE(JSOP_SETALIASEDVAR)
|
||||
{
|
||||
jsbytecode *next = &PC[GetBytecodeLength(PC)];
|
||||
/* This is all temporary until bug 659577. */
|
||||
jsbytecode *next = &PC[JSOP_SETALIASEDVAR_LENGTH];
|
||||
bool pop = JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next);
|
||||
if (JOF_OPTYPE(*PC) == JOF_SCOPECOORD)
|
||||
jsop_aliasedVar(ScopeCoordinate(PC), /* get = */ false, pop);
|
||||
ScopeCoordinate sc = ScopeCoordinate(PC);
|
||||
if (script->bindings.bindingIsArg(sc.binding))
|
||||
frame.storeArg(script->bindings.bindingToArg(sc.binding), pop);
|
||||
else
|
||||
frame.storeLocal(GET_SLOTNO(PC), pop);
|
||||
frame.storeLocal(script->bindings.bindingToLocal(sc.binding), pop);
|
||||
updateVarType();
|
||||
|
||||
if (pop) {
|
||||
frame.pop();
|
||||
PC = next + JSOP_POP_LENGTH;
|
||||
PC += JSOP_SETALIASEDVAR_LENGTH + JSOP_POP_LENGTH;
|
||||
break;
|
||||
}
|
||||
}
|
||||
END_CASE(JSOP_SETALIASEDVAR)
|
||||
|
||||
PC = next;
|
||||
break;
|
||||
BEGIN_CASE(JSOP_SETLOCAL)
|
||||
{
|
||||
jsbytecode *next = &PC[JSOP_SETLOCAL_LENGTH];
|
||||
bool pop = JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next);
|
||||
frame.storeLocal(GET_SLOTNO(PC), pop);
|
||||
updateVarType();
|
||||
|
||||
if (pop) {
|
||||
frame.pop();
|
||||
PC += JSOP_SETLOCAL_LENGTH + JSOP_POP_LENGTH;
|
||||
break;
|
||||
}
|
||||
}
|
||||
END_CASE(JSOP_SETLOCAL)
|
||||
|
||||
|
@ -3756,12 +3773,47 @@ mjit::Compiler::emitReturn(FrameEntry *fe)
|
|||
return;
|
||||
}
|
||||
|
||||
/* Inline StackFrame::epilogue. */
|
||||
if (debugMode()) {
|
||||
prepareStubCall(Uses(0));
|
||||
INLINE_STUBCALL(stubs::Epilogue, REJOIN_NONE);
|
||||
} else if (script->function() && script->nesting()) {
|
||||
masm.sub32(Imm32(1), AbsoluteAddress(&script->nesting()->activeFrames));
|
||||
/*
|
||||
* Outside the mjit, activation objects (call objects and arguments objects) are put
|
||||
* by ContextStack::pop* members. For JSOP_RETURN, the interpreter only calls
|
||||
* popInlineFrame if fp != entryFrame since the VM protocol is that Invoke/Execute are
|
||||
* responsible for pushing/popping the initial frame. However, an mjit function
|
||||
* epilogue doesn't treat the initial StackFrame of its VMFrame specially: it always
|
||||
* puts activation objects. And furthermore, if the last mjit frame throws, the mjit
|
||||
* does *not* put the activation objects. So we can't assume any particular state of
|
||||
* puttedness upon exit from the mjit.
|
||||
*
|
||||
* To avoid double-putting, EnterMethodJIT calls updateEpilogueFlags to clear the
|
||||
* entry frame's hasArgsObj() and hasCallObj() flags if the given objects have already
|
||||
* been put.
|
||||
*/
|
||||
if (script->function()) {
|
||||
types::TypeScriptNesting *nesting = script->nesting();
|
||||
if (script->function()->isHeavyweight() || script->needsArgsObj() ||
|
||||
(nesting && nesting->children) || debugMode())
|
||||
{
|
||||
prepareStubCall(Uses(fe ? 1 : 0));
|
||||
INLINE_STUBCALL(stubs::FunctionFrameEpilogue, REJOIN_NONE);
|
||||
} else {
|
||||
/* if hasCallObj() */
|
||||
Jump putObjs = masm.branchTest32(Assembler::NonZero,
|
||||
Address(JSFrameReg, StackFrame::offsetOfFlags()),
|
||||
Imm32(StackFrame::HAS_CALL_OBJ));
|
||||
stubcc.linkExit(putObjs, Uses(frame.frameSlots()));
|
||||
|
||||
stubcc.leave();
|
||||
OOL_STUBCALL(stubs::FunctionFrameEpilogue, REJOIN_NONE);
|
||||
|
||||
emitReturnValue(&stubcc.masm, fe);
|
||||
emitFinalReturn(stubcc.masm);
|
||||
|
||||
/*
|
||||
* Do frame count balancing inline for inner functions in a nesting
|
||||
* with no children of their own.
|
||||
*/
|
||||
if (nesting)
|
||||
masm.sub32(Imm32(1), AbsoluteAddress(&nesting->activeFrames));
|
||||
}
|
||||
}
|
||||
|
||||
emitReturnValue(&masm, fe);
|
||||
|
@ -5647,7 +5699,7 @@ mjit::Compiler::jsop_bindname(PropertyName *name)
|
|||
analysis->resolveNameAccess(cx, NameToId(name), true);
|
||||
if (access.nesting) {
|
||||
RegisterID reg = frame.allocReg();
|
||||
CallObject **pobj = &access.nesting->activeCall;
|
||||
JSObject **pobj = &access.nesting->activeCall;
|
||||
masm.move(ImmPtr(pobj), reg);
|
||||
masm.loadPtr(Address(reg), reg);
|
||||
frame.pushTypedPayload(JSVAL_TYPE_OBJECT, reg);
|
||||
|
@ -5758,80 +5810,6 @@ mjit::Compiler::jsop_bindname(PropertyName *name)
|
|||
}
|
||||
#endif
|
||||
|
||||
void
|
||||
mjit::Compiler::jsop_aliasedArg(unsigned arg, bool get, bool poppedAfter)
|
||||
{
|
||||
RegisterID reg = frame.allocReg();
|
||||
masm.loadPtr(Address(JSFrameReg, StackFrame::offsetOfArgsObj()), reg);
|
||||
size_t dataOff = ArgumentsObject::getDataSlotOffset();
|
||||
masm.loadPrivate(Address(reg, dataOff), reg);
|
||||
int32_t argsOff = ArgumentsData::offsetOfArgs() + arg * sizeof(Value);
|
||||
masm.addPtr(Imm32(argsOff), reg, reg);
|
||||
if (get) {
|
||||
FrameEntry *fe = frame.getArg(arg);
|
||||
JSValueType type = fe->isTypeKnown() ? fe->getKnownType() : JSVAL_TYPE_UNKNOWN;
|
||||
frame.push(Address(reg), type, true /* = reuseBase */);
|
||||
} else {
|
||||
frame.storeTo(frame.peek(-1), Address(reg), poppedAfter);
|
||||
frame.freeReg(reg);
|
||||
}
|
||||
}
|
||||
|
||||
void
|
||||
mjit::Compiler::jsop_aliasedVar(ScopeCoordinate sc, bool get, bool poppedAfter)
|
||||
{
|
||||
RegisterID reg = frame.allocReg();
|
||||
masm.loadPtr(Address(JSFrameReg, StackFrame::offsetOfScopeChain()), reg);
|
||||
for (unsigned i = 0; i < sc.hops; i++)
|
||||
masm.loadPayload(Address(reg, ScopeObject::offsetOfEnclosingScope()), reg);
|
||||
|
||||
unsigned slot = ScopeObject::CALL_BLOCK_RESERVED_SLOTS + sc.slot;
|
||||
|
||||
/*
|
||||
* TODO bug 753158: Call and Block objects should use the same layout
|
||||
* strategy: up to the maximum numFixedSlots and overflow (if any) in
|
||||
* dynamic slots. For now, we special case for different layouts:
|
||||
*/
|
||||
Address addr;
|
||||
if (ScopeCoordinateBlockChain(script, PC)) {
|
||||
/*
|
||||
* Block objects use a fixed AllocKind which means an invariant number
|
||||
* of fixed slots. Any slot below the fixed slot count is inline, any
|
||||
* slot over is in the dynamic slots.
|
||||
*/
|
||||
uint32_t nfixed = gc::GetGCKindSlots(BlockObject::FINALIZE_KIND);
|
||||
if (nfixed <= slot) {
|
||||
masm.loadPtr(Address(reg, JSObject::offsetOfSlots()), reg);
|
||||
addr = Address(reg, (slot - nfixed) * sizeof(Value));
|
||||
} else {
|
||||
addr = Address(reg, JSObject::getFixedSlotOffset(slot));
|
||||
}
|
||||
} else {
|
||||
/*
|
||||
* Using special-case hackery in Shape::getChildBinding, CallObject
|
||||
* slots are either altogether in fixed slots or altogether in dynamic
|
||||
* slots (by having numFixed == RESERVED_SLOTS).
|
||||
*/
|
||||
if (script->bindings.lastShape()->numFixedSlots() <= slot) {
|
||||
masm.loadPtr(Address(reg, JSObject::offsetOfSlots()), reg);
|
||||
addr = Address(reg, sc.slot * sizeof(Value));
|
||||
} else {
|
||||
addr = Address(reg, JSObject::getFixedSlotOffset(slot));
|
||||
}
|
||||
}
|
||||
|
||||
if (get) {
|
||||
FrameEntry *fe = script->bindings.slotIsLocal(sc.slot)
|
||||
? frame.getLocal(script->bindings.slotToLocal(sc.slot))
|
||||
: frame.getArg(script->bindings.slotToArg(sc.slot));
|
||||
JSValueType type = fe->isTypeKnown() ? fe->getKnownType() : JSVAL_TYPE_UNKNOWN;
|
||||
frame.push(addr, type, true /* = reuseBase */);
|
||||
} else {
|
||||
frame.storeTo(frame.peek(-1), addr, poppedAfter);
|
||||
frame.freeReg(reg);
|
||||
}
|
||||
}
|
||||
|
||||
void
|
||||
mjit::Compiler::jsop_this()
|
||||
{
|
||||
|
|
|
@ -626,8 +626,8 @@ private:
|
|||
void jsop_bindname(PropertyName *name);
|
||||
void jsop_setglobal(uint32_t index);
|
||||
void jsop_getprop_slow(PropertyName *name, bool forPrototype = false);
|
||||
void jsop_aliasedArg(unsigned i, bool get, bool poppedAfter = false);
|
||||
void jsop_aliasedVar(ScopeCoordinate sc, bool get, bool poppedAfter = false);
|
||||
void jsop_getarg(uint32_t slot);
|
||||
void jsop_setarg(uint32_t slot, bool popped);
|
||||
void jsop_this();
|
||||
void emitReturn(FrameEntry *fe);
|
||||
void emitFinalReturn(Assembler &masm);
|
||||
|
|
|
@ -62,7 +62,7 @@ FindExceptionHandler(JSContext *cx)
|
|||
*/
|
||||
jsbytecode *pc = script->main() + tn->start + tn->length;
|
||||
cx->regs().pc = pc;
|
||||
cx->regs().sp = cx->regs().spForStackDepth(tn->stackDepth);
|
||||
cx->regs().sp = fp->base() + tn->stackDepth;
|
||||
|
||||
switch (tn->kind) {
|
||||
case JSTRY_CATCH:
|
||||
|
@ -119,6 +119,22 @@ FindExceptionHandler(JSContext *cx)
|
|||
/*
|
||||
* Clean up a frame and return.
|
||||
*/
|
||||
static void
|
||||
InlineReturn(VMFrame &f)
|
||||
{
|
||||
JS_ASSERT(f.fp() != f.entryfp);
|
||||
AssertValidFunctionScopeChainAtExit(f.fp());
|
||||
|
||||
f.cx->stack.popInlineFrame(f.regs);
|
||||
|
||||
DebugOnly<JSOp> op = JSOp(*f.regs.pc);
|
||||
JS_ASSERT(op == JSOP_CALL ||
|
||||
op == JSOP_NEW ||
|
||||
op == JSOP_EVAL ||
|
||||
op == JSOP_FUNCALL ||
|
||||
op == JSOP_FUNAPPLY);
|
||||
f.regs.pc += JSOP_CALL_LENGTH;
|
||||
}
|
||||
|
||||
void JS_FASTCALL
|
||||
stubs::SlowCall(VMFrame &f, uint32_t argc)
|
||||
|
@ -146,7 +162,7 @@ stubs::SlowNew(VMFrame &f, uint32_t argc)
|
|||
static inline bool
|
||||
CheckStackQuota(VMFrame &f)
|
||||
{
|
||||
JS_ASSERT(f.regs.stackDepth() == 0);
|
||||
JS_ASSERT(f.regs.sp == f.fp()->base());
|
||||
|
||||
f.stackLimit = f.cx->stack.space().getStackLimit(f.cx, DONT_REPORT_ERROR);
|
||||
if (f.stackLimit)
|
||||
|
@ -290,6 +306,10 @@ UncachedInlineCall(VMFrame &f, InitialFrameFlags initial,
|
|||
/* Finish the handoff to the new frame regs. */
|
||||
PreserveRegsGuard regsGuard(cx, regs);
|
||||
|
||||
/* Scope with a call object parented by callee's parent. */
|
||||
if (!regs.fp()->functionPrologue(cx))
|
||||
return false;
|
||||
|
||||
/*
|
||||
* If newscript was successfully compiled, run it. Skip for calls which
|
||||
* will be constructing a new type object for 'this'.
|
||||
|
@ -523,7 +543,7 @@ js_InternalThrow(VMFrame &f)
|
|||
}
|
||||
|
||||
|
||||
f.fp()->epilogue(f.cx);
|
||||
ScriptEpilogue(f.cx, f.fp(), false);
|
||||
|
||||
// Don't remove the last frame, this is the responsibility of
|
||||
// JaegerShot()'s caller. We only guarantee that ScriptEpilogue()
|
||||
|
@ -531,14 +551,8 @@ js_InternalThrow(VMFrame &f)
|
|||
if (f.entryfp == f.fp())
|
||||
break;
|
||||
|
||||
f.cx->stack.popInlineFrame(f.regs);
|
||||
DebugOnly<JSOp> op = JSOp(*f.regs.pc);
|
||||
JS_ASSERT(op == JSOP_CALL ||
|
||||
op == JSOP_NEW ||
|
||||
op == JSOP_EVAL ||
|
||||
op == JSOP_FUNCALL ||
|
||||
op == JSOP_FUNAPPLY);
|
||||
f.regs.pc += JSOP_CALL_LENGTH;
|
||||
JS_ASSERT(&cx->regs() == &f.regs);
|
||||
InlineReturn(f);
|
||||
}
|
||||
|
||||
JS_ASSERT(&cx->regs() == &f.regs);
|
||||
|
@ -601,7 +615,7 @@ stubs::CreateThis(VMFrame &f, JSObject *proto)
|
|||
JSObject *obj = js_CreateThisForFunctionWithProto(cx, callee, proto);
|
||||
if (!obj)
|
||||
THROW();
|
||||
fp->thisValue() = ObjectValue(*obj);
|
||||
fp->formalArgs()[-1].setObject(*obj);
|
||||
}
|
||||
|
||||
void JS_FASTCALL
|
||||
|
@ -692,9 +706,7 @@ FinishVarIncOp(VMFrame &f, RejoinState rejoin, Value ov, Value nv, Value *vp)
|
|||
const JSCodeSpec *cs = &js_CodeSpec[op];
|
||||
|
||||
unsigned i = GET_SLOTNO(f.pc());
|
||||
Value *var = (JOF_TYPE(cs->format) == JOF_LOCAL)
|
||||
? &f.fp()->unaliasedLocal(i)
|
||||
: &f.fp()->unaliasedFormal(i);
|
||||
Value *var = (JOF_TYPE(cs->format) == JOF_LOCAL) ? f.fp()->slots() + i : &f.fp()->formalArg(i);
|
||||
|
||||
if (rejoin == REJOIN_POS) {
|
||||
double d = ov.toNumber();
|
||||
|
@ -710,7 +722,7 @@ FinishVarIncOp(VMFrame &f, RejoinState rejoin, Value ov, Value nv, Value *vp)
|
|||
extern "C" void *
|
||||
js_InternalInterpret(void *returnData, void *returnType, void *returnReg, js::VMFrame &f)
|
||||
{
|
||||
FrameRejoinState jsrejoin = f.fp()->rejoin();
|
||||
JSRejoinState jsrejoin = f.fp()->rejoin();
|
||||
RejoinState rejoin;
|
||||
if (jsrejoin & 0x1) {
|
||||
/* Rejoin after a scripted call finished. Restore f.regs.pc and f.regs.inlined (NULL) */
|
||||
|
@ -745,12 +757,12 @@ js_InternalInterpret(void *returnData, void *returnType, void *returnReg, js::VM
|
|||
* here. Update it to its value at the start of the opcode.
|
||||
*/
|
||||
Value *oldsp = f.regs.sp;
|
||||
f.regs.sp = f.regs.spForStackDepth(analysis->getCode(pc).stackDepth);
|
||||
f.regs.sp = fp->base() + analysis->getCode(pc).stackDepth;
|
||||
|
||||
jsbytecode *nextpc = pc + GetBytecodeLength(pc);
|
||||
Value *nextsp = NULL;
|
||||
if (nextpc != script->code + script->length && analysis->maybeCode(nextpc))
|
||||
nextsp = f.regs.spForStackDepth(analysis->getCode(nextpc).stackDepth);
|
||||
nextsp = fp->base() + analysis->getCode(nextpc).stackDepth;
|
||||
|
||||
JS_ASSERT(&cx->regs() == &f.regs);
|
||||
|
||||
|
@ -855,13 +867,18 @@ js_InternalInterpret(void *returnData, void *returnType, void *returnReg, js::VM
|
|||
f.regs.pc = nextpc;
|
||||
break;
|
||||
|
||||
case REJOIN_DEFLOCALFUN:
|
||||
fp->slots()[GET_SLOTNO(pc)].setObject(* (JSObject *) returnReg);
|
||||
f.regs.pc = nextpc;
|
||||
break;
|
||||
|
||||
case REJOIN_THIS_PROTOTYPE: {
|
||||
RootedObject callee(cx, &fp->callee());
|
||||
JSObject *proto = f.regs.sp[0].isObject() ? &f.regs.sp[0].toObject() : NULL;
|
||||
JSObject *obj = js_CreateThisForFunctionWithProto(cx, callee, proto);
|
||||
if (!obj)
|
||||
return js_InternalThrow(f);
|
||||
fp->thisValue() = ObjectValue(*obj);
|
||||
fp->formalArgs()[-1].setObject(*obj);
|
||||
|
||||
if (Probes::callTrackingActive(cx))
|
||||
Probes::enterJSFun(f.cx, f.fp()->maybeFun(), f.fp()->script());
|
||||
|
@ -885,57 +902,42 @@ js_InternalInterpret(void *returnData, void *returnType, void *returnReg, js::VM
|
|||
break;
|
||||
}
|
||||
|
||||
/*
|
||||
* Each of these cases indicates a point of progress through
|
||||
* generatePrologue. Execute the rest of the prologue here.
|
||||
*/
|
||||
case REJOIN_CHECK_ARGUMENTS:
|
||||
/*
|
||||
* Do all the work needed in arity check JIT prologues after the
|
||||
* arguments check occurs (FixupArity has been called if needed, but
|
||||
* the stack check and late prologue have not been performed.
|
||||
*/
|
||||
if (!CheckStackQuota(f))
|
||||
return js_InternalThrow(f);
|
||||
fp->initVarsToUndefined();
|
||||
|
||||
SetValueRangeToUndefined(fp->slots(), script->nfixed);
|
||||
|
||||
if (!fp->functionPrologue(cx))
|
||||
return js_InternalThrow(f);
|
||||
/* FALLTHROUGH */
|
||||
|
||||
case REJOIN_FUNCTION_PROLOGUE:
|
||||
fp->scopeChain();
|
||||
if (!fp->prologue(cx, types::UseNewTypeAtEntry(cx, fp)))
|
||||
|
||||
/* Construct the 'this' object for the frame if necessary. */
|
||||
if (!ScriptPrologueOrGeneratorResume(cx, fp, types::UseNewTypeAtEntry(cx, fp)))
|
||||
return js_InternalThrow(f);
|
||||
|
||||
/*
|
||||
* We would normally call ScriptDebugPrologue here. But in debug mode,
|
||||
* we only use JITted functions' invokeEntry entry point, whereas
|
||||
* CheckArgumentTypes (REJOIN_CHECK_ARGUMENTS) is only reachable via
|
||||
* the other entry points.
|
||||
* Having called ScriptPrologueOrGeneratorResume, we would normally call
|
||||
* ScriptDebugPrologue here. But in debug mode, we only use JITted
|
||||
* functions' invokeEntry entry point, whereas CheckArgumentTypes
|
||||
* (REJOIN_CHECK_ARGUMENTS) and FunctionFramePrologue
|
||||
* (REJOIN_FUNCTION_PROLOGUE) are only reachable via the other entry
|
||||
* points. So we should never need either of these rejoin tails in debug
|
||||
* mode.
|
||||
*
|
||||
* If we fix bug 699196 ("Debug mode code could use inline caches
|
||||
* now"), then this case will become reachable again.
|
||||
* now"), then these cases will become reachable again.
|
||||
*/
|
||||
JS_ASSERT(!cx->compartment->debugMode());
|
||||
break;
|
||||
|
||||
/* Finish executing the tail of generatePrologue. */
|
||||
case REJOIN_FUNCTION_PROLOGUE:
|
||||
if (fp->isConstructing()) {
|
||||
JS_ASSERT(false);
|
||||
RootedObject callee(cx, &fp->callee());
|
||||
JSObject *obj = js_CreateThisForFunction(cx, callee, types::UseNewTypeAtEntry(cx, fp));
|
||||
if (!obj)
|
||||
return js_InternalThrow(f);
|
||||
fp->functionThis() = ObjectValue(*obj);
|
||||
}
|
||||
/* FALLTHROUGH */
|
||||
case REJOIN_EVAL_PROLOGUE:
|
||||
if (cx->compartment->debugMode()) {
|
||||
Probes::enterJSFun(cx, fp->maybeFun(), fp->script());
|
||||
JSTrapStatus status = ScriptDebugPrologue(cx, fp);
|
||||
switch (status) {
|
||||
case JSTRAP_CONTINUE:
|
||||
break;
|
||||
case JSTRAP_RETURN:
|
||||
return f.cx->jaegerRuntime().forceReturnFromFastCall();
|
||||
case JSTRAP_ERROR:
|
||||
case JSTRAP_THROW:
|
||||
return js_InternalThrow(f);
|
||||
default:
|
||||
JS_NOT_REACHED("bad ScriptDebugPrologue status");
|
||||
}
|
||||
}
|
||||
break;
|
||||
|
||||
case REJOIN_CALL_PROLOGUE:
|
||||
|
@ -1058,7 +1060,7 @@ js_InternalInterpret(void *returnData, void *returnType, void *returnReg, js::VM
|
|||
|
||||
if (nextDepth == UINT32_MAX)
|
||||
nextDepth = analysis->getCode(f.regs.pc).stackDepth;
|
||||
f.regs.sp = f.regs.spForStackDepth(nextDepth);
|
||||
f.regs.sp = fp->base() + nextDepth;
|
||||
|
||||
/*
|
||||
* Monitor the result of the previous op when finishing a JOF_TYPESET op.
|
||||
|
|
|
@ -1056,6 +1056,10 @@ mjit::EnterMethodJIT(JSContext *cx, StackFrame *fp, void *code, Value *stackLimi
|
|||
fp->markReturnValue();
|
||||
}
|
||||
|
||||
/* See comment in mjit::Compiler::emitReturn. */
|
||||
if (fp->isFunctionFrame())
|
||||
fp->updateEpilogueFlags();
|
||||
|
||||
return ok ? Jaeger_Returned : Jaeger_Throwing;
|
||||
}
|
||||
|
||||
|
|
|
@ -109,7 +109,7 @@ struct VMFrame
|
|||
Value *stackLimit;
|
||||
StackFrame *entryfp;
|
||||
FrameRegs *oldregs;
|
||||
FrameRejoinState stubRejoin; /* How to rejoin if inside a call from an IC stub. */
|
||||
JSRejoinState stubRejoin; /* How to rejoin if inside a call from an IC stub. */
|
||||
|
||||
#if defined(JS_CPU_X86)
|
||||
void *unused0, *unused1; /* For 16 byte alignment */
|
||||
|
@ -294,6 +294,9 @@ enum RejoinState {
|
|||
REJOIN_PUSH_BOOLEAN,
|
||||
REJOIN_PUSH_OBJECT,
|
||||
|
||||
/* Call returns an object, which should be assigned to a local per the current bytecode. */
|
||||
REJOIN_DEFLOCALFUN,
|
||||
|
||||
/*
|
||||
* During the prologue of constructing scripts, after the function's
|
||||
* .prototype property has been fetched.
|
||||
|
@ -307,10 +310,9 @@ enum RejoinState {
|
|||
REJOIN_CHECK_ARGUMENTS,
|
||||
|
||||
/*
|
||||
* The script's jitcode was discarded during one of the following steps of
|
||||
* a frame's prologue.
|
||||
* The script's jitcode was discarded after marking an outer function as
|
||||
* reentrant or due to a GC while creating a call object.
|
||||
*/
|
||||
REJOIN_EVAL_PROLOGUE,
|
||||
REJOIN_FUNCTION_PROLOGUE,
|
||||
|
||||
/*
|
||||
|
@ -337,14 +339,14 @@ enum RejoinState {
|
|||
};
|
||||
|
||||
/* Get the rejoin state for a StackFrame after returning from a scripted call. */
|
||||
static inline FrameRejoinState
|
||||
static inline JSRejoinState
|
||||
ScriptedRejoin(uint32_t pcOffset)
|
||||
{
|
||||
return REJOIN_SCRIPTED | (pcOffset << 1);
|
||||
}
|
||||
|
||||
/* Get the rejoin state for a StackFrame after returning from a stub call. */
|
||||
static inline FrameRejoinState
|
||||
static inline JSRejoinState
|
||||
StubRejoin(RejoinState rejoin)
|
||||
{
|
||||
return rejoin << 1;
|
||||
|
|
|
@@ -1054,7 +1054,7 @@ ic::SplatApplyArgs(VMFrame &f)
THROWV(false);

/* Steps 7-8. */
f.regs.fp()->forEachUnaliasedActual(CopyTo(f.regs.sp));
f.regs.fp()->forEachCanonicalActualArg(CopyTo(f.regs.sp));

f.regs.sp += length;
f.u.call.dynamicArgc = length;

@@ -294,6 +294,8 @@ class SetPropCompiler : public PICStubCompiler

JS_ASSERT_IF(!shape->hasDefaultSetter(), obj->isCall());

MaybeJump skipOver;

if (adding) {
JS_ASSERT(shape->hasSlot());
pic.shapeRegHasBaseShape = false;

@@ -351,11 +353,29 @@ class SetPropCompiler : public PICStubCompiler
// then we can rely on fun->nargs remaining invariant.
JSFunction *fun = obj->asCall().getCalleeFunction();
uint16_t slot = uint16_t(shape->shortid());
if (shape->setterOp() == CallObject::setVarOp)
slot += fun->nargs;
slot += CallObject::RESERVED_SLOTS;
Address address = masm.objPropAddress(obj, pic.objReg, slot);
masm.storeValue(pic.u.vr, address);

/* Guard that the call object has a frame. */
masm.loadObjPrivate(pic.objReg, pic.shapeReg, obj->numFixedSlots());
Jump escapedFrame = masm.branchTestPtr(Assembler::Zero, pic.shapeReg, pic.shapeReg);

{
Address addr(pic.shapeReg, shape->setterOp() == CallObject::setArgOp
? StackFrame::offsetOfFormalArg(fun, slot)
: StackFrame::offsetOfFixed(slot));
masm.storeValue(pic.u.vr, addr);
skipOver = masm.jump();
}

escapedFrame.linkTo(masm.label(), &masm);
{
if (shape->setterOp() == CallObject::setVarOp)
slot += fun->nargs;

slot += CallObject::RESERVED_SLOTS;
Address address = masm.objPropAddress(obj, pic.objReg, slot);

masm.storeValue(pic.u.vr, address);
}

pic.shapeRegHasBaseShape = false;
}

@@ -390,6 +410,8 @@ class SetPropCompiler : public PICStubCompiler
for (Jump *pj = slowExits.begin(); pj != slowExits.end(); ++pj)
buffer.link(*pj, pic.slowPathStart);
buffer.link(done, pic.fastPathRejoin);
if (skipOver.isSet())
buffer.link(skipOver.get(), pic.fastPathRejoin);
CodeLocationLabel cs = buffer.finalize(f);
JaegerSpew(JSpew_PICs, "generate setprop stub %p %p %d at %p\n",
(void*)&pic,

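A rough illustration of the slot arithmetic the SetProp stub above emits for the escaped-frame path; the helper name and signature are hypothetical, while the layout (formals first, then vars, both after CallObject::RESERVED_SLOTS) comes directly from the code shown.

/* Sketch only: mirrors "slot += nargs" (for vars) and "slot += RESERVED_SLOTS" above. */
static unsigned
CallObjectSlotOf(unsigned shortid, bool isVar, unsigned nargs, unsigned reservedSlots)
{
    unsigned slot = shortid;
    if (isVar)
        slot += nargs;              /* vars are stored after the formal args */
    return slot + reservedSlots;    /* both follow the reserved scope-object slots */
}
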
@@ -736,9 +758,6 @@ struct GetPropHelper {
}
};

namespace js {
namespace mjit {

class GetPropCompiler : public PICStubCompiler
{
JSObject *obj;

@@ -1369,9 +1388,6 @@ class GetPropCompiler : public PICStubCompiler
}
};

} // namespace mjit
} // namespace js

class ScopeNameCompiler : public PICStubCompiler
{
private:

@@ -1550,9 +1566,9 @@ class ScopeNameCompiler : public PICStubCompiler

CallObjPropKind kind;
const Shape *shape = getprop.shape;
if (shape->setterOp() == CallObject::setArgOp) {
if (shape->getterOp() == CallObject::getArgOp) {
kind = ARG;
} else if (shape->setterOp() == CallObject::setVarOp) {
} else if (shape->getterOp() == CallObject::getVarOp) {
kind = VAR;
} else {
return disable("unhandled callobj sprop getter");

@@ -1570,16 +1586,38 @@ class ScopeNameCompiler : public PICStubCompiler
Jump finalShape = masm.branchPtr(Assembler::NotEqual, pic.shapeReg,
ImmPtr(getprop.holder->lastProperty()));

/* Get callobj's stack frame. */
masm.loadObjPrivate(pic.objReg, pic.shapeReg, getprop.holder->numFixedSlots());

JSFunction *fun = getprop.holder->asCall().getCalleeFunction();
unsigned slot = shape->shortid();
if (kind == VAR)
slot += fun->nargs;
slot += CallObject::RESERVED_SLOTS;
Address address = masm.objPropAddress(obj, pic.objReg, slot);
uint16_t slot = uint16_t(shape->shortid());

/* Safe because type is loaded first. */
masm.loadValueAsComponents(address, pic.shapeReg, pic.objReg);
Jump skipOver;
Jump escapedFrame = masm.branchTestPtr(Assembler::Zero, pic.shapeReg, pic.shapeReg);

/* Not-escaped case. */
{
Address addr(pic.shapeReg, kind == ARG ? StackFrame::offsetOfFormalArg(fun, slot)
: StackFrame::offsetOfFixed(slot));
masm.loadPayload(addr, pic.objReg);
masm.loadTypeTag(addr, pic.shapeReg);
skipOver = masm.jump();
}

escapedFrame.linkTo(masm.label(), &masm);

{
if (kind == VAR)
slot += fun->nargs;

slot += CallObject::RESERVED_SLOTS;
Address address = masm.objPropAddress(obj, pic.objReg, slot);

/* Safe because type is loaded first. */
masm.loadValueAsComponents(address, pic.shapeReg, pic.objReg);
}

skipOver.linkTo(masm.label(), &masm);
Jump done = masm.jump();

// All failures flow to here, so there is a common point to patch.

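For readability, the ARG/VAR classification that the hunk above switches from setter-based to getter-based comparisons, written out as plain C++. The enum and helper are illustrative only and would clash with the in-tree definitions if pasted verbatim.

/* Sketch only: mirrors the getterOp() comparisons in the hunk above. */
enum CallObjPropKind { ARG, VAR };

static bool
ClassifyCallObjProp(const Shape *shape, CallObjPropKind *kind)
{
    if (shape->getterOp() == CallObject::getArgOp)
        *kind = ARG;                /* binding is a formal argument */
    else if (shape->getterOp() == CallObject::getVarOp)
        *kind = VAR;                /* binding is a local variable */
    else
        return false;               /* unhandled getter: caller disables the stub */
    return true;
}
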
@ -912,7 +912,7 @@ stubs::InitElem(VMFrame &f, uint32_t last)
|
|||
FrameRegs ®s = f.regs;
|
||||
|
||||
/* Pop the element's value into rval. */
|
||||
JS_ASSERT(regs.stackDepth() >= 3);
|
||||
JS_ASSERT(regs.sp - f.fp()->base() >= 3);
|
||||
const Value &rref = regs.sp[-1];
|
||||
|
||||
/* Find the object being initialized at top of stack. */
|
||||
|
@ -1020,7 +1020,7 @@ InitPropOrMethod(VMFrame &f, PropertyName *name, JSOp op)
|
|||
FrameRegs ®s = f.regs;
|
||||
|
||||
/* Load the property's initial value into rval. */
|
||||
JS_ASSERT(regs.stackDepth() >= 2);
|
||||
JS_ASSERT(regs.sp - f.fp()->base() >= 2);
|
||||
Value rval;
|
||||
rval = regs.sp[-1];
|
||||
|
||||
|
@ -1048,7 +1048,7 @@ stubs::InitProp(VMFrame &f, PropertyName *name)
|
|||
void JS_FASTCALL
|
||||
stubs::IterNext(VMFrame &f, int32_t offset)
|
||||
{
|
||||
JS_ASSERT(f.regs.stackDepth() >= unsigned(offset));
|
||||
JS_ASSERT(f.regs.sp - offset >= f.fp()->base());
|
||||
JS_ASSERT(f.regs.sp[-offset].isObject());
|
||||
|
||||
JSObject *iterobj = &f.regs.sp[-offset].toObject();
|
||||
|
@ -1061,7 +1061,7 @@ stubs::IterNext(VMFrame &f, int32_t offset)
|
|||
JSBool JS_FASTCALL
|
||||
stubs::IterMore(VMFrame &f)
|
||||
{
|
||||
JS_ASSERT(f.regs.stackDepth() >= 1);
|
||||
JS_ASSERT(f.regs.sp - 1 >= f.fp()->base());
|
||||
JS_ASSERT(f.regs.sp[-1].isObject());
|
||||
|
||||
Value v;
|
||||
|
@ -1075,7 +1075,7 @@ stubs::IterMore(VMFrame &f)
|
|||
void JS_FASTCALL
|
||||
stubs::EndIter(VMFrame &f)
|
||||
{
|
||||
JS_ASSERT(f.regs.stackDepth() >= 1);
|
||||
JS_ASSERT(f.regs.sp - 1 >= f.fp()->base());
|
||||
if (!CloseIterator(f.cx, &f.regs.sp[-1].toObject()))
|
||||
THROW();
|
||||
}
|
||||
|
@ -1125,7 +1125,7 @@ stubs::Throw(VMFrame &f)
|
|||
void JS_FASTCALL
|
||||
stubs::Arguments(VMFrame &f)
|
||||
{
|
||||
ArgumentsObject *obj = ArgumentsObject::createExpected(f.cx, f.fp());
|
||||
ArgumentsObject *obj = ArgumentsObject::create(f.cx, f.fp());
|
||||
if (!obj)
|
||||
THROW();
|
||||
f.regs.sp[0] = ObjectValue(*obj);
|
||||
|
@ -1173,21 +1173,27 @@ void JS_FASTCALL
|
|||
stubs::EnterBlock(VMFrame &f, JSObject *obj)
|
||||
{
|
||||
FrameRegs ®s = f.regs;
|
||||
StackFrame *fp = f.fp();
|
||||
JS_ASSERT(!f.regs.inlined());
|
||||
|
||||
StaticBlockObject &blockObj = obj->asStaticBlock();
|
||||
if (!fp->pushBlock(f.cx, blockObj))
|
||||
THROW();
|
||||
|
||||
if (*regs.pc == JSOP_ENTERBLOCK) {
|
||||
JS_ASSERT(regs.stackDepth() == blockObj.stackDepth());
|
||||
JS_ASSERT(regs.stackDepth() + blockObj.slotCount() <= f.fp()->script()->nslots);
|
||||
JS_ASSERT(fp->base() + blockObj.stackDepth() == regs.sp);
|
||||
Value *vp = regs.sp + blockObj.slotCount();
|
||||
JS_ASSERT(regs.sp < vp);
|
||||
JS_ASSERT(vp <= fp->slots() + fp->script()->nslots);
|
||||
SetValueRangeToUndefined(regs.sp, vp);
|
||||
regs.sp = vp;
|
||||
} else if (*regs.pc == JSOP_ENTERLET0) {
|
||||
JS_ASSERT(regs.fp()->base() + blockObj.stackDepth() + blockObj.slotCount()
|
||||
== regs.sp);
|
||||
} else if (*regs.pc == JSOP_ENTERLET1) {
|
||||
JS_ASSERT(regs.fp()->base() + blockObj.stackDepth() + blockObj.slotCount()
|
||||
== regs.sp - 1);
|
||||
}
|
||||
|
||||
/* Clone block iff there are any closed-over variables. */
|
||||
if (!regs.fp()->pushBlock(f.cx, blockObj))
|
||||
THROW();
|
||||
}
|
||||
|
||||
void JS_FASTCALL
|
||||
|
@ -1516,7 +1522,7 @@ stubs::CheckArgumentTypes(VMFrame &f)
|
|||
if (!f.fp()->isConstructing())
|
||||
TypeScript::SetThis(f.cx, script, fp->thisValue());
|
||||
for (unsigned i = 0; i < fun->nargs; i++)
|
||||
TypeScript::SetArgument(f.cx, script, i, fp->unaliasedFormal(i, DONT_CHECK_ALIASING));
|
||||
TypeScript::SetArgument(f.cx, script, i, fp->formalArg(i));
|
||||
}
|
||||
|
||||
if (monitor.recompiled())
|
||||
|
@ -1546,7 +1552,7 @@ stubs::AssertArgumentTypes(VMFrame &f)
|
|||
}
|
||||
|
||||
for (unsigned i = 0; i < fun->nargs; i++) {
|
||||
Type type = GetValueType(f.cx, fp->unaliasedFormal(i, DONT_CHECK_ALIASING));
|
||||
Type type = GetValueType(f.cx, fp->formalArg(i));
|
||||
if (!TypeScript::ArgTypes(script, i)->hasType(type))
|
||||
TypeFailure(f.cx, "Missing type for arg %d: %s", i, TypeString(type));
|
||||
}
|
||||
|
@ -1603,32 +1609,16 @@ stubs::Exception(VMFrame &f)
|
|||
}
|
||||
|
||||
void JS_FASTCALL
|
||||
stubs::StrictEvalPrologue(VMFrame &f)
|
||||
stubs::FunctionFramePrologue(VMFrame &f)
|
||||
{
|
||||
JS_ASSERT(f.fp()->isStrictEvalFrame());
|
||||
CallObject *callobj = CallObject::createForStrictEval(f.cx, f.fp());
|
||||
if (!callobj)
|
||||
THROW();
|
||||
f.fp()->pushOnScopeChain(*callobj);
|
||||
}
|
||||
|
||||
void JS_FASTCALL
|
||||
stubs::HeavyweightFunctionPrologue(VMFrame &f)
|
||||
{
|
||||
if (!f.fp()->heavyweightFunctionPrologue(f.cx))
|
||||
if (!f.fp()->functionPrologue(f.cx))
|
||||
THROW();
|
||||
}
|
||||
|
||||
void JS_FASTCALL
|
||||
stubs::TypeNestingPrologue(VMFrame &f)
|
||||
stubs::FunctionFrameEpilogue(VMFrame &f)
|
||||
{
|
||||
types::NestingPrologue(f.cx, f.fp());
|
||||
}
|
||||
|
||||
void JS_FASTCALL
|
||||
stubs::Epilogue(VMFrame &f)
|
||||
{
|
||||
f.fp()->epilogue(f.cx);
|
||||
f.fp()->functionEpilogue(f.cx);
|
||||
}
|
||||
|
||||
void JS_FASTCALL
|
||||
|
@ -1636,15 +1626,17 @@ stubs::AnyFrameEpilogue(VMFrame &f)
|
|||
{
|
||||
/*
|
||||
* On the normal execution path, emitReturn calls ScriptDebugEpilogue
|
||||
* and inlines epilogue. This function implements forced early
|
||||
* and inlines ScriptEpilogue. This function implements forced early
|
||||
* returns, so it must have the same effect.
|
||||
*/
|
||||
bool ok = true;
|
||||
if (f.cx->compartment->debugMode())
|
||||
ok = js::ScriptDebugEpilogue(f.cx, f.fp(), ok);
|
||||
f.fp()->epilogue(f.cx);
|
||||
ok = ScriptEpilogue(f.cx, f.fp(), ok);
|
||||
if (!ok)
|
||||
THROW();
|
||||
if (f.fp()->isNonEvalFunctionFrame())
|
||||
f.fp()->functionEpilogue(f.cx);
|
||||
}
|
||||
|
||||
template <bool Clamped>
|
||||
|
|
|
@ -168,12 +168,10 @@ void JS_FASTCALL ConvertToTypedFloat(JSContext *cx, Value *vp);
|
|||
|
||||
void JS_FASTCALL Exception(VMFrame &f);
|
||||
|
||||
void JS_FASTCALL StrictEvalPrologue(VMFrame &f);
|
||||
void JS_FASTCALL HeavyweightFunctionPrologue(VMFrame &f);
|
||||
void JS_FASTCALL TypeNestingPrologue(VMFrame &f);
|
||||
void JS_FASTCALL FunctionFramePrologue(VMFrame &f);
|
||||
void JS_FASTCALL FunctionFrameEpilogue(VMFrame &f);
|
||||
|
||||
void JS_FASTCALL AnyFrameEpilogue(VMFrame &f);
|
||||
void JS_FASTCALL Epilogue(VMFrame &f);
|
||||
|
||||
JSObject * JS_FASTCALL
|
||||
NewDenseUnallocatedArray(VMFrame &f, uint32_t length);
|
||||
|
|
|
@ -1281,14 +1281,31 @@ AssertJit(JSContext *cx, unsigned argc, jsval *vp)
|
|||
static JSScript *
|
||||
ValueToScript(JSContext *cx, jsval v, JSFunction **funp = NULL)
|
||||
{
|
||||
JSFunction *fun = JS_ValueToFunction(cx, v);
|
||||
if (!fun)
|
||||
return NULL;
|
||||
JSScript *script = NULL;
|
||||
JSFunction *fun = NULL;
|
||||
|
||||
JSScript *script = fun->maybeScript();
|
||||
if (!script)
|
||||
JS_ReportErrorNumber(cx, my_GetErrorMessage, NULL, JSSMSG_SCRIPTS_ONLY);
|
||||
if (!JSVAL_IS_PRIMITIVE(v)) {
|
||||
JSObject *obj = JSVAL_TO_OBJECT(v);
|
||||
JSClass *clasp = JS_GetClass(obj);
|
||||
|
||||
if (clasp == Jsvalify(&GeneratorClass)) {
|
||||
if (JSGenerator *gen = (JSGenerator *) JS_GetPrivate(obj)) {
|
||||
fun = gen->floatingFrame()->fun();
|
||||
script = fun->script();
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (!script) {
|
||||
fun = JS_ValueToFunction(cx, v);
|
||||
if (!fun)
|
||||
return NULL;
|
||||
script = fun->maybeScript();
|
||||
if (!script) {
|
||||
JS_ReportErrorNumber(cx, my_GetErrorMessage, NULL,
|
||||
JSSMSG_SCRIPTS_ONLY);
|
||||
}
|
||||
}
|
||||
if (fun && funp)
|
||||
*funp = fun;
|
||||
|
||||
|
|
|
@ -10,10 +10,17 @@
|
|||
|
||||
#include "ArgumentsObject.h"
|
||||
|
||||
#include "ScopeObject-inl.h"
|
||||
|
||||
namespace js {
|
||||
|
||||
inline void
|
||||
ArgumentsObject::initInitialLength(uint32_t length)
|
||||
{
|
||||
JS_ASSERT(getFixedSlot(INITIAL_LENGTH_SLOT).isUndefined());
|
||||
initFixedSlot(INITIAL_LENGTH_SLOT, Int32Value(length << PACKED_BITS_COUNT));
|
||||
JS_ASSERT((getFixedSlot(INITIAL_LENGTH_SLOT).toInt32() >> PACKED_BITS_COUNT) == int32_t(length));
|
||||
JS_ASSERT(!hasOverriddenLength());
|
||||
}
|
||||
|
||||
inline uint32_t
|
||||
ArgumentsObject::initialLength() const
|
||||
{
|
||||
|
@ -32,67 +39,26 @@ ArgumentsObject::markLengthOverridden()
|
|||
inline bool
|
||||
ArgumentsObject::hasOverriddenLength() const
|
||||
{
|
||||
const Value &v = getFixedSlot(INITIAL_LENGTH_SLOT);
|
||||
const js::Value &v = getFixedSlot(INITIAL_LENGTH_SLOT);
|
||||
return v.toInt32() & LENGTH_OVERRIDDEN_BIT;
|
||||
}
|
||||
|
||||
inline void
|
||||
ArgumentsObject::initData(ArgumentsData *data)
|
||||
{
|
||||
JS_ASSERT(getFixedSlot(DATA_SLOT).isUndefined());
|
||||
initFixedSlot(DATA_SLOT, PrivateValue(data));
|
||||
}
|
||||
|
||||
inline ArgumentsData *
|
||||
ArgumentsObject::data() const
|
||||
{
|
||||
return reinterpret_cast<ArgumentsData *>(getFixedSlot(DATA_SLOT).toPrivate());
|
||||
}
|
||||
|
||||
inline JSScript *
|
||||
ArgumentsObject::containingScript() const
|
||||
{
|
||||
return data()->script;
|
||||
}
|
||||
|
||||
inline const Value &
|
||||
ArgumentsObject::arg(unsigned i) const
|
||||
{
|
||||
JS_ASSERT(i < data()->numArgs);
|
||||
const Value &v = data()->args[i];
|
||||
JS_ASSERT(!v.isMagic(JS_FORWARD_TO_CALL_OBJECT));
|
||||
return v;
|
||||
}
|
||||
|
||||
inline void
|
||||
ArgumentsObject::setArg(unsigned i, const Value &v)
|
||||
{
|
||||
JS_ASSERT(i < data()->numArgs);
|
||||
HeapValue &lhs = data()->args[i];
|
||||
JS_ASSERT(!lhs.isMagic(JS_FORWARD_TO_CALL_OBJECT));
|
||||
lhs = v;
|
||||
}
|
||||
|
||||
inline const Value &
|
||||
ArgumentsObject::element(uint32_t i) const
|
||||
{
|
||||
JS_ASSERT(!isElementDeleted(i));
|
||||
const Value &v = data()->args[i];
|
||||
if (v.isMagic(JS_FORWARD_TO_CALL_OBJECT))
|
||||
return getFixedSlot(MAYBE_CALL_SLOT).toObject().asCall().arg(i);
|
||||
return v;
|
||||
}
|
||||
|
||||
inline void
|
||||
ArgumentsObject::setElement(uint32_t i, const Value &v)
|
||||
{
|
||||
JS_ASSERT(!isElementDeleted(i));
|
||||
HeapValue &lhs = data()->args[i];
|
||||
if (lhs.isMagic(JS_FORWARD_TO_CALL_OBJECT))
|
||||
getFixedSlot(MAYBE_CALL_SLOT).toObject().asCall().setArg(i, v);
|
||||
else
|
||||
lhs = v;
|
||||
return reinterpret_cast<js::ArgumentsData *>(getFixedSlot(DATA_SLOT).toPrivate());
|
||||
}
|
||||
|
||||
inline bool
|
||||
ArgumentsObject::isElementDeleted(uint32_t i) const
|
||||
{
|
||||
JS_ASSERT(i < data()->numArgs);
|
||||
if (i >= initialLength())
|
||||
return false;
|
||||
return IsBitArrayElementSet(data()->deletedBits, initialLength(), i);
|
||||
}
|
||||
|
||||
|
@ -108,17 +74,57 @@ ArgumentsObject::markElementDeleted(uint32_t i)
|
|||
SetBitArrayElement(data()->deletedBits, initialLength(), i);
|
||||
}
|
||||
|
||||
inline bool
|
||||
ArgumentsObject::maybeGetElement(uint32_t i, Value *vp)
|
||||
inline const js::Value &
|
||||
ArgumentsObject::element(uint32_t i) const
|
||||
{
|
||||
if (i >= initialLength() || isElementDeleted(i))
|
||||
return false;
|
||||
*vp = element(i);
|
||||
return true;
|
||||
JS_ASSERT(!isElementDeleted(i));
|
||||
return data()->slots[i];
|
||||
}
|
||||
|
||||
inline void
|
||||
ArgumentsObject::setElement(uint32_t i, const js::Value &v)
|
||||
{
|
||||
JS_ASSERT(!isElementDeleted(i));
|
||||
data()->slots[i] = v;
|
||||
}
|
||||
|
||||
inline bool
|
||||
ArgumentsObject::maybeGetElements(uint32_t start, uint32_t count, Value *vp)
|
||||
ArgumentsObject::getElement(uint32_t i, Value *vp)
|
||||
{
|
||||
if (i >= initialLength() || isElementDeleted(i))
|
||||
return false;
|
||||
|
||||
/*
|
||||
* If this arguments object has an associated stack frame, that contains
|
||||
* the canonical argument value. Note that strict arguments objects do not
|
||||
* alias named arguments and never have a stack frame.
|
||||
*/
|
||||
StackFrame *fp = maybeStackFrame();
|
||||
JS_ASSERT_IF(isStrictArguments(), !fp);
|
||||
if (fp)
|
||||
*vp = fp->canonicalActualArg(i);
|
||||
else
|
||||
*vp = element(i);
|
||||
return true;
|
||||
}
|
||||
|
||||
namespace detail {
|
||||
|
||||
struct STATIC_SKIP_INFERENCE CopyNonHoleArgsTo
|
||||
{
|
||||
CopyNonHoleArgsTo(ArgumentsObject *argsobj, Value *dst) : argsobj(*argsobj), dst(dst) {}
|
||||
ArgumentsObject &argsobj;
|
||||
Value *dst;
|
||||
bool operator()(uint32_t argi, Value *src) {
|
||||
*dst++ = *src;
|
||||
return true;
|
||||
}
|
||||
};
|
||||
|
||||
} /* namespace detail */
|
||||
|
||||
inline bool
|
||||
ArgumentsObject::getElements(uint32_t start, uint32_t count, Value *vp)
|
||||
{
|
||||
JS_ASSERT(start + count >= start);
|
||||
|
||||
|
@ -126,9 +132,33 @@ ArgumentsObject::maybeGetElements(uint32_t start, uint32_t count, Value *vp)
|
|||
if (start > length || start + count > length || isAnyElementDeleted())
|
||||
return false;
|
||||
|
||||
for (uint32_t i = start, end = start + count; i < end; ++i, ++vp)
|
||||
*vp = element(i);
|
||||
return true;
|
||||
StackFrame *fp = maybeStackFrame();
|
||||
|
||||
/* If there's no stack frame for this, argument values are in elements(). */
|
||||
if (!fp) {
|
||||
const Value *srcbeg = Valueify(data()->slots) + start;
|
||||
const Value *srcend = srcbeg + count;
|
||||
const Value *src = srcbeg;
|
||||
for (Value *dst = vp; src < srcend; ++dst, ++src)
|
||||
*dst = *src;
|
||||
return true;
|
||||
}
|
||||
|
||||
/* Otherwise, element values are on the stack. */
|
||||
JS_ASSERT(fp->numActualArgs() <= StackSpace::ARGS_LENGTH_MAX);
|
||||
return fp->forEachCanonicalActualArg(detail::CopyNonHoleArgsTo(this, vp), start, count);
|
||||
}
|
||||
|
||||
inline js::StackFrame *
|
||||
ArgumentsObject::maybeStackFrame() const
|
||||
{
|
||||
return reinterpret_cast<js::StackFrame *>(getFixedSlot(STACK_FRAME_SLOT).toPrivate());
|
||||
}
|
||||
|
||||
inline void
|
||||
ArgumentsObject::setStackFrame(StackFrame *frame)
|
||||
{
|
||||
setFixedSlot(STACK_FRAME_SLOT, PrivateValue(frame));
|
||||
}
|
||||
|
||||
inline size_t
|
||||
|
@ -137,7 +167,7 @@ ArgumentsObject::sizeOfMisc(JSMallocSizeOfFun mallocSizeOf) const
|
|||
return mallocSizeOf(data());
|
||||
}
|
||||
|
||||
inline const Value &
|
||||
inline const js::Value &
|
||||
NormalArgumentsObject::callee() const
|
||||
{
|
||||
return data()->callee;
|
||||
|
@ -149,6 +179,6 @@ NormalArgumentsObject::clearCallee()
|
|||
data()->callee.set(compartment(), MagicValue(JS_OVERWRITTEN_CALLEE));
|
||||
}
|
||||
|
||||
} /* namespace js */
|
||||
} // namespace js
|
||||
|
||||
#endif /* ArgumentsObject_inl_h___ */
|
||||
|
|
|
@ -22,20 +22,53 @@
|
|||
using namespace js;
|
||||
using namespace js::gc;
|
||||
|
||||
ArgumentsObject *
|
||||
ArgumentsObject::create(JSContext *cx, StackFrame *fp)
|
||||
struct PutArg
|
||||
{
|
||||
JSFunction &callee = fp->callee();
|
||||
RootedObject proto(cx, callee.global().getOrCreateObjectPrototype(cx));
|
||||
PutArg(JSCompartment *comp, ArgumentsObject &argsobj)
|
||||
: compartment(comp), argsobj(argsobj), dst(argsobj.data()->slots) {}
|
||||
JSCompartment *compartment;
|
||||
ArgumentsObject &argsobj;
|
||||
HeapValue *dst;
|
||||
bool operator()(unsigned i, Value *src) {
|
||||
JS_ASSERT(dst->isUndefined());
|
||||
if (!argsobj.isElementDeleted(i))
|
||||
dst->set(compartment, *src);
|
||||
++dst;
|
||||
return true;
|
||||
}
|
||||
};
|
||||
|
||||
void
|
||||
js_PutArgsObject(StackFrame *fp)
|
||||
{
|
||||
ArgumentsObject &argsobj = fp->argsObj();
|
||||
if (argsobj.isNormalArguments()) {
|
||||
JS_ASSERT(argsobj.maybeStackFrame() == fp);
|
||||
JSCompartment *comp = fp->compartment();
|
||||
fp->forEachCanonicalActualArg(PutArg(comp, argsobj));
|
||||
argsobj.setStackFrame(NULL);
|
||||
} else {
|
||||
JS_ASSERT(!argsobj.maybeStackFrame());
|
||||
}
|
||||
}
|
||||
|
||||
ArgumentsObject *
|
||||
ArgumentsObject::create(JSContext *cx, uint32_t argc, HandleObject callee)
|
||||
{
|
||||
JS_ASSERT(argc <= StackSpace::ARGS_LENGTH_MAX);
|
||||
JS_ASSERT(!callee->toFunction()->hasRest());
|
||||
|
||||
RootedObject proto(cx, callee->global().getOrCreateObjectPrototype(cx));
|
||||
if (!proto)
|
||||
return NULL;
|
||||
|
||||
RootedTypeObject type(cx);
|
||||
|
||||
type = proto->getNewType(cx);
|
||||
if (!type)
|
||||
return NULL;
|
||||
|
||||
bool strict = callee.inStrictMode();
|
||||
bool strict = callee->toFunction()->inStrictMode();
|
||||
Class *clasp = strict ? &StrictArgumentsObjectClass : &NormalArgumentsObjectClass;
|
||||
|
||||
RootedShape emptyArgumentsShape(cx);
|
||||
|
@ -46,76 +79,59 @@ ArgumentsObject::create(JSContext *cx, StackFrame *fp)
|
|||
if (!emptyArgumentsShape)
|
||||
return NULL;
|
||||
|
||||
unsigned numActuals = fp->numActualArgs();
|
||||
unsigned numFormals = fp->numFormalArgs();
|
||||
unsigned numDeletedWords = NumWordsForBitArrayOfLength(numActuals);
|
||||
unsigned numArgs = Max(numActuals, numFormals);
|
||||
unsigned numBytes = offsetof(ArgumentsData, args) +
|
||||
unsigned numDeletedWords = NumWordsForBitArrayOfLength(argc);
|
||||
unsigned numBytes = offsetof(ArgumentsData, slots) +
|
||||
numDeletedWords * sizeof(size_t) +
|
||||
numArgs * sizeof(Value);
|
||||
argc * sizeof(Value);
|
||||
|
||||
ArgumentsData *data = (ArgumentsData *)cx->malloc_(numBytes);
|
||||
if (!data)
|
||||
return NULL;
|
||||
|
||||
data->numArgs = numArgs;
|
||||
data->callee.init(ObjectValue(callee));
|
||||
data->script = fp->script();
|
||||
|
||||
/* Copy [0, numArgs) into data->slots. */
|
||||
HeapValue *dst = data->args, *dstEnd = data->args + numArgs;
|
||||
for (Value *src = fp->formals(), *end = src + numFormals; src != end; ++src, ++dst)
|
||||
dst->init(*src);
|
||||
if (numActuals > numFormals) {
|
||||
for (Value *src = fp->actuals() + numFormals; dst != dstEnd; ++src, ++dst)
|
||||
dst->init(*src);
|
||||
} else if (numActuals < numFormals) {
|
||||
for (; dst != dstEnd; ++dst)
|
||||
dst->init(UndefinedValue());
|
||||
}
|
||||
|
||||
data->deletedBits = reinterpret_cast<size_t *>(dstEnd);
|
||||
data->callee.init(ObjectValue(*callee));
|
||||
for (HeapValue *vp = data->slots; vp != data->slots + argc; vp++)
|
||||
vp->init(UndefinedValue());
|
||||
data->deletedBits = (size_t *)(data->slots + argc);
|
||||
ClearAllBitArrayElements(data->deletedBits, numDeletedWords);
|
||||
|
||||
/* We have everything needed to fill in the object, so make the object. */
|
||||
JSObject *obj = JSObject::create(cx, FINALIZE_KIND, emptyArgumentsShape, type, NULL);
|
||||
if (!obj)
|
||||
return NULL;
|
||||
|
||||
obj->initFixedSlot(INITIAL_LENGTH_SLOT, Int32Value(numActuals << PACKED_BITS_COUNT));
|
||||
obj->initFixedSlot(DATA_SLOT, PrivateValue(data));
|
||||
|
||||
/*
|
||||
* If it exists and the arguments object aliases formals, the call object
|
||||
* is the canonical location for formals.
|
||||
*/
|
||||
JSScript *script = fp->script();
|
||||
if (fp->fun()->isHeavyweight() && script->argsObjAliasesFormals()) {
|
||||
obj->initFixedSlot(MAYBE_CALL_SLOT, ObjectValue(fp->callObj()));
|
||||
|
||||
/* Flag each slot that canonically lives in the callObj. */
|
||||
if (script->bindingsAccessedDynamically) {
|
||||
for (unsigned i = 0; i < numFormals; ++i)
|
||||
data->args[i] = MagicValue(JS_FORWARD_TO_CALL_OBJECT);
|
||||
} else {
|
||||
for (unsigned i = 0; i < script->numClosedArgs(); ++i)
|
||||
data->args[script->getClosedArg(i)] = MagicValue(JS_FORWARD_TO_CALL_OBJECT);
|
||||
}
|
||||
}
|
||||
|
||||
ArgumentsObject &argsobj = obj->asArguments();
|
||||
JS_ASSERT(argsobj.initialLength() == numActuals);
|
||||
JS_ASSERT(!argsobj.hasOverriddenLength());
|
||||
|
||||
JS_ASSERT(UINT32_MAX > (uint64_t(argc) << PACKED_BITS_COUNT));
|
||||
argsobj.initInitialLength(argc);
|
||||
argsobj.initData(data);
|
||||
argsobj.setStackFrame(NULL);
|
||||
|
||||
JS_ASSERT(argsobj.numFixedSlots() >= NormalArgumentsObject::RESERVED_SLOTS);
|
||||
JS_ASSERT(argsobj.numFixedSlots() >= StrictArgumentsObject::RESERVED_SLOTS);
|
||||
|
||||
return &argsobj;
|
||||
}
|
||||
|
||||
ArgumentsObject *
|
||||
ArgumentsObject::createExpected(JSContext *cx, StackFrame *fp)
|
||||
ArgumentsObject::create(JSContext *cx, StackFrame *fp)
|
||||
{
|
||||
JS_ASSERT(fp->script()->needsArgsObj());
|
||||
ArgumentsObject *argsobj = create(cx, fp);
|
||||
|
||||
ArgumentsObject *argsobj = ArgumentsObject::create(cx, fp->numActualArgs(),
|
||||
RootedObject(cx, &fp->callee()));
|
||||
if (!argsobj)
|
||||
return NULL;
|
||||
|
||||
/*
|
||||
* Strict mode functions have arguments objects that copy the initial
|
||||
* actual parameter values. Non-strict mode arguments use the frame pointer
|
||||
* to retrieve up-to-date parameter values.
|
||||
*/
|
||||
if (argsobj->isStrictArguments())
|
||||
fp->forEachCanonicalActualArg(PutArg(cx->compartment, *argsobj));
|
||||
else
|
||||
argsobj->setStackFrame(fp);
|
||||
|
||||
fp->initArgsObj(*argsobj);
|
||||
return argsobj;
|
||||
}
|
||||
|
@ -123,7 +139,12 @@ ArgumentsObject::createExpected(JSContext *cx, StackFrame *fp)
|
|||
ArgumentsObject *
|
||||
ArgumentsObject::createUnexpected(JSContext *cx, StackFrame *fp)
|
||||
{
|
||||
return create(cx, fp);
|
||||
ArgumentsObject *argsobj = create(cx, fp->numActualArgs(), RootedObject(cx, &fp->callee()));
|
||||
if (!argsobj)
|
||||
return NULL;
|
||||
|
||||
fp->forEachCanonicalActualArg(PutArg(cx->compartment, *argsobj));
|
||||
return argsobj;
|
||||
}
|
||||
|
||||
static JSBool
|
||||
|
@ -132,8 +153,10 @@ args_delProperty(JSContext *cx, HandleObject obj, HandleId id, Value *vp)
|
|||
ArgumentsObject &argsobj = obj->asArguments();
|
||||
if (JSID_IS_INT(id)) {
|
||||
unsigned arg = unsigned(JSID_TO_INT(id));
|
||||
if (arg < argsobj.initialLength() && !argsobj.isElementDeleted(arg))
|
||||
if (arg < argsobj.initialLength() && !argsobj.isElementDeleted(arg)) {
|
||||
argsobj.setElement(arg, UndefinedValue());
|
||||
argsobj.markElementDeleted(arg);
|
||||
}
|
||||
} else if (JSID_IS_ATOM(id, cx->runtime->atomState.lengthAtom)) {
|
||||
argsobj.markLengthOverridden();
|
||||
} else if (JSID_IS_ATOM(id, cx->runtime->atomState.calleeAtom)) {
|
||||
|
@ -155,15 +178,22 @@ ArgGetter(JSContext *cx, HandleObject obj, HandleId id, Value *vp)
|
|||
* prototype to point to another Arguments object with a bigger argc.
|
||||
*/
|
||||
unsigned arg = unsigned(JSID_TO_INT(id));
|
||||
if (arg < argsobj.initialLength() && !argsobj.isElementDeleted(arg))
|
||||
*vp = argsobj.element(arg);
|
||||
if (arg < argsobj.initialLength() && !argsobj.isElementDeleted(arg)) {
|
||||
if (StackFrame *fp = argsobj.maybeStackFrame()) {
|
||||
JS_ASSERT_IF(arg < fp->numFormalArgs(), fp->script()->formalIsAliased(arg));
|
||||
*vp = fp->canonicalActualArg(arg);
|
||||
} else {
|
||||
*vp = argsobj.element(arg);
|
||||
}
|
||||
}
|
||||
} else if (JSID_IS_ATOM(id, cx->runtime->atomState.lengthAtom)) {
|
||||
if (!argsobj.hasOverriddenLength())
|
||||
*vp = Int32Value(argsobj.initialLength());
|
||||
vp->setInt32(argsobj.initialLength());
|
||||
} else {
|
||||
JS_ASSERT(JSID_IS_ATOM(id, cx->runtime->atomState.calleeAtom));
|
||||
if (!argsobj.callee().isMagic(JS_OVERWRITTEN_CALLEE))
|
||||
*vp = argsobj.callee();
|
||||
const Value &v = argsobj.callee();
|
||||
if (!v.isMagic(JS_OVERWRITTEN_CALLEE))
|
||||
*vp = v;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
@ -175,15 +205,20 @@ ArgSetter(JSContext *cx, HandleObject obj, HandleId id, JSBool strict, Value *vp
|
|||
return true;
|
||||
|
||||
NormalArgumentsObject &argsobj = obj->asNormalArguments();
|
||||
JSScript *script = argsobj.containingScript();
|
||||
|
||||
if (JSID_IS_INT(id)) {
|
||||
unsigned arg = unsigned(JSID_TO_INT(id));
|
||||
if (arg < argsobj.initialLength() && !argsobj.isElementDeleted(arg)) {
|
||||
argsobj.setElement(arg, *vp);
|
||||
if (arg < script->function()->nargs)
|
||||
types::TypeScript::SetArgument(cx, script, arg, *vp);
|
||||
return true;
|
||||
if (arg < argsobj.initialLength()) {
|
||||
if (StackFrame *fp = argsobj.maybeStackFrame()) {
|
||||
JSScript *script = fp->functionScript();
|
||||
JS_ASSERT(script->needsArgsObj());
|
||||
if (arg < fp->numFormalArgs()) {
|
||||
JS_ASSERT(fp->script()->formalIsAliased(arg));
|
||||
types::TypeScript::SetArgument(cx, script, arg, *vp);
|
||||
}
|
||||
fp->canonicalActualArg(arg) = *vp;
|
||||
return true;
|
||||
}
|
||||
}
|
||||
} else {
|
||||
JS_ASSERT(JSID_IS_ATOM(id, cx->runtime->atomState.lengthAtom) ||
|
||||
|
@ -240,13 +275,13 @@ args_resolve(JSContext *cx, HandleObject obj, HandleId id, unsigned flags,
|
|||
bool
|
||||
NormalArgumentsObject::optimizedGetElem(JSContext *cx, StackFrame *fp, const Value &elem, Value *vp)
|
||||
{
|
||||
JS_ASSERT(!fp->script()->needsArgsObj());
|
||||
JS_ASSERT(!fp->hasArgsObj());
|
||||
|
||||
/* Fast path: no need to convert to id when elem is already an int in range. */
|
||||
if (elem.isInt32()) {
|
||||
int32_t i = elem.toInt32();
|
||||
if (i >= 0 && uint32_t(i) < fp->numActualArgs()) {
|
||||
*vp = fp->unaliasedActual(i);
|
||||
*vp = fp->canonicalActualArg(i);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
@ -260,7 +295,7 @@ NormalArgumentsObject::optimizedGetElem(JSContext *cx, StackFrame *fp, const Val
|
|||
if (JSID_IS_INT(id)) {
|
||||
int32_t i = JSID_TO_INT(id);
|
||||
if (i >= 0 && uint32_t(i) < fp->numActualArgs()) {
|
||||
*vp = fp->unaliasedActual(i);
|
||||
*vp = fp->canonicalActualArg(i);
|
||||
return true;
|
||||
}
|
||||
}
|
||||
|
@ -437,20 +472,34 @@ strictargs_enumerate(JSContext *cx, HandleObject obj)
|
|||
return true;
|
||||
}
|
||||
|
||||
void
|
||||
ArgumentsObject::finalize(FreeOp *fop, JSObject *obj)
|
||||
static void
|
||||
args_finalize(FreeOp *fop, JSObject *obj)
|
||||
{
|
||||
fop->free_(reinterpret_cast<void *>(obj->asArguments().data()));
|
||||
}
|
||||
|
||||
void
|
||||
ArgumentsObject::trace(JSTracer *trc, JSObject *obj)
|
||||
static void
|
||||
args_trace(JSTracer *trc, JSObject *obj)
|
||||
{
|
||||
ArgumentsObject &argsobj = obj->asArguments();
|
||||
ArgumentsData *data = argsobj.data();
|
||||
MarkValue(trc, &data->callee, js_callee_str);
|
||||
MarkValueRange(trc, data->numArgs, data->args, js_arguments_str);
|
||||
MarkScriptUnbarriered(trc, &data->script, "script");
|
||||
MarkValueRange(trc, argsobj.initialLength(), data->slots, js_arguments_str);
|
||||
|
||||
/*
|
||||
* If a generator's arguments or call object escapes, and the generator
|
||||
* frame is not executing, the generator object needs to be marked because
|
||||
* it is not otherwise reachable. An executing generator is rooted by its
|
||||
* invocation. To distinguish the two cases (which imply different access
|
||||
* paths to the generator object), we use the JSFRAME_FLOATING_GENERATOR
|
||||
* flag, which is only set on the StackFrame kept in the generator object's
|
||||
* JSGenerator.
|
||||
*/
|
||||
#if JS_HAS_GENERATORS
|
||||
StackFrame *fp = argsobj.maybeStackFrame();
|
||||
if (fp && fp->isFloatingGenerator())
|
||||
MarkObject(trc, &js_FloatingFrameToGenerator(fp)->obj, "generator object");
|
||||
#endif
|
||||
}
|
||||
|
||||
/*
|
||||
|
@ -472,12 +521,12 @@ Class js::NormalArgumentsObjectClass = {
|
|||
args_enumerate,
|
||||
reinterpret_cast<JSResolveOp>(args_resolve),
|
||||
JS_ConvertStub,
|
||||
ArgumentsObject::finalize,
|
||||
args_finalize, /* finalize */
|
||||
NULL, /* checkAccess */
|
||||
NULL, /* call */
|
||||
NULL, /* construct */
|
||||
NULL, /* hasInstance */
|
||||
ArgumentsObject::trace,
|
||||
args_trace,
|
||||
{
|
||||
NULL, /* equality */
|
||||
NULL, /* outerObject */
|
||||
|
@ -506,12 +555,12 @@ Class js::StrictArgumentsObjectClass = {
|
|||
strictargs_enumerate,
|
||||
reinterpret_cast<JSResolveOp>(strictargs_resolve),
|
||||
JS_ConvertStub,
|
||||
ArgumentsObject::finalize,
|
||||
args_finalize, /* finalize */
|
||||
NULL, /* checkAccess */
|
||||
NULL, /* call */
|
||||
NULL, /* construct */
|
||||
NULL, /* hasInstance */
|
||||
ArgumentsObject::trace,
|
||||
args_trace,
|
||||
{
|
||||
NULL, /* equality */
|
||||
NULL, /* outerObject */
|
||||
|
|
|
@ -16,25 +16,17 @@ namespace js {
|
|||
* ArgumentsData stores the initial indexed arguments provided to the
|
||||
* corresponding and that function itself. It is used to store arguments[i]
|
||||
* and arguments.callee -- up until the corresponding property is modified,
|
||||
* when the relevant value is flagged to memorialize the modification.
|
||||
* when the relevant value is overwritten with MagicValue(JS_ARGS_HOLE) to
|
||||
* memorialize the modification.
|
||||
*/
|
||||
struct ArgumentsData
|
||||
{
|
||||
/*
|
||||
* numArgs = Max(numFormalArgs, numActualArgs)
|
||||
* The array 'args' has numArgs elements.
|
||||
*/
|
||||
unsigned numArgs;
|
||||
|
||||
/*
|
||||
* arguments.callee, or MagicValue(JS_OVERWRITTEN_CALLEE) if
|
||||
* arguments.callee has been modified.
|
||||
* arguments.callee, or MagicValue(JS_ARGS_HOLE) if arguments.callee has
|
||||
* been modified.
|
||||
*/
|
||||
HeapValue callee;
|
||||
|
||||
/* The script for the function containing this arguments object. */
|
||||
JSScript *script;
|
||||
|
||||
/*
|
||||
* Pointer to an array of bits indicating, for every argument in 'slots',
|
||||
* whether the element has been deleted. See isElementDeleted comment.
|
||||
|
@ -42,25 +34,17 @@ struct ArgumentsData
|
|||
size_t *deletedBits;
|
||||
|
||||
/*
|
||||
* This array holds either the current argument value or the magic value
|
||||
* JS_FORWARD_TO_CALL_OBJECT. The latter means that the function has both a
|
||||
* CallObject and an ArgumentsObject AND the particular formal variable is
|
||||
* aliased by the CallObject. In such cases, the CallObject holds the
|
||||
* canonical value so any element access to the arguments object should
|
||||
* load the value out of the CallObject (which is pointed to by
|
||||
* MAYBE_CALL_SLOT).
|
||||
* Values of the arguments for this object, or MagicValue(JS_ARGS_HOLE) if
|
||||
* the indexed argument has been modified.
|
||||
*/
|
||||
HeapValue args[1];
|
||||
|
||||
/* For jit use: */
|
||||
static ptrdiff_t offsetOfArgs() { return offsetof(ArgumentsData, args); }
|
||||
HeapValue slots[1];
|
||||
};
|
||||
|
||||
/*
|
||||
* ArgumentsObject instances represent |arguments| objects created to store
|
||||
* function arguments when a function is called. It's expensive to create such
|
||||
* objects if they're never used, so they're only created when they are
|
||||
* potentially used.
|
||||
* objects if they're never used, so they're only created lazily. (See
|
||||
* js::StackFrame::setArgsObj and friends.)
|
||||
*
|
||||
* Arguments objects are complicated because, for non-strict mode code, they
|
||||
* must alias any named arguments which were provided to the function. Gnarly
|
||||
|
@ -91,27 +75,43 @@ struct ArgumentsData
|
|||
* been modified, then the current value of arguments.length is stored in
|
||||
* another slot associated with a new property.
|
||||
* DATA_SLOT
|
||||
* Stores an ArgumentsData*, described above.
|
||||
* Stores an ArgumentsData* storing argument values and the callee, or
|
||||
* sentinels for any of these if the corresponding property is modified.
|
||||
* Use callee() to access the callee/sentinel, and use
|
||||
* element/addressOfElement/setElement to access the values stored in
|
||||
* the ArgumentsData. If you're simply looking to get arguments[i],
|
||||
* however, use getElement or getElements to avoid spreading arguments
|
||||
* object implementation details around too much.
|
||||
* STACK_FRAME_SLOT
|
||||
* Stores the function's stack frame for non-strict arguments objects until
|
||||
* the function returns, when it is replaced with null. When an arguments
|
||||
* object is created on-trace its private is JS_ARGUMENTS_OBJECT_ON_TRACE,
|
||||
* and when the trace exits its private is replaced with the stack frame or
|
||||
* null, as appropriate. This slot is used by strict arguments objects as
|
||||
* well, but the slot is always null. Conceptually it would be better to
|
||||
* remove this oddity, but preserving it allows us to work with arguments
|
||||
* objects of either kind more abstractly, so we keep it for now.
|
||||
*/
|
||||
class ArgumentsObject : public JSObject
|
||||
{
|
||||
protected:
|
||||
static const uint32_t INITIAL_LENGTH_SLOT = 0;
|
||||
static const uint32_t DATA_SLOT = 1;
|
||||
static const uint32_t MAYBE_CALL_SLOT = 2;
|
||||
static const uint32_t STACK_FRAME_SLOT = 2;
|
||||
|
||||
/* Lower-order bit stolen from the length slot. */
|
||||
static const uint32_t LENGTH_OVERRIDDEN_BIT = 0x1;
|
||||
static const uint32_t PACKED_BITS_COUNT = 1;
|
||||
|
||||
static ArgumentsObject *create(JSContext *cx, StackFrame *fp);
|
||||
inline ArgumentsData *data() const;
|
||||
void initInitialLength(uint32_t length);
|
||||
void initData(ArgumentsData *data);
|
||||
static ArgumentsObject *create(JSContext *cx, uint32_t argc, HandleObject callee);
|
||||
|
||||
public:
|
||||
static const uint32_t RESERVED_SLOTS = 3;
|
||||
static const gc::AllocKind FINALIZE_KIND = gc::FINALIZE_OBJECT4;
|
||||
|
||||
/* Create an arguments object for a frame that is expecting them. */
|
||||
static ArgumentsObject *createExpected(JSContext *cx, StackFrame *fp);
|
||||
static ArgumentsObject *create(JSContext *cx, StackFrame *fp);
|
||||
|
||||
/*
|
||||
* Purposefully disconnect the returned arguments object from the frame
|
||||
|
@ -127,13 +127,33 @@ class ArgumentsObject : public JSObject
|
|||
*/
|
||||
inline uint32_t initialLength() const;
|
||||
|
||||
/* The script for the function containing this arguments object. */
|
||||
JSScript *containingScript() const;
|
||||
|
||||
/* True iff arguments.length has been assigned or its attributes changed. */
|
||||
inline bool hasOverriddenLength() const;
|
||||
inline void markLengthOverridden();
|
||||
|
||||
/*
|
||||
* Attempt to speedily and efficiently access the i-th element of this
|
||||
* arguments object. Return true if the element was speedily returned.
|
||||
* Return false if the element must be looked up more slowly using
|
||||
* getProperty or some similar method.
|
||||
*
|
||||
* NB: Returning false does not indicate error!
|
||||
*/
|
||||
inline bool getElement(uint32_t i, js::Value *vp);
|
||||
|
||||
/*
|
||||
* Attempt to speedily and efficiently get elements [start, start + count)
|
||||
* of this arguments object into the locations starting at |vp|. Return
|
||||
* true if all elements were copied. Return false if the elements must be
|
||||
* gotten more slowly, perhaps using a getProperty or some similar method
|
||||
* in a loop.
|
||||
*
|
||||
* NB: Returning false does not indicate error!
|
||||
*/
|
||||
inline bool getElements(uint32_t start, uint32_t count, js::Value *vp);
|
||||
|
||||
inline js::ArgumentsData *data() const;
|
||||
|
||||
/*
|
||||
* Because the arguments object is a real object, its elements may be
|
||||
* deleted. This is implemented by setting a 'deleted' flag for the arg
|
||||
|
@ -152,51 +172,18 @@ class ArgumentsObject : public JSObject
|
|||
inline bool isAnyElementDeleted() const;
|
||||
inline void markElementDeleted(uint32_t i);
|
||||
|
||||
/*
|
||||
* An ArgumentsObject serves two roles:
|
||||
* - a real object, accessed through regular object operations, e.g..,
|
||||
* JSObject::getElement corresponding to 'arguments[i]';
|
||||
* - a VM-internal data structure, storing the value of arguments (formal
|
||||
* and actual) that are accessed directly by the VM when a reading the
|
||||
* value of a formal parameter.
|
||||
* There are two ways to access the ArgumentsData::args corresponding to
|
||||
* these two use cases:
|
||||
* - object access should use elements(i) which will take care of
|
||||
* forwarding when the value is JS_FORWARD_TO_CALL_OBJECT;
|
||||
* - VM argument access should use arg(i) which will assert that the
|
||||
* value is not JS_FORWARD_TO_CALL_OBJECT (since, if such forwarding was
|
||||
* needed, the frontend should have emitted JSOP_GETALIASEDVAR.
|
||||
*/
|
||||
inline const Value &element(uint32_t i) const;
|
||||
inline void setElement(uint32_t i, const Value &v);
|
||||
inline const Value &arg(unsigned i) const;
|
||||
inline void setArg(unsigned i, const Value &v);
|
||||
inline const js::Value &element(uint32_t i) const;
|
||||
inline void setElement(uint32_t i, const js::Value &v);
|
||||
|
||||
/*
|
||||
* Attempt to speedily and efficiently access the i-th element of this
|
||||
* arguments object. Return true if the element was speedily returned.
|
||||
* Return false if the element must be looked up more slowly using
|
||||
* getProperty or some similar method. The second overload copies the
|
||||
* elements [start, start + count) into the locations starting at 'vp'.
|
||||
*
|
||||
* NB: Returning false does not indicate error!
|
||||
*/
|
||||
inline bool maybeGetElement(uint32_t i, Value *vp);
|
||||
inline bool maybeGetElements(uint32_t start, uint32_t count, js::Value *vp);
|
||||
/* The stack frame for this ArgumentsObject, if the frame is still active. */
|
||||
inline js::StackFrame *maybeStackFrame() const;
|
||||
inline void setStackFrame(js::StackFrame *frame);
|
||||
|
||||
/*
|
||||
* Measures things hanging off this ArgumentsObject that are counted by the
|
||||
* |miscSize| argument in JSObject::sizeOfExcludingThis().
|
||||
*/
|
||||
inline size_t sizeOfMisc(JSMallocSizeOfFun mallocSizeOf) const;
|
||||
|
||||
static void finalize(FreeOp *fop, JSObject *obj);
|
||||
static void trace(JSTracer *trc, JSObject *obj);
|
||||
|
||||
/* For jit use: */
|
||||
static size_t getDataSlotOffset() {
|
||||
return getFixedSlotOffset(DATA_SLOT);
|
||||
}
|
||||
};
|
||||
|
||||
class NormalArgumentsObject : public ArgumentsObject
|
||||
|
|
|
@ -3137,16 +3137,10 @@ DebuggerArguments_getArg(JSContext *cx, unsigned argc, Value *vp)
|
|||
*/
|
||||
JS_ASSERT(i >= 0);
|
||||
Value arg;
|
||||
if (unsigned(i) < fp->numActualArgs()) {
|
||||
if (unsigned(i) < fp->numFormalArgs() && fp->script()->formalLivesInCallObject(i))
|
||||
arg = fp->callObj().arg(i);
|
||||
else if (fp->script()->argsObjAliasesFormals())
|
||||
arg = fp->argsObj().arg(i);
|
||||
else
|
||||
arg = fp->unaliasedActual(i);
|
||||
} else {
|
||||
if (unsigned(i) < fp->numActualArgs())
|
||||
arg = fp->canonicalActualArg(i);
|
||||
else
|
||||
arg.setUndefined();
|
||||
}
|
||||
|
||||
if (!Debugger::fromChildJSObject(thisobj)->wrapDebuggeeValue(cx, &arg))
|
||||
return false;
|
||||
|
@ -3376,7 +3370,6 @@ js::EvaluateInEnv(JSContext *cx, Handle<Env*> env, StackFrame *fp, const jschar
|
|||
if (!script)
|
||||
return false;
|
||||
|
||||
script->isActiveEval = true;
|
||||
return ExecuteKernel(cx, script, *env, fp->thisValue(), EXECUTE_DEBUG, fp, rval);
|
||||
}
|
||||
|
||||
|
|
|
@ -14,11 +14,18 @@ namespace js {
|
|||
|
||||
inline
|
||||
ScopeCoordinate::ScopeCoordinate(jsbytecode *pc)
|
||||
: hops(GET_UINT16(pc)), slot(GET_UINT16(pc + 2))
|
||||
: hops(GET_UINT16(pc)), binding(GET_UINT16(pc + 2))
|
||||
{
|
||||
JS_ASSERT(JOF_OPTYPE(*pc) == JOF_SCOPECOORD);
|
||||
}
|
||||
|
||||
inline JSAtom *
|
||||
ScopeCoordinateAtom(JSScript *script, jsbytecode *pc)
|
||||
{
|
||||
JS_ASSERT(JOF_OPTYPE(*pc) == JOF_SCOPECOORD);
|
||||
return script->getAtom(GET_UINT32_INDEX(pc + 2 * sizeof(uint16_t)));
|
||||
}
|
||||
|
||||
inline JSObject &
|
||||
ScopeObject::enclosingScope() const
|
||||
{
|
||||
|
@ -35,22 +42,17 @@ ScopeObject::setEnclosingScope(JSContext *cx, HandleObject obj)
|
|||
return true;
|
||||
}
|
||||
|
||||
inline const Value &
|
||||
ScopeObject::aliasedVar(ScopeCoordinate sc)
|
||||
inline StackFrame *
|
||||
ScopeObject::maybeStackFrame() const
|
||||
{
|
||||
JS_ASSERT(isCall() || isClonedBlock());
|
||||
JS_STATIC_ASSERT(CALL_BLOCK_RESERVED_SLOTS == CallObject::RESERVED_SLOTS);
|
||||
JS_STATIC_ASSERT(CALL_BLOCK_RESERVED_SLOTS == BlockObject::RESERVED_SLOTS);
|
||||
return getSlot(CALL_BLOCK_RESERVED_SLOTS + sc.slot);
|
||||
JS_ASSERT(!isStaticBlock() && !isWith());
|
||||
return reinterpret_cast<StackFrame *>(JSObject::getPrivate());
|
||||
}
|
||||
|
||||
inline void
|
||||
ScopeObject::setAliasedVar(ScopeCoordinate sc, const Value &v)
|
||||
ScopeObject::setStackFrame(StackFrame *frame)
|
||||
{
|
||||
JS_ASSERT(isCall() || isClonedBlock());
|
||||
JS_STATIC_ASSERT(CALL_BLOCK_RESERVED_SLOTS == CallObject::RESERVED_SLOTS);
|
||||
JS_STATIC_ASSERT(CALL_BLOCK_RESERVED_SLOTS == BlockObject::RESERVED_SLOTS);
|
||||
setSlot(CALL_BLOCK_RESERVED_SLOTS + sc.slot, v);
|
||||
return setPrivate(frame);
|
||||
}
|
||||
|
||||
/*static*/ inline size_t
|
||||
|
@ -88,35 +90,61 @@ CallObject::getCalleeFunction() const
|
|||
}
|
||||
|
||||
inline const Value &
|
||||
CallObject::arg(unsigned i, MaybeCheckAliasing checkAliasing) const
|
||||
CallObject::arg(unsigned i) const
|
||||
{
|
||||
JS_ASSERT_IF(checkAliasing, getCalleeFunction()->script()->formalLivesInCallObject(i));
|
||||
JS_ASSERT(i < getCalleeFunction()->nargs);
|
||||
return getSlot(RESERVED_SLOTS + i);
|
||||
}
|
||||
|
||||
inline void
|
||||
CallObject::setArg(unsigned i, const Value &v, MaybeCheckAliasing checkAliasing)
|
||||
CallObject::setArg(unsigned i, const Value &v)
|
||||
{
|
||||
JS_ASSERT_IF(checkAliasing, getCalleeFunction()->script()->formalLivesInCallObject(i));
|
||||
JS_ASSERT(i < getCalleeFunction()->nargs);
|
||||
setSlot(RESERVED_SLOTS + i, v);
|
||||
}
|
||||
|
||||
inline void
|
||||
CallObject::initArgUnchecked(unsigned i, const Value &v)
|
||||
{
|
||||
JS_ASSERT(i < getCalleeFunction()->nargs);
|
||||
initSlotUnchecked(RESERVED_SLOTS + i, v);
|
||||
}
|
||||
|
||||
inline const Value &
|
||||
CallObject::var(unsigned i, MaybeCheckAliasing checkAliasing) const
|
||||
CallObject::var(unsigned i) const
|
||||
{
|
||||
JSFunction *fun = getCalleeFunction();
|
||||
JS_ASSERT_IF(checkAliasing, fun->script()->varIsAliased(i));
|
||||
JS_ASSERT(fun->nargs == fun->script()->bindings.numArgs());
|
||||
JS_ASSERT(i < fun->script()->bindings.numVars());
|
||||
return getSlot(RESERVED_SLOTS + fun->nargs + i);
|
||||
}
|
||||
|
||||
inline void
|
||||
CallObject::setVar(unsigned i, const Value &v, MaybeCheckAliasing checkAliasing)
|
||||
CallObject::setVar(unsigned i, const Value &v)
|
||||
{
|
||||
JSFunction *fun = getCalleeFunction();
|
||||
JS_ASSERT_IF(checkAliasing, fun->script()->varIsAliased(i));
|
||||
JS_ASSERT(fun->nargs == fun->script()->bindings.numArgs());
|
||||
JS_ASSERT(i < fun->script()->bindings.numVars());
|
||||
setSlot(RESERVED_SLOTS + fun->nargs + i, v);
|
||||
}
|
||||
|
||||
inline void
|
||||
CallObject::initVarUnchecked(unsigned i, const Value &v)
|
||||
{
|
||||
JSFunction *fun = getCalleeFunction();
|
||||
JS_ASSERT(fun->nargs == fun->script()->bindings.numArgs());
|
||||
JS_ASSERT(i < fun->script()->bindings.numVars());
|
||||
initSlotUnchecked(RESERVED_SLOTS + fun->nargs + i, v);
|
||||
}
|
||||
|
||||
inline void
|
||||
CallObject::copyValues(unsigned nargs, Value *argv, unsigned nvars, Value *slots)
|
||||
{
|
||||
JS_ASSERT(slotInRange(RESERVED_SLOTS + nargs + nvars, SENTINEL_ALLOWED));
|
||||
copySlotRange(RESERVED_SLOTS, argv, nargs);
|
||||
copySlotRange(RESERVED_SLOTS + nargs, slots, nvars);
|
||||
}
|
||||
|
||||
inline HeapSlotArray
|
||||
CallObject::argArray()
|
||||
{
|
||||
|
@ -158,27 +186,13 @@ BlockObject::slotCount() const
|
|||
return propertyCount();
|
||||
}
|
||||
|
||||
inline unsigned
|
||||
BlockObject::slotToFrameLocal(JSScript *script, unsigned i)
|
||||
{
|
||||
JS_ASSERT(i < slotCount());
|
||||
return script->nfixed + stackDepth() + i;
|
||||
}
|
||||
|
||||
inline const Value &
|
||||
inline HeapSlot &
|
||||
BlockObject::slotValue(unsigned i)
|
||||
{
|
||||
JS_ASSERT(i < slotCount());
|
||||
return getSlotRef(RESERVED_SLOTS + i);
|
||||
}
|
||||
|
||||
inline void
|
||||
BlockObject::setSlotValue(unsigned i, const Value &v)
|
||||
{
|
||||
JS_ASSERT(i < slotCount());
|
||||
setSlot(RESERVED_SLOTS + i, v);
|
||||
}
|
||||
|
||||
inline StaticBlockObject *
|
||||
StaticBlockObject::enclosingBlock() const
|
||||
{
|
||||
|
@ -203,7 +217,7 @@ inline void
|
|||
StaticBlockObject::setDefinitionParseNode(unsigned i, Definition *def)
|
||||
{
|
||||
JS_ASSERT(slotValue(i).isUndefined());
|
||||
setSlotValue(i, PrivateValue(def));
|
||||
slotValue(i).init(this, i, PrivateValue(def));
|
||||
}
|
||||
|
||||
inline Definition *
|
||||
|
@ -216,12 +230,9 @@ StaticBlockObject::maybeDefinitionParseNode(unsigned i)
|
|||
inline void
|
||||
StaticBlockObject::setAliased(unsigned i, bool aliased)
|
||||
{
|
||||
JS_ASSERT_IF(i > 0, slotValue(i-1).isBoolean());
|
||||
setSlotValue(i, BooleanValue(aliased));
|
||||
if (aliased && !needsClone()) {
|
||||
setSlotValue(0, MagicValue(JS_BLOCK_NEEDS_CLONE));
|
||||
JS_ASSERT(needsClone());
|
||||
}
|
||||
slotValue(i).init(this, i, BooleanValue(aliased));
|
||||
if (aliased)
|
||||
JSObject::setPrivate(reinterpret_cast<void *>(1));
|
||||
}
|
||||
|
||||
inline bool
|
||||
|
@ -231,9 +242,9 @@ StaticBlockObject::isAliased(unsigned i)
|
|||
}
|
||||
|
||||
inline bool
|
||||
StaticBlockObject::needsClone()
|
||||
StaticBlockObject::needsClone() const
|
||||
{
|
||||
return !slotValue(0).isFalse();
|
||||
return JSObject::getPrivate() != NULL;
|
||||
}
|
||||
|
||||
inline bool
|
||||
|
@ -249,19 +260,12 @@ ClonedBlockObject::staticBlock() const
|
|||
}
|
||||
|
||||
inline const Value &
|
||||
ClonedBlockObject::var(unsigned i, MaybeCheckAliasing checkAliasing)
|
||||
ClonedBlockObject::closedSlot(unsigned i)
|
||||
{
|
||||
JS_ASSERT_IF(checkAliasing, staticBlock().isAliased(i));
|
||||
JS_ASSERT(!maybeStackFrame());
|
||||
return slotValue(i);
|
||||
}
|
||||
|
||||
inline void
|
||||
ClonedBlockObject::setVar(unsigned i, const Value &v, MaybeCheckAliasing checkAliasing)
|
||||
{
|
||||
JS_ASSERT_IF(checkAliasing, staticBlock().isAliased(i));
|
||||
setSlotValue(i, v);
|
||||
}
|
||||
|
||||
} /* namespace js */
|
||||
|
||||
inline js::ScopeObject &
|
||||
|
|
The diff for this file is not shown because of its large size.
|
@@ -21,26 +21,24 @@ namespace js {
* given lexically-enclosing variable. A scope coordinate has two dimensions:
* - hops: the number of scope objects on the scope chain to skip
* - binding: which binding on the scope object
* Additionally (as described in jsopcode.tbl) there is a 'block' index, but
* this is only needed for decompilation/inference so it is not included in the
* main ScopeCoordinate struct: use ScopeCoordinate{BlockChain,Atom} instead.
*
* XXX: Until bug 659577 lands, this is all for show and all ScopeCoordinates
* have hops fixed at 0 and 'binding' is just the js::Bindings binding for args
* and vars and the stack depth for let bindings. Thus, aliased-var access
* touches the StackFrame like it always did and 'binding' must be first
* converted to either an arg or local slot (using Bindings::bindingToLocal or
* bindingToArg). With bug 659577, ScopeObject will have a 'var' function that
* takes a ScopeCoordinate.
*/
struct ScopeCoordinate
{
uint16_t hops;
uint16_t slot;

uint16_t binding;
inline ScopeCoordinate(jsbytecode *pc);
inline ScopeCoordinate() {}
};

/* Return the static block chain (or null) accessed by *pc. */
extern StaticBlockObject *
ScopeCoordinateBlockChain(JSScript *script, jsbytecode *pc);

/* Return the name being accessed by the given ALIASEDVAR op. */
extern PropertyName *
ScopeCoordinateName(JSScript *script, jsbytecode *pc);
inline JSAtom *
ScopeCoordinateAtom(JSScript *script, jsbytecode *pc);

/*****************************************************************************/
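A short sketch of how a JOF_SCOPECOORD operand is read back under this (backed-out) scheme, matching the ScopeCoordinate(jsbytecode *pc) constructor and ScopeCoordinateAtom shown earlier in this commit. The Decode helper and struct are illustrative only; GET_UINT16 and GET_UINT32_INDEX are the jsopcode.h accessors the shown code already uses.

/* Sketch only: the operand is laid out as hops, binding, then an atom index. */
struct DecodedScopeCoordinate
{
    uint16_t hops;       /* enclosing scope objects to skip */
    uint16_t binding;    /* which binding on that scope object */
    uint32_t atomIndex;  /* name of the variable, kept for decompilation */
};

static DecodedScopeCoordinate
DecodeScopeCoordinate(jsbytecode *pc)
{
    DecodedScopeCoordinate sc;
    sc.hops = GET_UINT16(pc);
    sc.binding = GET_UINT16(pc + 2);
    sc.atomIndex = GET_UINT32_INDEX(pc + 2 * sizeof(uint16_t));
    return sc;
}
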
@ -84,13 +82,13 @@ ScopeCoordinateName(JSScript *script, jsbytecode *pc);
|
|||
|
||||
class ScopeObject : public JSObject
|
||||
{
|
||||
/* Use maybeStackFrame() instead. */
|
||||
void *getPrivate() const;
|
||||
|
||||
protected:
|
||||
static const uint32_t SCOPE_CHAIN_SLOT = 0;
|
||||
|
||||
public:
|
||||
/* Number of reserved slots for both CallObject and BlockObject. */
|
||||
static const uint32_t CALL_BLOCK_RESERVED_SLOTS = 2;
|
||||
|
||||
/*
|
||||
* Since every scope chain terminates with a global object and GlobalObject
|
||||
* does not derive ScopeObject (it has a completely different layout), the
|
||||
|
@ -100,13 +98,12 @@ class ScopeObject : public JSObject
|
|||
inline bool setEnclosingScope(JSContext *cx, HandleObject obj);
|
||||
|
||||
/*
|
||||
* Get or set an aliased variable contained in this scope. Unaliased
|
||||
* variables should instead access the StackFrame. Aliased variable access
|
||||
* is primarily made through JOF_SCOPECOORD ops which is why these members
|
||||
* take a ScopeCoordinate instead of just the slot index.
|
||||
* The stack frame for this scope object, if the frame is still active.
|
||||
* Note: these members may not be called for a StaticBlockObject or
|
||||
* WithObject.
|
||||
*/
|
||||
inline const Value &aliasedVar(ScopeCoordinate sc);
|
||||
inline void setAliasedVar(ScopeCoordinate sc, const Value &v);
|
||||
inline StackFrame *maybeStackFrame() const;
|
||||
inline void setStackFrame(StackFrame *frame);
|
||||
|
||||
/* For jit access. */
|
||||
static inline size_t offsetOfEnclosingScope();
|
||||
|
@ -117,10 +114,10 @@ class CallObject : public ScopeObject
|
|||
static const uint32_t CALLEE_SLOT = 1;
|
||||
|
||||
static CallObject *
|
||||
create(JSContext *cx, JSScript *script, HandleObject enclosing, HandleFunction callee);
|
||||
create(JSContext *cx, JSScript *script, HandleObject enclosing, HandleObject callee);
|
||||
|
||||
public:
|
||||
static const uint32_t RESERVED_SLOTS = CALL_BLOCK_RESERVED_SLOTS;
|
||||
static const uint32_t RESERVED_SLOTS = 3;
|
||||
|
||||
static CallObject *createForFunction(JSContext *cx, StackFrame *fp);
|
||||
static CallObject *createForStrictEval(JSContext *cx, StackFrame *fp);
|
||||
|
@ -137,12 +134,14 @@ class CallObject : public ScopeObject
|
|||
inline void setCallee(JSObject *callee);
|
||||
|
||||
/* Returns the formal argument at the given index. */
|
||||
inline const Value &arg(unsigned i, MaybeCheckAliasing = CHECK_ALIASING) const;
|
||||
inline void setArg(unsigned i, const Value &v, MaybeCheckAliasing = CHECK_ALIASING);
|
||||
inline const Value &arg(unsigned i) const;
|
||||
inline void setArg(unsigned i, const Value &v);
|
||||
inline void initArgUnchecked(unsigned i, const Value &v);
|
||||
|
||||
/* Returns the variable at the given index. */
|
||||
inline const Value &var(unsigned i, MaybeCheckAliasing = CHECK_ALIASING) const;
|
||||
inline void setVar(unsigned i, const Value &v, MaybeCheckAliasing = CHECK_ALIASING);
|
||||
inline const Value &var(unsigned i) const;
|
||||
inline void setVar(unsigned i, const Value &v);
|
||||
inline void initVarUnchecked(unsigned i, const Value &v);
|
||||
|
||||
/*
|
||||
* Get the actual arrays of arguments and variables. Only call if type
|
||||
|
@ -152,11 +151,15 @@ class CallObject : public ScopeObject
|
|||
inline HeapSlotArray argArray();
|
||||
inline HeapSlotArray varArray();
|
||||
|
||||
inline void copyValues(unsigned nargs, Value *argv, unsigned nvars, Value *slots);
|
||||
|
||||
static JSBool getArgOp(JSContext *cx, HandleObject obj, HandleId id, Value *vp);
|
||||
static JSBool getVarOp(JSContext *cx, HandleObject obj, HandleId id, Value *vp);
|
||||
static JSBool setArgOp(JSContext *cx, HandleObject obj, HandleId id, JSBool strict, Value *vp);
|
||||
static JSBool setVarOp(JSContext *cx, HandleObject obj, HandleId id, JSBool strict, Value *vp);
|
||||
|
||||
/* Copy in all the unaliased formals and locals. */
|
||||
void copyUnaliasedValues(StackFrame *fp);
|
||||
/* Return whether this environment contains 'name' and, if so, its value. */
|
||||
bool containsVarOrArg(PropertyName *name, Value *vp, JSContext *cx);
|
||||
};
|
||||
|
||||
class DeclEnvObject : public ScopeObject
|
||||
|
@ -181,6 +184,10 @@ class NestedScopeObject : public ScopeObject
|
|||
|
||||
class WithObject : public NestedScopeObject
|
||||
{
|
||||
/* These ScopeObject operations are not valid on a with object. */
|
||||
js::StackFrame *maybeStackFrame() const;
|
||||
void setStackFrame(StackFrame *frame);
|
||||
|
||||
static const unsigned THIS_SLOT = 2;
|
||||
|
||||
/* Use WithObject::object() instead. */
|
||||
|
@ -188,7 +195,7 @@ class WithObject : public NestedScopeObject
|
|||
|
||||
public:
|
||||
static const unsigned RESERVED_SLOTS = 3;
|
||||
static const gc::AllocKind FINALIZE_KIND = gc::FINALIZE_OBJECT4_BACKGROUND;
|
||||
static const gc::AllocKind FINALIZE_KIND = gc::FINALIZE_OBJECT4;
|
||||
|
||||
static WithObject *
|
||||
create(JSContext *cx, HandleObject proto, HandleObject enclosing, uint32_t depth);
|
||||
|
@ -203,27 +210,23 @@ class WithObject : public NestedScopeObject
|
|||
class BlockObject : public NestedScopeObject
|
||||
{
|
||||
public:
|
||||
static const unsigned RESERVED_SLOTS = CALL_BLOCK_RESERVED_SLOTS;
|
||||
static const gc::AllocKind FINALIZE_KIND = gc::FINALIZE_OBJECT4_BACKGROUND;
|
||||
static const unsigned RESERVED_SLOTS = 2;
|
||||
static const gc::AllocKind FINALIZE_KIND = gc::FINALIZE_OBJECT4;
|
||||
|
||||
/* Return the number of variables associated with this block. */
|
||||
inline uint32_t slotCount() const;
|
||||
|
||||
/*
|
||||
* Return the local corresponding to the ith binding where i is in the
|
||||
* range [0, slotCount()) and the returned local index is in the range
|
||||
* [script->nfixed, script->nfixed + script->nslots).
|
||||
*/
|
||||
unsigned slotToFrameLocal(JSScript *script, unsigned i);
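A small standalone sketch of the mapping described in the comment above. It is not the real slotToFrameLocal; it assumes (consistent with the aliasing check in StackFrame::unaliasedLocal elsewhere in this patch) that a block's ith binding lives above the fixed slots at the block's stack depth, and ModelScript/ModelBlock are made-up stand-ins for the relevant JSScript and StaticBlockObject fields.

#include <cassert>

// Hypothetical stand-ins for the JSScript and StaticBlockObject fields used here.
struct ModelScript { unsigned nfixed, nslots; };
struct ModelBlock  { unsigned stackDepth, slotCount; };

// Binding i of a block maps to a frame local above the fixed slots, offset by
// the depth at which the block was entered.
unsigned slotToFrameLocal(const ModelScript &script, const ModelBlock &block, unsigned i)
{
    assert(i < block.slotCount);
    unsigned local = script.nfixed + block.stackDepth + i;
    assert(local >= script.nfixed && local < script.nfixed + script.nslots);
    return local;
}

int main()
{
    ModelScript script = { 4, 16 };   // 4 fixed locals, 16 slots in total
    ModelBlock  block  = { 2, 3 };    // block entered at stack depth 2, 3 let bindings
    assert(slotToFrameLocal(script, block, 1) == 7);   // nfixed(4) + depth(2) + 1
    return 0;
}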
|
||||
|
||||
protected:
|
||||
/* Blocks contain an object slot for each slot i: 0 <= i < slotCount. */
|
||||
inline const Value &slotValue(unsigned i);
|
||||
inline void setSlotValue(unsigned i, const Value &v);
|
||||
inline HeapSlot &slotValue(unsigned i);
|
||||
};
|
||||
|
||||
class StaticBlockObject : public BlockObject
|
||||
{
|
||||
/* These ScopeObject operations are not valid on a static block object. */
|
||||
StackFrame *maybeStackFrame() const;
|
||||
void setStackFrame(StackFrame *frame);
|
||||
|
||||
public:
|
||||
static StaticBlockObject *create(JSContext *cx);
|
||||
|
||||
|
@ -251,7 +254,7 @@ class StaticBlockObject : public BlockObject
|
|||
* A static block object is cloned (when entering the block) iff some
|
||||
* variable of the block isAliased.
|
||||
*/
|
||||
bool needsClone();
|
||||
bool needsClone() const;
|
||||
|
||||
const Shape *addVar(JSContext *cx, jsid id, int index, bool *redeclared);
|
||||
};
|
||||
|
@ -265,12 +268,17 @@ class ClonedBlockObject : public BlockObject
|
|||
/* The static block from which this block was cloned. */
|
||||
StaticBlockObject &staticBlock() const;
|
||||
|
||||
/* Assuming 'put' has been called, return the value of the ith let var. */
|
||||
const Value &var(unsigned i, MaybeCheckAliasing = CHECK_ALIASING);
|
||||
void setVar(unsigned i, const Value &v, MaybeCheckAliasing = CHECK_ALIASING);
|
||||
/*
|
||||
* When this block's stack slots are about to be popped, 'put' must be
|
||||
* called to copy the slot values into this block's object slots.
|
||||
*/
|
||||
void put(StackFrame *fp);
|
||||
|
||||
/* Copy in all the unaliased formals and locals. */
|
||||
void copyUnaliasedValues(StackFrame *fp);
|
||||
/* Assuming 'put' has been called, return the value of the ith let var. */
|
||||
const Value &closedSlot(unsigned i);
|
||||
|
||||
/* Return whether this environment contains 'name' and, if so, its value. */
|
||||
bool containsVar(PropertyName *name, Value *vp, JSContext *cx);
|
||||
};
|
||||
|
||||
template<XDRMode mode>
|
||||
|
@ -414,32 +422,16 @@ class DebugScopes
|
|||
* The map from live frames which have optimized-away scopes to the
|
||||
* corresponding debug scopes.
|
||||
*/
|
||||
typedef HashMap<ScopeIter,
|
||||
DebugScopeObject *,
|
||||
ScopeIter,
|
||||
RuntimeAllocPolicy> MissingScopeMap;
|
||||
typedef HashMap<ScopeIter, DebugScopeObject *, ScopeIter, RuntimeAllocPolicy> MissingScopeMap;
|
||||
MissingScopeMap missingScopes;
|
||||
|
||||
/*
|
||||
* The map from scope objects of live frames to the live frame. This map is
|
||||
* updated lazily whenever the debugger needs the information. In between
|
||||
* two lazy updates, liveScopes becomes incomplete (but not invalid, onPop*
|
||||
* removes scopes as they are popped). Thus, two consecutive debugger lazy
|
||||
* updates of liveScopes need only fill in the new scopes.
|
||||
*/
|
||||
typedef HashMap<ScopeObject *,
|
||||
StackFrame *,
|
||||
DefaultHasher<ScopeObject *>,
|
||||
RuntimeAllocPolicy> LiveScopeMap;
|
||||
LiveScopeMap liveScopes;
|
||||
|
||||
public:
|
||||
DebugScopes(JSRuntime *rt);
|
||||
~DebugScopes();
|
||||
bool init();
|
||||
|
||||
void mark(JSTracer *trc);
|
||||
void sweep(JSRuntime *rt);
|
||||
void sweep();
|
||||
|
||||
DebugScopeObject *hasDebugScope(JSContext *cx, ScopeObject &scope) const;
|
||||
bool addDebugScope(JSContext *cx, ScopeObject &scope, DebugScopeObject &debugScope);
|
||||
|
@ -447,17 +439,12 @@ class DebugScopes
|
|||
DebugScopeObject *hasDebugScope(JSContext *cx, ScopeIter si) const;
|
||||
bool addDebugScope(JSContext *cx, ScopeIter si, DebugScopeObject &debugScope);
|
||||
|
||||
bool updateLiveScopes(JSContext *cx);
|
||||
StackFrame *hasLiveFrame(ScopeObject &scope);
|
||||
|
||||
/*
|
||||
* In debug-mode, these must be called whenever exiting a call/block or
|
||||
* when activating/yielding a generator.
|
||||
*/
|
||||
void onPopCall(StackFrame *fp);
|
||||
void onPopBlock(JSContext *cx, StackFrame *fp);
|
||||
void onPopWith(StackFrame *fp);
|
||||
void onPopStrictEvalScope(StackFrame *fp);
|
||||
void onGeneratorFrameChange(StackFrame *from, StackFrame *to);
|
||||
void onCompartmentLeaveDebugMode(JSCompartment *c);
|
||||
};
|
||||
|
|
|
@ -92,7 +92,7 @@ StackFrame::initPrev(JSContext *cx)
|
|||
prev_ = NULL;
|
||||
#ifdef DEBUG
|
||||
prevpc_ = (jsbytecode *)0xbadc;
|
||||
prevInline_ = (InlinedSite *)0xbadc;
|
||||
prevInline_ = (JSInlinedSite *)0xbadc;
|
||||
#endif
|
||||
}
|
||||
}
|
||||
|
@ -147,8 +147,9 @@ StackFrame::initCallFrame(JSContext *cx, JSFunction &callee,
|
|||
JS_ASSERT(!hasBlockChain());
|
||||
JS_ASSERT(!hasHookData());
|
||||
JS_ASSERT(annotation() == NULL);
|
||||
JS_ASSERT(!hasCallObj());
|
||||
|
||||
initVarsToUndefined();
|
||||
SetValueRangeToUndefined(slots(), script->nfixed);
|
||||
}
|
||||
|
||||
/*
|
||||
|
@ -170,129 +171,85 @@ StackFrame::initFixupFrame(StackFrame *prev, StackFrame::Flags flags, void *ncod
|
|||
u.nactual = nactual;
|
||||
}
|
||||
|
||||
inline bool
|
||||
StackFrame::heavyweightFunctionPrologue(JSContext *cx)
|
||||
{
|
||||
JS_ASSERT(isNonEvalFunctionFrame());
|
||||
JS_ASSERT(fun()->isHeavyweight());
|
||||
|
||||
CallObject *callobj = CallObject::createForFunction(cx, this);
|
||||
if (!callobj)
|
||||
return false;
|
||||
|
||||
pushOnScopeChain(*callobj);
|
||||
flags_ |= HAS_CALL_OBJ;
|
||||
|
||||
if (script()->nesting())
|
||||
types::NestingPrologue(cx, this);
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
|
||||
inline void
|
||||
StackFrame::initVarsToUndefined()
|
||||
{
|
||||
SetValueRangeToUndefined(slots(), script()->nfixed);
|
||||
}
|
||||
|
||||
inline JSObject *
|
||||
StackFrame::createRestParameter(JSContext *cx)
|
||||
{
|
||||
JS_ASSERT(fun()->hasRest());
|
||||
unsigned nformal = fun()->nargs - 1, nactual = numActualArgs();
|
||||
unsigned nrest = (nactual > nformal) ? nactual - nformal : 0;
|
||||
return NewDenseCopiedArray(cx, nrest, actuals() + nformal);
|
||||
return NewDenseCopiedArray(cx, nrest, actualArgs() + nformal);
|
||||
}
|
||||
|
||||
inline Value &
|
||||
StackFrame::unaliasedVar(unsigned i, MaybeCheckAliasing checkAliasing)
|
||||
{
|
||||
JS_ASSERT_IF(checkAliasing, !script()->varIsAliased(i));
|
||||
JS_ASSERT(i < script()->nfixed);
|
||||
return slots()[i];
|
||||
}
|
||||
|
||||
inline Value &
|
||||
StackFrame::unaliasedLocal(unsigned i, MaybeCheckAliasing checkAliasing)
|
||||
{
|
||||
#ifdef DEBUG
|
||||
if (checkAliasing) {
|
||||
JS_ASSERT(i < script()->nslots);
|
||||
if (i < script()->nfixed) {
|
||||
JS_ASSERT(!script()->varIsAliased(i));
|
||||
} else {
|
||||
unsigned depth = i - script()->nfixed;
|
||||
for (StaticBlockObject *b = maybeBlockChain(); b; b = b->enclosingBlock()) {
|
||||
if (b->containsVarAtDepth(depth)) {
|
||||
JS_ASSERT(!b->isAliased(depth - b->stackDepth()));
|
||||
break;
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
#endif
|
||||
return slots()[i];
|
||||
}
|
||||
|
||||
inline Value &
|
||||
StackFrame::unaliasedFormal(unsigned i, MaybeCheckAliasing checkAliasing)
|
||||
{
|
||||
JS_ASSERT(i < numFormalArgs());
|
||||
JS_ASSERT_IF(checkAliasing, !script()->formalIsAliased(i));
|
||||
return formals()[i];
|
||||
}
|
||||
|
||||
inline Value &
|
||||
StackFrame::unaliasedActual(unsigned i)
|
||||
StackFrame::canonicalActualArg(unsigned i) const
|
||||
{
|
||||
if (i < numFormalArgs())
|
||||
return formalArg(i);
|
||||
JS_ASSERT(i < numActualArgs());
|
||||
JS_ASSERT(!script()->formalIsAliased(i));
|
||||
return i < numFormalArgs() ? formals()[i] : actuals()[i];
|
||||
return actualArgs()[i];
|
||||
}
|
||||
|
||||
template <class Op>
|
||||
inline void
|
||||
StackFrame::forEachUnaliasedActual(Op op)
|
||||
inline bool
|
||||
StackFrame::forEachCanonicalActualArg(Op op, unsigned start /* = 0 */, unsigned count /* = unsigned(-1) */)
|
||||
{
|
||||
JS_ASSERT(script()->numClosedArgs() == 0);
|
||||
JS_ASSERT(!script()->needsArgsObj());
|
||||
unsigned nformal = fun()->nargs;
|
||||
JS_ASSERT(start <= nformal);
|
||||
|
||||
unsigned nformal = numFormalArgs();
|
||||
Value *formals = formalArgsEnd() - nformal;
|
||||
unsigned nactual = numActualArgs();
|
||||
if (count == unsigned(-1))
|
||||
count = nactual - start;
|
||||
|
||||
const Value *formalsEnd = (const Value *)this;
|
||||
const Value *formals = formalsEnd - nformal;
|
||||
unsigned end = start + count;
|
||||
JS_ASSERT(end >= start);
|
||||
JS_ASSERT(end <= nactual);
|
||||
|
||||
if (nactual <= nformal) {
|
||||
const Value *actualsEnd = formals + nactual;
|
||||
for (const Value *p = formals; p < actualsEnd; ++p)
|
||||
op(*p);
|
||||
if (end <= nformal) {
|
||||
Value *p = formals + start;
|
||||
for (; start < end; ++p, ++start) {
|
||||
if (!op(start, p))
|
||||
return false;
|
||||
}
|
||||
} else {
|
||||
for (const Value *p = formals; p < formalsEnd; ++p)
|
||||
op(*p);
|
||||
|
||||
const Value *actualsEnd = formals - 2;
|
||||
const Value *actuals = actualsEnd - nactual;
|
||||
for (const Value *p = actuals + nformal; p < actualsEnd; ++p)
|
||||
op(*p);
|
||||
for (Value *p = formals + start; start < nformal; ++p, ++start) {
|
||||
if (!op(start, p))
|
||||
return false;
|
||||
}
|
||||
JS_ASSERT(start >= nformal);
|
||||
Value *actuals = formals - (nactual + 2) + start;
|
||||
for (Value *p = actuals; start < end; ++p, ++start) {
|
||||
if (!op(start, p))
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
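The loop above depends on the layout in which, when the caller overflows the formals, only the formal prefix is copied next to the frame and the remaining actuals stay below it (separated by the two-Value callee/this pair mentioned in this hunk). A minimal model of the resulting canonical-location choice; this is not SpiderMonkey code, and the two vectors and names are invented for illustration.

#include <cassert>
#include <vector>

// Canonical storage for actual argument i: actuals within the formal count are
// read from the copied formal prefix, the rest from the caller-pushed array.
double canonicalActual(const std::vector<double> &formals,
                       const std::vector<double> &actuals, unsigned i)
{
    unsigned nformal = formals.size();
    assert(i < actuals.size());
    return i < nformal ? formals[i] : actuals[i];
}

int main()
{
    // f(a, b) called as f(1, 2, 3, 4): the formal prefix is duplicated near the frame.
    std::vector<double> actuals = { 1, 2, 3, 4 };
    std::vector<double> formals = { 1, 2 };
    assert(canonicalActual(formals, actuals, 1) == 2);  // inside the formal copy
    assert(canonicalActual(formals, actuals, 3) == 4);  // read from the actuals
    return 0;
}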
|
||||
|
||||
template <class Op>
|
||||
inline bool
|
||||
StackFrame::forEachFormalArg(Op op)
|
||||
{
|
||||
Value *formals = formalArgsEnd() - fun()->nargs;
|
||||
Value *formalsEnd = formalArgsEnd();
|
||||
unsigned i = 0;
|
||||
for (Value *p = formals; p != formalsEnd; ++p, ++i) {
|
||||
if (!op(i, p))
|
||||
return false;
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
struct CopyTo
|
||||
{
|
||||
Value *dst;
|
||||
CopyTo(Value *dst) : dst(dst) {}
|
||||
void operator()(const Value &src) { *dst++ = src; }
|
||||
bool operator()(unsigned, Value *src) {
|
||||
*dst++ = *src;
|
||||
return true;
|
||||
}
|
||||
};
|
||||
|
||||
inline unsigned
|
||||
StackFrame::numFormalArgs() const
|
||||
{
|
||||
JS_ASSERT(hasArgs());
|
||||
return fun()->nargs;
|
||||
}
|
||||
|
||||
inline unsigned
|
||||
StackFrame::numActualArgs() const
|
||||
{
|
||||
|
@ -310,51 +267,57 @@ StackFrame::numActualArgs() const
|
|||
return numFormalArgs();
|
||||
}
|
||||
|
||||
inline ArgumentsObject &
|
||||
StackFrame::argsObj() const
|
||||
inline Value *
|
||||
StackFrame::actualArgs() const
|
||||
{
|
||||
JS_ASSERT(script()->needsArgsObj());
|
||||
JS_ASSERT(flags_ & HAS_ARGS_OBJ);
|
||||
return *argsObj_;
|
||||
JS_ASSERT(hasArgs());
|
||||
Value *argv = formalArgs();
|
||||
if (JS_UNLIKELY(flags_ & OVERFLOW_ARGS))
|
||||
return argv - (2 + u.nactual);
|
||||
return argv;
|
||||
}
|
||||
|
||||
inline Value *
|
||||
StackFrame::actualArgsEnd() const
|
||||
{
|
||||
JS_ASSERT(hasArgs());
|
||||
if (JS_UNLIKELY(flags_ & OVERFLOW_ARGS))
|
||||
return formalArgs() - 2;
|
||||
return formalArgs() + numActualArgs();
|
||||
}
|
||||
|
||||
inline void
|
||||
StackFrame::initArgsObj(ArgumentsObject &argsobj)
|
||||
StackFrame::setScopeChain(JSObject &obj)
|
||||
{
|
||||
JS_ASSERT(script()->needsArgsObj());
|
||||
flags_ |= HAS_ARGS_OBJ;
|
||||
argsObj_ = &argsobj;
|
||||
}
|
||||
|
||||
inline ScopeObject &
|
||||
StackFrame::aliasedVarScope(ScopeCoordinate sc) const
|
||||
{
|
||||
JSObject *scope = &scopeChain()->asScope();
|
||||
for (unsigned i = sc.hops; i; i--)
|
||||
scope = &scope->asScope().enclosingScope();
|
||||
return scope->asScope();
|
||||
}
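aliasedVarScope above simply follows 'hops' enclosing-scope links and then indexes a slot. A toy model of that walk, using a simplified linked scope rather than the real ScopeObject API (ModelScope and aliasedVar are illustrative names):

#include <cassert>
#include <vector>

// Simplified scope object: an enclosing link plus some slots.
struct ModelScope {
    ModelScope *enclosing;
    std::vector<int> slots;
};

// Walk 'hops' enclosing-scope links, then read the slot, mirroring the
// hops/slot pair of a scope coordinate.
int aliasedVar(ModelScope *scope, unsigned hops, unsigned slot)
{
    for (unsigned i = hops; i; i--)
        scope = scope->enclosing;
    assert(slot < scope->slots.size());
    return scope->slots[slot];
}

int main()
{
    ModelScope outer = { nullptr, { 10, 11 } };
    ModelScope inner = { &outer,  { 20 } };
    assert(aliasedVar(&inner, 0, 0) == 20);   // innermost scope
    assert(aliasedVar(&inner, 1, 1) == 11);   // one hop out
    return 0;
}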
|
||||
|
||||
inline void
|
||||
StackFrame::pushOnScopeChain(ScopeObject &scope)
|
||||
{
|
||||
JS_ASSERT(*scopeChain() == scope.enclosingScope() ||
|
||||
*scopeChain() == scope.asCall().enclosingScope().asDeclEnv().enclosingScope());
|
||||
scopeChain_ = &scope;
|
||||
#ifdef DEBUG
|
||||
JS_ASSERT(&obj != NULL);
|
||||
if (hasCallObj()) {
|
||||
JSObject *pobj = &obj;
|
||||
while (pobj && !pobj->isWith() && pobj->asScope().maybeStackFrame() != this)
|
||||
pobj = pobj->enclosingScope();
|
||||
JS_ASSERT(pobj);
|
||||
} else {
|
||||
for (JSObject *pobj = &obj; pobj->isScope() && !pobj->isWith(); pobj = pobj->enclosingScope())
|
||||
JS_ASSERT_IF(pobj->isCall(), pobj->asScope().maybeStackFrame() != this);
|
||||
}
|
||||
#endif
|
||||
scopeChain_ = &obj;
|
||||
flags_ |= HAS_SCOPECHAIN;
|
||||
}
|
||||
|
||||
inline void
|
||||
StackFrame::popOffScopeChain()
|
||||
StackFrame::initScopeChain(CallObject &obj)
|
||||
{
|
||||
JS_ASSERT(flags_ & HAS_SCOPECHAIN);
|
||||
scopeChain_ = &scopeChain_->asScope().enclosingScope();
|
||||
JS_ASSERT(&obj != NULL);
|
||||
JS_ASSERT(!hasCallObj() && obj.maybeStackFrame() == this);
|
||||
scopeChain_ = &obj;
|
||||
flags_ |= HAS_SCOPECHAIN | HAS_CALL_OBJ;
|
||||
}
|
||||
|
||||
inline CallObject &
|
||||
StackFrame::callObj() const
|
||||
{
|
||||
JS_ASSERT(fun()->isHeavyweight());
|
||||
JS_ASSERT_IF(isNonEvalFunctionFrame() || isStrictEvalFrame(), hasCallObj());
|
||||
|
||||
JSObject *pobj = scopeChain();
|
||||
while (JS_UNLIKELY(!pobj->isCall()))
|
||||
|
@ -362,6 +325,89 @@ StackFrame::callObj() const
|
|||
return pobj->asCall();
|
||||
}
|
||||
|
||||
inline bool
|
||||
StackFrame::maintainNestingState() const
|
||||
{
|
||||
/*
|
||||
* Whether to invoke the nesting epilogue/prologue to maintain active
|
||||
* frame counts and check for reentrant outer functions.
|
||||
*/
|
||||
return isNonEvalFunctionFrame() && !isGeneratorFrame() && script()->nesting();
|
||||
}
|
||||
|
||||
inline bool
|
||||
StackFrame::functionPrologue(JSContext *cx)
|
||||
{
|
||||
JS_ASSERT(isNonEvalFunctionFrame());
|
||||
JS_ASSERT(!isGeneratorFrame());
|
||||
|
||||
if (fun()->isHeavyweight()) {
|
||||
CallObject *callobj = CallObject::createForFunction(cx, this);
|
||||
if (!callobj)
|
||||
return false;
|
||||
initScopeChain(*callobj);
|
||||
} else {
|
||||
/* Force instantiation of the scope chain, for JIT frames. */
|
||||
scopeChain();
|
||||
}
|
||||
|
||||
if (script()->nesting()) {
|
||||
JS_ASSERT(maintainNestingState());
|
||||
types::NestingPrologue(cx, this);
|
||||
}
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
inline void
|
||||
StackFrame::functionEpilogue(JSContext *cx)
|
||||
{
|
||||
JS_ASSERT(isNonEvalFunctionFrame());
|
||||
|
||||
if (cx->compartment->debugMode())
|
||||
cx->runtime->debugScopes->onPopCall(this);
|
||||
|
||||
if (flags_ & (HAS_ARGS_OBJ | HAS_CALL_OBJ)) {
|
||||
if (hasCallObj())
|
||||
js_PutCallObject(this, scopeChain_->asCall());
|
||||
if (hasArgsObj())
|
||||
js_PutArgsObject(this);
|
||||
}
|
||||
|
||||
if (maintainNestingState())
|
||||
types::NestingEpilogue(this);
|
||||
}
|
||||
|
||||
inline void
|
||||
StackFrame::updateEpilogueFlags()
|
||||
{
|
||||
if (flags_ & (HAS_ARGS_OBJ | HAS_CALL_OBJ)) {
|
||||
if (hasArgsObj() && !argsObj().maybeStackFrame())
|
||||
flags_ &= ~HAS_ARGS_OBJ;
|
||||
if (hasCallObj() && !callObj().maybeStackFrame()) {
|
||||
/*
|
||||
* For function frames, the call object may or may not have an
|
||||
* enclosing DeclEnv object, so we use the callee's parent, since
|
||||
* it was the initial scope chain. For global (strict) eval frames,
|
||||
* there is no callee, but the call object's parent is the initial
|
||||
* scope chain.
|
||||
*/
|
||||
scopeChain_ = isFunctionFrame()
|
||||
? callee().environment()
|
||||
: &scopeChain_->asScope().enclosingScope();
|
||||
flags_ &= ~HAS_CALL_OBJ;
|
||||
}
|
||||
}
|
||||
|
||||
/*
|
||||
* For outer/inner function frames, undo the active frame balancing so that
|
||||
* when we redo it in the epilogue we get the right final value. The other
|
||||
* nesting epilogue changes (update active args/vars) are idempotent.
|
||||
*/
|
||||
if (maintainNestingState())
|
||||
script()->nesting()->activeFrames++;
|
||||
}
|
||||
|
||||
/*****************************************************************************/
|
||||
|
||||
STATIC_POSTCONDITION(!return || ubound(from) >= nvals)
|
||||
|
@ -383,7 +429,7 @@ inline Value *
|
|||
StackSpace::getStackLimit(JSContext *cx, MaybeReportError report)
|
||||
{
|
||||
FrameRegs ®s = cx->regs();
|
||||
unsigned nvals = regs.fp()->script()->nslots + STACK_JIT_EXTRA;
|
||||
unsigned nvals = regs.fp()->numSlots() + STACK_JIT_EXTRA;
|
||||
return ensureSpace(cx, report, regs.sp, nvals)
|
||||
? conservativeEnd_
|
||||
: NULL;
|
||||
|
@ -404,7 +450,7 @@ ContextStack::getCallFrame(JSContext *cx, MaybeReportError report, const CallArg
|
|||
/* Include extra space to satisfy the method-jit stackLimit invariant. */
|
||||
unsigned nvals = VALUES_PER_STACK_FRAME + script->nslots + StackSpace::STACK_JIT_EXTRA;
|
||||
|
||||
/* Maintain layout invariant: &formals[0] == ((Value *)fp) - nformal. */
|
||||
/* Maintain layout invariant: &formalArgs[0] == ((Value *)fp) - nformal. */
|
||||
|
||||
if (args.length() == nformal) {
|
||||
if (!space().ensureSpace(cx, report, firstUnused, nvals))
|
||||
|
@ -496,7 +542,9 @@ ContextStack::popInlineFrame(FrameRegs ®s)
|
|||
JS_ASSERT(®s == &seg_->regs());
|
||||
|
||||
StackFrame *fp = regs.fp();
|
||||
Value *newsp = fp->actuals() - 1;
|
||||
fp->functionEpilogue(cx_);
|
||||
|
||||
Value *newsp = fp->actualArgs() - 1;
|
||||
JS_ASSERT(newsp >= fp->prev()->base());
|
||||
|
||||
newsp[-1] = fp->returnValue();
|
||||
|
@ -509,7 +557,7 @@ ContextStack::popFrameAfterOverflow()
|
|||
/* Restore the regs to what they were on entry to JSOP_CALL. */
|
||||
FrameRegs ®s = seg_->regs();
|
||||
StackFrame *fp = regs.fp();
|
||||
regs.popFrame(fp->actuals() + fp->numActualArgs());
|
||||
regs.popFrame(fp->actualArgsEnd());
|
||||
}
|
||||
|
||||
inline JSScript *
|
||||
|
|
|
@ -90,22 +90,22 @@ StackFrame::initDummyFrame(JSContext *cx, JSObject &chain)
|
|||
flags_ = DUMMY | HAS_PREVPC | HAS_SCOPECHAIN;
|
||||
initPrev(cx);
|
||||
JS_ASSERT(chain.isGlobal());
|
||||
scopeChain_ = &chain;
|
||||
setScopeChain(chain);
|
||||
}
|
||||
|
||||
template <class T, class U, StackFrame::TriggerPostBarriers doPostBarrier>
|
||||
void
|
||||
StackFrame::copyFrameAndValues(JSContext *cx, StackFrame *fp, T *vp,
|
||||
StackFrame::stealFrameAndSlots(JSContext *cx, StackFrame *fp, T *vp,
|
||||
StackFrame *otherfp, U *othervp, Value *othersp)
|
||||
{
|
||||
JS_ASSERT((U *)vp == (U *)this - ((U *)otherfp - othervp));
|
||||
JS_ASSERT((Value *)othervp == otherfp->generatorArgsSnapshotBegin());
|
||||
JS_ASSERT((Value *)othervp == otherfp->actualArgs() - 2);
|
||||
JS_ASSERT(othersp >= otherfp->slots());
|
||||
JS_ASSERT(othersp <= otherfp->generatorSlotsSnapshotBegin() + otherfp->script()->nslots);
|
||||
JS_ASSERT(othersp <= otherfp->base() + otherfp->numSlots());
|
||||
JS_ASSERT((T *)fp - vp == (U *)otherfp - othervp);
|
||||
|
||||
/* Copy args, StackFrame, and slots. */
|
||||
U *srcend = (U *)otherfp->generatorArgsSnapshotEnd();
|
||||
U *srcend = (U *)otherfp->formalArgsEnd();
|
||||
T *dst = vp;
|
||||
for (U *src = othervp; src < srcend; src++, dst++)
|
||||
*dst = *src;
|
||||
|
@ -119,15 +119,39 @@ StackFrame::copyFrameAndValues(JSContext *cx, StackFrame *fp, T *vp,
|
|||
for (U *src = (U *)otherfp->slots(); src < srcend; src++, dst++)
|
||||
*dst = *src;
|
||||
|
||||
/*
|
||||
* Repoint Call, Arguments, Block and With objects to the new live frame.
|
||||
* Call and Arguments are done directly because we have pointers to them.
|
||||
* Block and With objects are done indirectly through 'liveFrame'. See
|
||||
* js_LiveFrameToFloating comment in jsiter.h.
|
||||
*/
|
||||
if (hasCallObj()) {
|
||||
CallObject &obj = callObj();
|
||||
obj.setStackFrame(this);
|
||||
otherfp->flags_ &= ~HAS_CALL_OBJ;
|
||||
if (js_IsNamedLambda(fun())) {
|
||||
DeclEnvObject &env = obj.enclosingScope().asDeclEnv();
|
||||
env.setStackFrame(this);
|
||||
}
|
||||
}
|
||||
if (hasArgsObj()) {
|
||||
ArgumentsObject &argsobj = argsObj();
|
||||
if (argsobj.isNormalArguments())
|
||||
argsobj.setStackFrame(this);
|
||||
else
|
||||
JS_ASSERT(!argsobj.maybeStackFrame());
|
||||
otherfp->flags_ &= ~HAS_ARGS_OBJ;
|
||||
}
|
||||
|
||||
if (cx->compartment->debugMode())
|
||||
cx->runtime->debugScopes->onGeneratorFrameChange(otherfp, this);
|
||||
}
|
||||
|
||||
/* Note: explicit instantiation for js_NewGenerator located in jsiter.cpp. */
|
||||
template void StackFrame::copyFrameAndValues<Value, HeapValue, StackFrame::NoPostBarrier>(
|
||||
template void StackFrame::stealFrameAndSlots<Value, HeapValue, StackFrame::NoPostBarrier>(
|
||||
JSContext *, StackFrame *, Value *,
|
||||
StackFrame *, HeapValue *, Value *);
|
||||
template void StackFrame::copyFrameAndValues<HeapValue, Value, StackFrame::DoPostBarrier>(
|
||||
template void StackFrame::stealFrameAndSlots<HeapValue, Value, StackFrame::DoPostBarrier>(
|
||||
JSContext *, StackFrame *, HeapValue *,
|
||||
StackFrame *, Value *, Value *);
|
||||
|
||||
|
@ -139,7 +163,7 @@ StackFrame::writeBarrierPost()
|
|||
JSObject::writeBarrierPost(scopeChain_, (void *)&scopeChain_);
|
||||
if (isDummyFrame())
|
||||
return;
|
||||
if (flags_ & HAS_ARGS_OBJ)
|
||||
if (hasArgsObj())
|
||||
JSObject::writeBarrierPost(argsObj_, (void *)&argsObj_);
|
||||
if (isScriptFrame()) {
|
||||
if (isFunctionFrame()) {
|
||||
|
@ -154,29 +178,8 @@ StackFrame::writeBarrierPost()
|
|||
HeapValue::writeBarrierPost(rval_, &rval_);
|
||||
}
|
||||
|
||||
JSGenerator *
|
||||
StackFrame::maybeSuspendedGenerator(JSRuntime *rt)
|
||||
{
|
||||
/*
|
||||
* A suspended generator's frame is embedded inside the JSGenerator object
|
||||
* instead of on the contiguous stack like all active frames.
|
||||
*/
|
||||
if (!isGeneratorFrame() || rt->stackSpace.containsFast(this))
|
||||
return NULL;
|
||||
|
||||
/*
|
||||
* Once we know we have a suspended generator frame, there is a static
|
||||
* offset from the frame's snapshot to the beginning of the JSGenerator.
|
||||
*/
|
||||
char *vp = reinterpret_cast<char *>(generatorArgsSnapshotBegin());
|
||||
char *p = vp - offsetof(JSGenerator, stackSnapshot);
|
||||
JSGenerator *gen = reinterpret_cast<JSGenerator *>(p);
|
||||
JS_ASSERT(gen->fp == this);
|
||||
return gen;
|
||||
}
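The recovery above is the usual container-of trick: given a pointer to a member, subtract the member's offset to get the owning object. A standalone illustration with a made-up container; the real JSGenerator layout is not reproduced here.

#include <cassert>
#include <cstddef>

// A container whose 'snapshot' member plays the role of the generator's stack snapshot.
struct Container {
    int id;
    char snapshot[32];
};

int main()
{
    Container c = { 42, {} };

    // Start from a pointer to the embedded member...
    char *member = c.snapshot;

    // ...and subtract its offset to recover the enclosing object, exactly as
    // maybeSuspendedGenerator does with offsetof(JSGenerator, stackSnapshot).
    Container *owner = reinterpret_cast<Container *>(member - offsetof(Container, snapshot));
    assert(owner == &c && owner->id == 42);
    return 0;
}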
|
||||
|
||||
jsbytecode *
|
||||
StackFrame::prevpcSlow(InlinedSite **pinlined)
|
||||
StackFrame::prevpcSlow(JSInlinedSite **pinlined)
|
||||
{
|
||||
JS_ASSERT(!(flags_ & HAS_PREVPC));
|
||||
#if defined(JS_METHODJIT) && defined(JS_MONOIC)
|
||||
|
@ -194,7 +197,7 @@ StackFrame::prevpcSlow(InlinedSite **pinlined)
|
|||
}
|
||||
|
||||
jsbytecode *
|
||||
StackFrame::pcQuadratic(const ContextStack &stack, StackFrame *next, InlinedSite **pinlined)
|
||||
StackFrame::pcQuadratic(const ContextStack &stack, StackFrame *next, JSInlinedSite **pinlined)
|
||||
{
|
||||
JS_ASSERT_IF(next, next->prev() == this);
|
||||
|
||||
|
@ -216,98 +219,6 @@ StackFrame::pcQuadratic(const ContextStack &stack, StackFrame *next, InlinedSite
|
|||
return next->prevpc(pinlined);
|
||||
}
|
||||
|
||||
bool
|
||||
StackFrame::prologue(JSContext *cx, bool newType)
|
||||
{
|
||||
JS_ASSERT(!isDummyFrame());
|
||||
JS_ASSERT(!isGeneratorFrame());
|
||||
|
||||
if (isEvalFrame()) {
|
||||
if (script()->strictModeCode) {
|
||||
CallObject *callobj = CallObject::createForStrictEval(cx, this);
|
||||
if (!callobj)
|
||||
return false;
|
||||
pushOnScopeChain(*callobj);
|
||||
}
|
||||
return true;
|
||||
}
|
||||
|
||||
if (isGlobalFrame())
|
||||
return true;
|
||||
|
||||
JS_ASSERT(isNonEvalFunctionFrame());
|
||||
|
||||
if (fun()->isHeavyweight()) {
|
||||
CallObject *callobj = CallObject::createForFunction(cx, this);
|
||||
if (!callobj)
|
||||
return false;
|
||||
pushOnScopeChain(*callobj);
|
||||
flags_ |= HAS_CALL_OBJ;
|
||||
}
|
||||
|
||||
if (script()->nesting())
|
||||
types::NestingPrologue(cx, this);
|
||||
|
||||
if (isConstructing()) {
|
||||
RootedObject callee(cx, &this->callee());
|
||||
JSObject *obj = js_CreateThisForFunction(cx, callee, newType);
|
||||
if (!obj)
|
||||
return false;
|
||||
functionThis() = ObjectValue(*obj);
|
||||
}
|
||||
|
||||
Probes::enterJSFun(cx, fun(), script());
|
||||
return true;
|
||||
}
|
||||
|
||||
void
|
||||
StackFrame::epilogue(JSContext *cx)
|
||||
{
|
||||
JS_ASSERT(!isDummyFrame());
|
||||
JS_ASSERT(!isGeneratorFrame() || !isYielding());
|
||||
JS_ASSERT(!hasBlockChain());
|
||||
|
||||
if (isEvalFrame()) {
|
||||
if (isStrictEvalFrame()) {
|
||||
JS_ASSERT(scopeChain()->asCall().isForEval());
|
||||
if (cx->compartment->debugMode())
|
||||
cx->runtime->debugScopes->onPopStrictEvalScope(this);
|
||||
} else if (isDirectEvalFrame()) {
|
||||
if (isDebuggerFrame())
|
||||
JS_ASSERT(!scopeChain()->isScope());
|
||||
else
|
||||
JS_ASSERT(scopeChain() == prev()->scopeChain());
|
||||
} else {
|
||||
JS_ASSERT(scopeChain()->isGlobal());
|
||||
}
|
||||
return;
|
||||
}
|
||||
|
||||
if (isGlobalFrame()) {
|
||||
JS_ASSERT(!scopeChain()->isScope());
|
||||
return;
|
||||
}
|
||||
|
||||
JS_ASSERT(isNonEvalFunctionFrame());
|
||||
if (fun()->isHeavyweight()) {
|
||||
JS_ASSERT(scopeChain()->asCall().getCalleeFunction()->script() == script());
|
||||
} else {
|
||||
JS_ASSERT(!scopeChain()->isCall() || scopeChain()->asCall().isForEval() ||
|
||||
scopeChain()->asCall().getCalleeFunction()->script() != script());
|
||||
}
|
||||
|
||||
if (cx->compartment->debugMode())
|
||||
cx->runtime->debugScopes->onPopCall(this);
|
||||
|
||||
Probes::exitJSFun(cx, fun(), script());
|
||||
|
||||
if (script()->nesting())
|
||||
types::NestingEpilogue(this);
|
||||
|
||||
if (isConstructing() && returnValue().isPrimitive())
|
||||
setReturnValue(ObjectValue(constructorThis()));
|
||||
}
|
||||
|
||||
bool
|
||||
StackFrame::pushBlock(JSContext *cx, StaticBlockObject &block)
|
||||
{
|
||||
|
@ -319,7 +230,7 @@ StackFrame::pushBlock(JSContext *cx, StaticBlockObject &block)
|
|||
if (!clone)
|
||||
return false;
|
||||
|
||||
pushOnScopeChain(*clone);
|
||||
scopeChain_ = clone;
|
||||
}
|
||||
|
||||
flags_ |= HAS_BLOCKCHAIN;
|
||||
|
@ -336,8 +247,10 @@ StackFrame::popBlock(JSContext *cx)
|
|||
cx->runtime->debugScopes->onPopBlock(cx, this);
|
||||
|
||||
if (blockChain_->needsClone()) {
|
||||
JS_ASSERT(scopeChain_->asClonedBlock().staticBlock() == *blockChain_);
|
||||
popOffScopeChain();
|
||||
ClonedBlockObject &clone = scopeChain()->asClonedBlock();
|
||||
JS_ASSERT(clone.staticBlock() == *blockChain_);
|
||||
clone.put(cx->fp());
|
||||
scopeChain_ = &clone.enclosingScope();
|
||||
}
|
||||
|
||||
blockChain_ = blockChain_->enclosingBlock();
|
||||
|
@ -346,11 +259,7 @@ StackFrame::popBlock(JSContext *cx)
|
|||
void
|
||||
StackFrame::popWith(JSContext *cx)
|
||||
{
|
||||
if (cx->compartment->debugMode())
|
||||
cx->runtime->debugScopes->onPopWith(this);
|
||||
|
||||
JS_ASSERT(scopeChain()->isWith());
|
||||
popOffScopeChain();
|
||||
setScopeChain(scopeChain()->asWith().enclosingScope());
|
||||
}
|
||||
|
||||
void
|
||||
|
@ -365,7 +274,7 @@ StackFrame::mark(JSTracer *trc)
|
|||
gc::MarkObjectUnbarriered(trc, &scopeChain_, "scope chain");
|
||||
if (isDummyFrame())
|
||||
return;
|
||||
if (flags_ & HAS_ARGS_OBJ)
|
||||
if (hasArgsObj())
|
||||
gc::MarkObjectUnbarriered(trc, &argsObj_, "arguments");
|
||||
if (isFunctionFrame()) {
|
||||
gc::MarkObjectUnbarriered(trc, &exec.fun, "fun");
|
||||
|
@ -550,7 +459,7 @@ StackSpace::containingSegment(const StackFrame *target) const
|
|||
}
|
||||
|
||||
void
|
||||
StackSpace::markFrameValues(JSTracer *trc, StackFrame *fp, Value *slotsEnd, jsbytecode *pc)
|
||||
StackSpace::markFrameSlots(JSTracer *trc, StackFrame *fp, Value *slotsEnd, jsbytecode *pc)
|
||||
{
|
||||
Value *slotsBegin = fp->slots();
|
||||
|
||||
|
@ -624,12 +533,12 @@ StackSpace::mark(JSTracer *trc)
|
|||
jsbytecode *pc = seg->maybepc();
|
||||
for (StackFrame *fp = seg->maybefp(); (Value *)fp > (Value *)seg; fp = fp->prev()) {
|
||||
/* Mark from fp->slots() to slotsEnd. */
|
||||
markFrameValues(trc, fp, slotsEnd, pc);
|
||||
markFrameSlots(trc, fp, slotsEnd, pc);
|
||||
|
||||
fp->mark(trc);
|
||||
slotsEnd = (Value *)fp;
|
||||
|
||||
InlinedSite *site;
|
||||
JSInlinedSite *site;
|
||||
pc = fp->prevpc(&site);
|
||||
JS_ASSERT_IF(fp->prev(), !site);
|
||||
}
|
||||
|
@ -789,7 +698,7 @@ ContextStack::ensureOnTop(JSContext *cx, MaybeReportError report, unsigned nvars
|
|||
*/
|
||||
if (FrameRegs *regs = cx->maybeRegs()) {
|
||||
JSFunction *fun = NULL;
|
||||
if (InlinedSite *site = regs->inlined()) {
|
||||
if (JSInlinedSite *site = regs->inlined()) {
|
||||
mjit::JITChunk *chunk = regs->fp()->jit()->chunk(regs->pc);
|
||||
fun = chunk->inlineFrames()[site->inlineIndex].fun;
|
||||
} else {
|
||||
|
@ -940,7 +849,7 @@ ContextStack::pushExecuteFrame(JSContext *cx, JSScript *script, const Value &thi
|
|||
StackFrame *prev = evalInFrame ? evalInFrame : maybefp();
|
||||
StackFrame *fp = reinterpret_cast<StackFrame *>(firstUnused + 2);
|
||||
fp->initExecuteFrame(script, prev, seg_->maybeRegs(), thisv, scopeChain, type);
|
||||
fp->initVarsToUndefined();
|
||||
SetValueRangeToUndefined(fp->slots(), script->nfixed);
|
||||
efg->regs_.prepareToRun(*fp, script);
|
||||
|
||||
/* pushRegs() below links the prev-frame; manually link the prev-call. */
|
||||
|
@ -982,6 +891,9 @@ ContextStack::popFrame(const FrameGuard &fg)
|
|||
JS_ASSERT(space().firstUnused() == fg.regs_.sp);
|
||||
JS_ASSERT(&fg.regs_ == &seg_->regs());
|
||||
|
||||
if (fg.regs_.fp()->isNonEvalFunctionFrame())
|
||||
fg.regs_.fp()->functionEpilogue(cx_);
|
||||
|
||||
seg_->popRegs(fg.prevRegs_);
|
||||
if (fg.pushedSeg_)
|
||||
popSegment();
|
||||
|
@ -997,11 +909,11 @@ ContextStack::popFrame(const FrameGuard &fg)
|
|||
bool
|
||||
ContextStack::pushGeneratorFrame(JSContext *cx, JSGenerator *gen, GeneratorFrameGuard *gfg)
|
||||
{
|
||||
HeapValue *genvp = gen->stackSnapshot;
|
||||
JS_ASSERT(genvp == HeapValueify(gen->fp->generatorArgsSnapshotBegin()));
|
||||
unsigned vplen = HeapValueify(gen->fp->generatorArgsSnapshotEnd()) - genvp;
|
||||
StackFrame *genfp = gen->floatingFrame();
|
||||
HeapValue *genvp = gen->floatingStack;
|
||||
unsigned vplen = (HeapValue *)genfp - genvp;
|
||||
|
||||
unsigned nvars = vplen + VALUES_PER_STACK_FRAME + gen->fp->script()->nslots;
|
||||
unsigned nvars = vplen + VALUES_PER_STACK_FRAME + genfp->numSlots();
|
||||
Value *firstUnused = ensureOnTop(cx, REPORT_ERROR, nvars, CAN_EXTEND, &gfg->pushedSeg_);
|
||||
if (!firstUnused)
|
||||
return false;
|
||||
|
@ -1020,13 +932,15 @@ ContextStack::pushGeneratorFrame(JSContext *cx, JSGenerator *gen, GeneratorFrame
|
|||
* We don't need to worry about generational barriers as the generator
|
||||
* object has a trace hook and cannot be nursery allocated.
|
||||
*/
|
||||
JS_ASSERT(gen->obj->getClass()->trace);
|
||||
JSObject::writeBarrierPre(gen->obj);
|
||||
JSObject *genobj = js_FloatingFrameToGenerator(genfp)->obj;
|
||||
JS_ASSERT(genobj->getClass()->trace);
|
||||
JSObject::writeBarrierPre(genobj);
|
||||
|
||||
/* Copy from the generator's floating frame to the stack. */
|
||||
stackfp->copyFrameAndValues<Value, HeapValue, StackFrame::NoPostBarrier>(
|
||||
cx, stackfp, stackvp, gen->fp, genvp, gen->regs.sp);
|
||||
stackfp->stealFrameAndSlots<Value, HeapValue, StackFrame::NoPostBarrier>(
|
||||
cx, stackfp, stackvp, genfp, genvp, gen->regs.sp);
|
||||
stackfp->resetGeneratorPrev(cx);
|
||||
stackfp->unsetFloatingGenerator();
|
||||
gfg->regs_.rebaseFromTo(gen->regs, *stackfp);
|
||||
|
||||
gfg->prevRegs_ = seg_->pushRegs(gfg->regs_);
|
||||
|
@ -1039,17 +953,18 @@ void
|
|||
ContextStack::popGeneratorFrame(const GeneratorFrameGuard &gfg)
|
||||
{
|
||||
JSGenerator *gen = gfg.gen_;
|
||||
HeapValue *genvp = gen->stackSnapshot;
|
||||
JS_ASSERT(genvp == HeapValueify(gen->fp->generatorArgsSnapshotBegin()));
|
||||
StackFrame *genfp = gen->floatingFrame();
|
||||
HeapValue *genvp = gen->floatingStack;
|
||||
|
||||
const FrameRegs &stackRegs = gfg.regs_;
|
||||
StackFrame *stackfp = stackRegs.fp();
|
||||
Value *stackvp = gfg.stackvp_;
|
||||
|
||||
/* Copy from the stack to the generator's floating frame. */
|
||||
gen->regs.rebaseFromTo(stackRegs, *gen->fp);
|
||||
gen->fp->copyFrameAndValues<HeapValue, Value, StackFrame::DoPostBarrier>(
|
||||
cx_, gen->fp, genvp, stackfp, stackvp, stackRegs.sp);
|
||||
gen->regs.rebaseFromTo(stackRegs, *genfp);
|
||||
genfp->stealFrameAndSlots<HeapValue, Value, StackFrame::DoPostBarrier>(
|
||||
cx_, genfp, genvp, stackfp, stackvp, stackRegs.sp);
|
||||
genfp->setFloatingGenerator();
|
||||
|
||||
/* ~FrameGuard/popFrame will finish the popping. */
|
||||
JS_ASSERT(ImplicitCast<const FrameGuard>(gfg).pushed());
|
||||
|
@ -1098,7 +1013,7 @@ StackIter::popFrame()
|
|||
JS_ASSERT(seg_->contains(oldfp));
|
||||
fp_ = fp_->prev();
|
||||
if (seg_->contains(fp_)) {
|
||||
InlinedSite *inline_;
|
||||
JSInlinedSite *inline_;
|
||||
pc_ = oldfp->prevpc(&inline_);
|
||||
JS_ASSERT(!inline_);
|
||||
|
||||
|
@ -1111,7 +1026,7 @@ StackIter::popFrame()
|
|||
*/
|
||||
if (oldfp->isGeneratorFrame()) {
|
||||
/* Generator's args do not overlap with the caller's expr stack. */
|
||||
sp_ = oldfp->generatorArgsSnapshotBegin();
|
||||
sp_ = (Value *)oldfp->actualArgs() - 2;
|
||||
} else if (oldfp->isNonEvalFunctionFrame()) {
|
||||
/*
|
||||
* When Invoke is called from a native, there will be an enclosing
|
||||
|
@ -1121,7 +1036,7 @@ StackIter::popFrame()
|
|||
* cases, the actual arguments of the callee should be included in
|
||||
* the caller's expr stack.
|
||||
*/
|
||||
sp_ = oldfp->actuals() + oldfp->numActualArgs();
|
||||
sp_ = oldfp->actualArgsEnd();
|
||||
} else if (oldfp->isFramePushedByExecute()) {
|
||||
/* pushExecuteFrame pushes exactly (callee, this) before frame. */
|
||||
sp_ = (Value *)oldfp - 2;
|
||||
|
@ -1176,8 +1091,7 @@ StackIter::startOnSegment(StackSegment *seg)
|
|||
static void JS_NEVER_INLINE
|
||||
CrashIfInvalidSlot(StackFrame *fp, Value *vp)
|
||||
{
|
||||
Value *slots = (Value *)(fp + 1);
|
||||
if (vp < slots || vp >= slots + fp->script()->nslots) {
|
||||
if (vp < fp->slots() || vp >= fp->slots() + fp->script()->nslots) {
|
||||
JS_ASSERT(false && "About to dereference invalid slot");
|
||||
*(int *)0xbad = 0; // show up nicely in crash-stats
|
||||
MOZ_Assert("About to dereference invalid slot", __FILE__, __LINE__);
|
||||
|
|
|
@ -14,7 +14,14 @@
|
|||
struct JSContext;
|
||||
struct JSCompartment;
|
||||
|
||||
extern void js_DumpStackFrame(JSContext *, js::StackFrame *);
|
||||
#ifdef JS_METHODJIT
|
||||
namespace js { namespace mjit { struct CallSite; }}
|
||||
typedef js::mjit::CallSite JSInlinedSite;
|
||||
#else
|
||||
struct JSInlinedSite {};
|
||||
#endif
|
||||
|
||||
typedef /* js::mjit::RejoinState */ size_t JSRejoinState;
|
||||
|
||||
namespace js {
|
||||
|
||||
|
@ -36,24 +43,14 @@ class ScriptFrameIter;
|
|||
class AllFramesIter;
|
||||
|
||||
class ArgumentsObject;
|
||||
class ScopeCoordinate;
|
||||
class ScopeObject;
|
||||
class StaticBlockObject;
|
||||
|
||||
#ifdef JS_METHODJIT
|
||||
namespace mjit {
|
||||
class CallCompiler;
|
||||
class GetPropCompiler;
|
||||
struct CallSite;
|
||||
struct JITScript;
|
||||
jsbytecode *NativeToPC(JITScript *jit, void *ncode, CallSite **pinline);
|
||||
namespace ic { struct GetElementIC; }
|
||||
}
|
||||
typedef mjit::CallSite InlinedSite;
|
||||
#else
|
||||
struct InlinedSite {};
|
||||
#endif
|
||||
typedef size_t FrameRejoinState;
|
||||
|
||||
namespace detail {
|
||||
struct OOMCheck;
|
||||
|
@ -64,9 +61,10 @@ namespace detail {
|
|||
/*
|
||||
* VM stack layout
|
||||
*
|
||||
* SpiderMonkey uses a per-runtime stack to store the activation records,
|
||||
* SpiderMonkey uses a per-thread stack to store the activation records,
|
||||
* parameters, locals, and expression temporaries for the stack of actively
|
||||
* executing scripts, functions and generators.
|
||||
* executing scripts, functions and generators. The stack is owned by the
|
||||
* StackSpace object stored in the runtime.
|
||||
*
|
||||
* The stack is subdivided into contiguous segments of memory which
|
||||
* have a memory layout invariant that allows fixed offsets to be used for stack
|
||||
|
@ -78,13 +76,13 @@ namespace detail {
|
|||
* A sample memory layout of a segment looks like:
|
||||
*
|
||||
* regs
|
||||
* .------------------------------------------------.
|
||||
* | V
|
||||
* | fp .--FrameRegs--. sp
|
||||
* | V V
|
||||
* |StackSegment| values |StackFrame| values |StackFrame| values |
|
||||
* | ^ |
|
||||
* ? <-----------' `------------'
|
||||
* .---------------------------------------------.
|
||||
* | V
|
||||
* | fp .--FrameRegs--. sp
|
||||
* | V V
|
||||
* |StackSegment| slots |StackFrame| slots |StackFrame| slots |
|
||||
* | ^ |
|
||||
* ? <----------' `-----------'
|
||||
* prev prev
|
||||
*
|
||||
* A segment starts with a fixed-size header (js::StackSegment) which logically
|
||||
|
@ -92,14 +90,14 @@ namespace detail {
|
|||
* end of the stack.
|
||||
*
|
||||
* Each script activation (global or function code) is given a fixed-size header
|
||||
* (js::StackFrame) which is associated with the values before and after it.
|
||||
* The frame contains bookkeeping information about the activation and links to
|
||||
* the previous frame.
|
||||
* (js::StackFrame) which is associated with the values (called "slots") before
|
||||
* and after it. The frame contains bookkeeping information about the activation
|
||||
* and links to the previous frame.
|
||||
*
|
||||
* The values preceding a (function) StackFrame in memory are the arguments of
|
||||
* the call. The values after a StackFrame in memory are its locals followed by
|
||||
* The slots preceding a (function) StackFrame in memory are the arguments of
|
||||
* the call. The slots after a StackFrame in memory are its locals followed by
|
||||
* its expression stack. There is no clean line between the arguments of a
|
||||
* frame and the expression stack of the previous frame since the top values of
|
||||
* frame and the expression stack of the previous frame since the top slots of
|
||||
* the expression become the arguments of a call. There are also layout
|
||||
* invariants concerning the arguments and StackFrame; see "Arguments" comment
|
||||
* in StackFrame for more details.
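One way to make the "no clean line" remark concrete: the callee's actual arguments are simply the top values of the caller's expression stack, so pushing a call copies nothing. The sketch below models one segment as a flat array of values; the numbers and the callee/this pair are illustrative only, and frame headers are omitted.

#include <cassert>
#include <vector>

int main()
{
    // One contiguous region of a segment, modelled as a flat vector of values;
    // real segments interleave StackFrame headers with these values.
    std::vector<int> stack;

    // The caller evaluates callee, |this|, and two arguments onto its
    // expression stack (arbitrary stand-in numbers).
    stack.insert(stack.end(), { /* callee */ 100, /* this */ 200, 7, 8 });

    // Pushing the call reuses that memory: the two values on top of the
    // caller's expression stack become the callee's actual arguments, and the
    // callee's frame header would be laid out immediately after them.
    std::size_t argc = 2;
    const int *actuals = &stack[stack.size() - argc];
    assert(actuals[0] == 7 && actuals[1] == 8);
    return 0;
}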
|
||||
|
@ -115,20 +113,20 @@ namespace detail {
|
|||
* A call to a native (C++) function does not push a frame. Instead, an array
|
||||
* of values is passed to the native. The layout of this array is abstracted by
|
||||
* js::CallArgs. With respect to the StackSegment layout above, the args to a
|
||||
* native call are inserted anywhere there can be values. A sample memory layout
|
||||
* native call are inserted anywhere there can be slots. A sample memory layout
|
||||
* looks like:
|
||||
*
|
||||
* regs
|
||||
* .------------------------------------------.
|
||||
* | V
|
||||
* | fp .--FrameRegs--. sp
|
||||
* | V V
|
||||
* |StackSegment| native call | values |StackFrame| values | native call |
|
||||
* | vp <--argc--> end vp <--argc--> end
|
||||
* | CallArgs <------------------------------ CallArgs
|
||||
* | prev ^
|
||||
* `-------------------------------------------------------'
|
||||
* calls
|
||||
* .----------------------------------------.
|
||||
* | V
|
||||
* | fp .--FrameRegs--. sp
|
||||
* | V V
|
||||
* |StackSegment| native call | slots |StackFrame| slots | native call |
|
||||
* | vp <--argc--> end vp <--argc--> end
|
||||
* | CallArgs <------------------------------ CallArgs
|
||||
* | prev ^
|
||||
* `-----------------------------------------------------'
|
||||
* calls
|
||||
*
|
||||
* Here there are two native calls on the stack. The start of each native arg
|
||||
* range is recorded by a CallArgs element which is prev-linked like stack
|
||||
|
@ -295,15 +293,11 @@ CallArgsListFromVp(unsigned argc, Value *vp, CallArgsList *prev)
|
|||
|
||||
/*****************************************************************************/
|
||||
|
||||
enum MaybeCheckAliasing { CHECK_ALIASING = true, DONT_CHECK_ALIASING = false };
|
||||
|
||||
/*****************************************************************************/
|
||||
|
||||
/* Flags specified for a frame as it is constructed. */
|
||||
enum InitialFrameFlags {
|
||||
INITIAL_NONE = 0,
|
||||
INITIAL_CONSTRUCT = 0x40, /* == StackFrame::CONSTRUCTING, asserted below */
|
||||
INITIAL_LOWERED = 0x100000 /* == StackFrame::LOWERED_CALL_APPLY, asserted below */
|
||||
INITIAL_CONSTRUCT = 0x80, /* == StackFrame::CONSTRUCTING, asserted below */
|
||||
INITIAL_LOWERED = 0x200000 /* == StackFrame::LOWERED_CALL_APPLY, asserted below */
|
||||
};
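The "== StackFrame::CONSTRUCTING, asserted below" notes refer to a compile-time check that keeps the two enums in sync. A minimal standalone version of that idiom, using stand-in enum names and the bit values from one side of this hunk:

// Stand-in enums; the real ones are StackFrame::Flags and InitialFrameFlags.
enum ModelFrameFlags {
    MODEL_CONSTRUCTING       = 0x40,
    MODEL_LOWERED_CALL_APPLY = 0x100000
};

enum ModelInitialFrameFlags {
    MODEL_INITIAL_NONE      = 0,
    MODEL_INITIAL_CONSTRUCT = 0x40,      /* must equal MODEL_CONSTRUCTING */
    MODEL_INITIAL_LOWERED   = 0x100000   /* must equal MODEL_LOWERED_CALL_APPLY */
};

// The "asserted below" part: a build break if the mirrored values ever drift.
static_assert(MODEL_INITIAL_CONSTRUCT == MODEL_CONSTRUCTING,
              "InitialFrameFlags must mirror StackFrame flag bits");
static_assert(MODEL_INITIAL_LOWERED == MODEL_LOWERED_CALL_APPLY,
              "InitialFrameFlags must mirror StackFrame flag bits");

int main() { return 0; }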
|
||||
|
||||
enum ExecuteType {
|
||||
|
@ -328,34 +322,30 @@ class StackFrame
|
|||
EVAL = 0x8, /* frame pushed for eval() or debugger eval */
|
||||
DEBUGGER = 0x10, /* frame pushed for debugger eval */
|
||||
GENERATOR = 0x20, /* frame is associated with a generator */
|
||||
CONSTRUCTING = 0x40, /* frame is for a constructor invocation */
|
||||
FLOATING_GENERATOR = 0x40, /* frame is is in generator obj, not on stack */
|
||||
CONSTRUCTING = 0x80, /* frame is for a constructor invocation */
|
||||
|
||||
/* Temporary frame states */
|
||||
YIELDING = 0x80, /* Interpret dispatched JSOP_YIELD */
|
||||
FINISHED_IN_INTERP = 0x100, /* set if frame finished in Interpret() */
|
||||
YIELDING = 0x100, /* js::Interpret dispatched JSOP_YIELD */
|
||||
FINISHED_IN_INTERP = 0x200, /* set if frame finished in Interpret() */
|
||||
|
||||
/* Function arguments */
|
||||
OVERFLOW_ARGS = 0x200, /* numActualArgs > numFormalArgs */
|
||||
UNDERFLOW_ARGS = 0x400, /* numActualArgs < numFormalArgs */
|
||||
|
||||
/* Function prologue state */
|
||||
HAS_CALL_OBJ = 0x800, /* CallObject created for heavyweight fun */
|
||||
HAS_ARGS_OBJ = 0x1000, /* ArgumentsObject created for needsArgsObj script */
|
||||
OVERFLOW_ARGS = 0x400, /* numActualArgs > numFormalArgs */
|
||||
UNDERFLOW_ARGS = 0x800, /* numActualArgs < numFormalArgs */
|
||||
|
||||
/* Lazy frame initialization */
|
||||
HAS_HOOK_DATA = 0x2000, /* frame has hookData_ set */
|
||||
HAS_ANNOTATION = 0x4000, /* frame has annotation_ set */
|
||||
HAS_RVAL = 0x8000, /* frame has rval_ set */
|
||||
HAS_SCOPECHAIN = 0x10000, /* frame has scopeChain_ set */
|
||||
HAS_PREVPC = 0x20000, /* frame has prevpc_ and prevInline_ set */
|
||||
HAS_BLOCKCHAIN = 0x40000, /* frame has blockChain_ set */
|
||||
HAS_CALL_OBJ = 0x1000, /* frame has a callobj reachable from scopeChain_ */
|
||||
HAS_ARGS_OBJ = 0x2000, /* frame has an argsobj in StackFrame::args */
|
||||
HAS_HOOK_DATA = 0x4000, /* frame has hookData_ set */
|
||||
HAS_ANNOTATION = 0x8000, /* frame has annotation_ set */
|
||||
HAS_RVAL = 0x10000, /* frame has rval_ set */
|
||||
HAS_SCOPECHAIN = 0x20000, /* frame has scopeChain_ set */
|
||||
HAS_PREVPC = 0x40000, /* frame has prevpc_ and prevInline_ set */
|
||||
HAS_BLOCKCHAIN = 0x80000, /* frame has blockChain_ set */
|
||||
|
||||
/* Method JIT state */
|
||||
DOWN_FRAMES_EXPANDED = 0x80000, /* inlining in down frames has been expanded */
|
||||
LOWERED_CALL_APPLY = 0x100000, /* Pushed by a lowered call/apply */
|
||||
|
||||
/* Debugger state */
|
||||
PREV_UP_TO_DATE = 0x200000 /* see DebugScopes::updateLiveScopes */
|
||||
DOWN_FRAMES_EXPANDED = 0x100000, /* inlining in down frames has been expanded */
|
||||
LOWERED_CALL_APPLY = 0x200000 /* Pushed by a lowered call/apply */
|
||||
};
|
||||
|
||||
private:
|
||||
|
@ -365,20 +355,22 @@ class StackFrame
|
|||
JSFunction *fun; /* function frame, pre GetScopeChain */
|
||||
} exec;
|
||||
union { /* describes the arguments of a function */
|
||||
unsigned nactual; /* for non-eval frames */
|
||||
unsigned nactual; /* for non-eval frames */
|
||||
JSScript *evalScript; /* the script of an eval-in-function */
|
||||
} u;
|
||||
mutable JSObject *scopeChain_; /* if HAS_SCOPECHAIN, current scope chain */
|
||||
StackFrame *prev_; /* if HAS_PREVPC, previous cx->regs->fp */
|
||||
void *ncode_; /* for a jit frame, return address for method JIT */
|
||||
Value rval_; /* if HAS_RVAL, return value of the frame */
|
||||
StaticBlockObject *blockChain_; /* if HAS_BLOCKCHAIN, innermost let block */
|
||||
ArgumentsObject *argsObj_; /* if HAS_ARGS_OBJ, the call's arguments object */
|
||||
jsbytecode *prevpc_; /* if HAS_PREVPC, pc of previous frame*/
|
||||
InlinedSite *prevInline_; /* for a jit frame, inlined site in previous frame */
|
||||
void *hookData_; /* if HAS_HOOK_DATA, closure returned by call hook */
|
||||
void *annotation_; /* if HAS_ANNOTATION, perhaps remove with bug 546848 */
|
||||
FrameRejoinState rejoin_; /* for a jit frame rejoining the interpreter
|
||||
mutable JSObject *scopeChain_; /* current scope chain */
|
||||
StackFrame *prev_; /* previous cx->regs->fp */
|
||||
void *ncode_; /* return address for method JIT */
|
||||
|
||||
/* Lazily initialized */
|
||||
Value rval_; /* return value of the frame */
|
||||
StaticBlockObject *blockChain_; /* innermost let block */
|
||||
ArgumentsObject *argsObj_; /* if has HAS_ARGS_OBJ */
|
||||
jsbytecode *prevpc_; /* pc of previous frame*/
|
||||
JSInlinedSite *prevInline_; /* inlined site in previous frame */
|
||||
void *hookData_; /* closure returned by call hook */
|
||||
void *annotation_; /* perhaps remove with bug 546848 */
|
||||
JSRejoinState rejoin_; /* If rejoining into the interpreter
|
||||
* from JIT code, state at rejoin. */
|
||||
|
||||
static void staticAsserts() {
|
||||
|
@ -387,39 +379,15 @@ class StackFrame
|
|||
}
|
||||
|
||||
inline void initPrev(JSContext *cx);
|
||||
jsbytecode *prevpcSlow(InlinedSite **pinlined);
|
||||
void writeBarrierPost();
|
||||
jsbytecode *prevpcSlow(JSInlinedSite **pinlined);
|
||||
|
||||
public:
|
||||
/*
|
||||
* These utilities provide raw access to the values associated with a
|
||||
* StackFrame (see "VM stack layout" comment). The utilities are private
|
||||
* since they are not able to assert that only unaliased vars/formals are
|
||||
* accessed. Normal code should prefer the StackFrame::unaliased* members
|
||||
* (or FrameRegs::stackDepth for the usual "depth is at least" assertions).
|
||||
*/
|
||||
Value *slots() const { return (Value *)(this + 1); }
|
||||
Value *base() const { return slots() + script()->nfixed; }
|
||||
Value *formals() const { return (Value *)this - fun()->nargs; }
|
||||
Value *actuals() const { return formals() - (flags_ & OVERFLOW_ARGS ? 2 + u.nactual : 0); }
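The four accessors above are pure pointer arithmetic around the frame header: locals directly after it, formals directly before it. The following self-contained model mimics that arithmetic with a fake header in a flat buffer; the type and field names are invented, and the overflow-args adjustment is left out.

#include <cassert>
#include <cstdint>
#include <new>

// Fake "Value" and frame header, only to demonstrate the arithmetic of
// slots()/base()/formals().
typedef uint64_t ModelValue;

struct ModelFrame {
    uint32_t nargs;    // number of formal arguments
    uint32_t nfixed;   // number of fixed local slots

    ModelValue *slots()   { return reinterpret_cast<ModelValue *>(this + 1); }
    ModelValue *base()    { return slots() + nfixed; }
    ModelValue *formals() { return reinterpret_cast<ModelValue *>(this) - nargs; }
};

int main()
{
    // Lay the frame out inside one buffer: [2 formals][header][4 fixed slots].
    alignas(ModelValue) unsigned char buf[2 * sizeof(ModelValue) + sizeof(ModelFrame) +
                                          4 * sizeof(ModelValue)] = {};
    ModelFrame *fp = new (buf + 2 * sizeof(ModelValue)) ModelFrame{2, 4};

    assert(fp->formals() == reinterpret_cast<ModelValue *>(buf));   // just below the header
    assert(fp->slots()   == reinterpret_cast<ModelValue *>(fp + 1)); // just above the header
    assert(fp->base()    == fp->slots() + 4);                        // expression stack base
    return 0;
}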
|
||||
|
||||
friend class FrameRegs;
|
||||
friend class ContextStack;
|
||||
friend class StackSpace;
|
||||
friend class StackIter;
|
||||
friend class CallObject;
|
||||
friend class ClonedBlockObject;
|
||||
friend class ArgumentsObject;
|
||||
friend void ::js_DumpStackFrame(JSContext *, StackFrame *);
|
||||
friend void ::js_ReportIsNotFunction(JSContext *, const js::Value *, unsigned);
|
||||
#ifdef JS_METHODJIT
|
||||
friend class mjit::CallCompiler;
|
||||
friend class mjit::GetPropCompiler;
|
||||
friend class mjit::ic::GetElementIC;
|
||||
#endif
|
||||
|
||||
/*
|
||||
* Frame initialization, called by ContextStack operations after acquiring
|
||||
* the raw memory for the frame:
|
||||
* Frame initialization
|
||||
*
|
||||
* After acquiring a pointer to an uninitialized stack frame on the VM
|
||||
* stack from StackSpace, these members are used to initialize the stack
|
||||
* frame before officially pushing the frame into the context.
|
||||
*/
|
||||
|
||||
/* Used for Invoke, Interpret, trace-jit LeaveTree, and method-jit stubs. */
|
||||
|
@ -433,39 +401,19 @@ class StackFrame
|
|||
void initExecuteFrame(JSScript *script, StackFrame *prev, FrameRegs *regs,
|
||||
const Value &thisv, JSObject &scopeChain, ExecuteType type);
|
||||
|
||||
/* Used when activating generators. */
|
||||
enum TriggerPostBarriers {
|
||||
DoPostBarrier = true,
|
||||
NoPostBarrier = false
|
||||
};
|
||||
template <class T, class U, TriggerPostBarriers doPostBarrier>
|
||||
void stealFrameAndSlots(JSContext *cx, StackFrame *fp, T *vp,
|
||||
StackFrame *otherfp, U *othervp, Value *othersp);
|
||||
void writeBarrierPost();
|
||||
|
||||
/* Perhaps one fine day we will remove dummy frames. */
|
||||
void initDummyFrame(JSContext *cx, JSObject &chain);
|
||||
|
||||
public:
|
||||
/*
|
||||
* Frame prologue/epilogue
|
||||
*
|
||||
* Every stack frame must have 'prologue' called before executing the
|
||||
* first op and 'epilogue' called after executing the last op and before
|
||||
* popping the frame (whether the exit is exceptional or not).
|
||||
*
|
||||
* For inline JS calls/returns, it is easy to call the prologue/epilogue
|
||||
* exactly once. When calling JS from C++, Invoke/Execute push the stack
|
||||
* frame but do *not* call the prologue/epilogue. That means Interpret
|
||||
* must call the prologue/epilogue for the entry frame. This scheme
|
||||
* simplifies jit compilation.
|
||||
*
|
||||
* The 'newType' option indicates whether the constructed 'this' value (if
|
||||
* there is one) should be given a new singleton type.
|
||||
*/
|
||||
|
||||
bool prologue(JSContext *cx, bool newType);
|
||||
void epilogue(JSContext *cx);
|
||||
|
||||
/*
|
||||
* Optimized path for the jit heavyweight function frame prologue. This
|
||||
* does not include constructing 'this'.
|
||||
*/
|
||||
inline bool heavyweightFunctionPrologue(JSContext *cx);
|
||||
|
||||
/* Initialize local variables of newly-pushed frame. */
|
||||
void initVarsToUndefined();
|
||||
|
||||
/*
|
||||
* Stack frame type
|
||||
*
|
||||
|
@ -554,124 +502,39 @@ class StackFrame
|
|||
}
|
||||
|
||||
inline void resetGeneratorPrev(JSContext *cx);
|
||||
inline void resetInlinePrev(StackFrame *prevfp, jsbytecode *prevpc);
|
||||
|
||||
/*
|
||||
* (Unaliased) locals and arguments
|
||||
*
|
||||
* Only non-eval function frames have arguments. The arguments pushed by
|
||||
* the caller are the 'actual' arguments. The declared arguments of the
|
||||
* callee are the 'formal' arguments. When the caller passes no more actual
|
||||
* arguments than formals, the actual and formal arguments are the same array
|
||||
* (but with different extents). When the caller passes too many arguments,
|
||||
* the formal subset of the actual arguments is copied onto the top of the
|
||||
* stack. This allows the engine to maintain a jit-time constant offset of
|
||||
* arguments from the frame pointer. Since the formal subset of the actual
|
||||
* arguments is potentially on the stack twice, it is important for all
|
||||
* reads/writes to refer to the same canonical memory location. This is
|
||||
* abstracted by the unaliased{Formal,Actual} methods.
|
||||
*
|
||||
* When a local/formal variable is "aliased" (accessed by nested closures,
|
||||
* dynamic scope operations, or 'arguments'), the canonical location for
|
||||
* that value is the slot of an activation object (scope or arguments).
|
||||
* Currently, all variables are given slots in *both* the stack frame and
|
||||
* heap objects, even though, as just described, only one should ever be
|
||||
* accessed. Thus, it is up to the code performing an access to access the
|
||||
* correct value. These functions assert that accesses to stack values are
|
||||
* unaliased; aliased accesses must go through the canonical heap location.
|
||||
*/
|
||||
|
||||
inline Value &unaliasedVar(unsigned i, MaybeCheckAliasing = CHECK_ALIASING);
|
||||
inline Value &unaliasedLocal(unsigned i, MaybeCheckAliasing = CHECK_ALIASING);
|
||||
|
||||
bool hasArgs() const { return isNonEvalFunctionFrame(); }
|
||||
inline Value &unaliasedFormal(unsigned i, MaybeCheckAliasing = CHECK_ALIASING);
|
||||
inline Value &unaliasedActual(unsigned i);
|
||||
template <class Op> inline void forEachUnaliasedActual(Op op);
|
||||
|
||||
inline unsigned numFormalArgs() const;
|
||||
inline unsigned numActualArgs() const;
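The aliasing rule in the comment above (every variable has a frame slot, aliased variables also have a heap slot, and only the canonical one may be used) can be sketched as follows. The 'aliased' flag and the two storage vectors are a simplification of script()->varIsAliased and the scope object's slots, not the real data structures.

#include <cassert>
#include <vector>

// Simplified storage: every variable has a frame slot, and aliased variables
// additionally have a slot in a heap "scope object".
struct ModelActivation {
    std::vector<int>  frameSlots;
    std::vector<int>  scopeSlots;
    std::vector<bool> aliased;     // per variable: is the canonical copy on the heap?

    // Canonical read: aliased variables must go through the scope object,
    // unaliased ones through the frame (which is what unaliasedVar asserts).
    int readVar(unsigned i) const {
        return aliased[i] ? scopeSlots[i] : frameSlots[i];
    }
};

int main()
{
    ModelActivation a;
    a.frameSlots = { 1, 2 };
    a.scopeSlots = { 0, 20 };      // only slot 1 is meaningful on the heap
    a.aliased    = { false, true };

    assert(a.readVar(0) == 1);     // unaliased: frame slot is canonical
    assert(a.readVar(1) == 20);    // aliased: scope-object slot is canonical
    return 0;
}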
|
||||
|
||||
/*
|
||||
* Arguments object
|
||||
*
|
||||
* If a non-eval function has script->needsArgsObj, an arguments object is
|
||||
* created in the prologue and stored in the local variable for the
|
||||
* 'arguments' binding (script->argumentsLocal). Since this local is
|
||||
* mutable, the arguments object can be overwritten and we can "lose" the
|
||||
* arguments object. Thus, StackFrame keeps an explicit argsObj_ field so
|
||||
* that the original arguments object is always available.
|
||||
*/
|
||||
|
||||
ArgumentsObject &argsObj() const;
|
||||
void initArgsObj(ArgumentsObject &argsobj);
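The point of the dedicated argsObj_ field described above is that the 'arguments' local can be reassigned by the script without losing the original object. A toy model of that retention; the field and member names are stand-ins, not the real StackFrame layout.

#include <cassert>

struct ModelArgsObject { int length; };

// Model frame: the 'arguments' binding is an ordinary mutable local, while
// argsObj keeps the original object reachable regardless of what the script
// later stores into that local.
struct ModelFrame {
    ModelArgsObject *argsLocal;   // the script-visible, overwritable binding
    ModelArgsObject *argsObj;     // the frame's own reference

    void initArgsObj(ModelArgsObject &obj) { argsLocal = &obj; argsObj = &obj; }
};

int main()
{
    ModelArgsObject original = { 3 };
    ModelFrame fp = { nullptr, nullptr };
    fp.initArgsObj(original);

    fp.argsLocal = nullptr;              // script does: arguments = null
    assert(fp.argsObj == &original);     // the engine can still reach the object
    return 0;
}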
|
||||
inline void initInlineFrame(JSFunction *fun, StackFrame *prevfp, jsbytecode *prevpc);
|
||||
|
||||
inline JSObject *createRestParameter(JSContext *cx);
|
||||
|
||||
/*
|
||||
* Scope chain
|
||||
* Frame slots
|
||||
*
|
||||
* In theory, the scope chain would contain an object for every lexical
|
||||
* scope. However, only objects that are required for dynamic lookup are
|
||||
* actually created.
|
||||
*
|
||||
* Given that a (non-dummy) StackFrame corresponds roughly to an ES5
|
||||
* Execution Context (ES5 10.3), StackFrame::varObj corresponds to the
|
||||
* VariableEnvironment component of an Execution Context. Intuitively, the
|
||||
* variables object is where new bindings (variables and functions) are
|
||||
* stored. One might expect that this is either the Call object or
|
||||
* scopeChain.globalObj for function or global code, respectively, however
|
||||
* the JSAPI allows calls of Execute to specify a variables object on the
|
||||
* scope chain other than the call/global object. This allows embeddings to
|
||||
* run multiple scripts under the same global, each time using a new
|
||||
* variables object to collect and discard the script's global variables.
|
||||
* A frame's 'slots' are the fixed slots associated with the frame (like
|
||||
* local variables) followed by an expression stack holding temporary
|
||||
* values. A frame's 'base' is the base of the expression stack.
|
||||
*/
|
||||
|
||||
inline HandleObject scopeChain() const;
|
||||
|
||||
inline ScopeObject &aliasedVarScope(ScopeCoordinate sc) const;
|
||||
inline GlobalObject &global() const;
|
||||
inline CallObject &callObj() const;
|
||||
inline JSObject &varObj();
|
||||
|
||||
inline void pushOnScopeChain(ScopeObject &scope);
|
||||
inline void popOffScopeChain();
|
||||
|
||||
/*
|
||||
* Block chain
|
||||
*
|
||||
* Entering/leaving a let (or exception) block may do 1 or 2 things: First,
|
||||
* a static block object (created at compiled time and stored in the
|
||||
* script) is pushed on StackFrame::blockChain. Second, the static block
|
||||
* may be cloned to hold the dynamic values if this is needed for dynamic
|
||||
* scope access. A clone is created for a static block iff
|
||||
* StaticBlockObject::needsClone.
|
||||
*/
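pushBlock/popBlock (shown earlier in this patch) maintain that invariant: entering a block links the static block onto the chain and clones it only if needed; leaving reverses both steps. A schematic of the pairing with placeholder types; the clone counter stands in for the scope-chain pushes/pops and the 'put' of stack values done by the real code.

#include <cassert>

struct ModelStaticBlock {
    ModelStaticBlock *enclosing;
    bool needsClone;               // true iff some binding of the block is aliased
};

// Minimal frame model: a block chain plus a count of cloned scope objects.
struct ModelFrame {
    ModelStaticBlock *blockChain = nullptr;
    int clonedScopes = 0;

    void pushBlock(ModelStaticBlock &block) {
        block.enclosing = blockChain;
        if (block.needsClone)
            clonedScopes++;        // real code clones the block onto the scope chain
        blockChain = &block;
    }

    void popBlock() {
        if (blockChain->needsClone)
            clonedScopes--;        // real code 'put's the stack values, then pops the clone
        blockChain = blockChain->enclosing;
    }
};

int main()
{
    ModelFrame fp;
    ModelStaticBlock outer = { nullptr, false };
    ModelStaticBlock inner = { nullptr, true };

    fp.pushBlock(outer);
    fp.pushBlock(inner);           // aliased bindings: a clone is created
    assert(fp.clonedScopes == 1);
    fp.popBlock();
    fp.popBlock();
    assert(fp.blockChain == nullptr && fp.clonedScopes == 0);
    return 0;
}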
|
||||
|
||||
bool hasBlockChain() const {
|
||||
return (flags_ & HAS_BLOCKCHAIN) && blockChain_;
|
||||
Value *slots() const {
|
||||
return (Value *)(this + 1);
|
||||
}
|
||||
|
||||
StaticBlockObject *maybeBlockChain() {
|
||||
return (flags_ & HAS_BLOCKCHAIN) ? blockChain_ : NULL;
|
||||
Value *base() const {
|
||||
return slots() + script()->nfixed;
|
||||
}
|
||||
|
||||
StaticBlockObject &blockChain() const {
|
||||
JS_ASSERT(hasBlockChain());
|
||||
return *blockChain_;
|
||||
Value &varSlot(unsigned i) {
|
||||
JS_ASSERT(i < script()->nfixed);
|
||||
JS_ASSERT_IF(maybeFun(), i < script()->bindings.numVars());
|
||||
return slots()[i];
|
||||
}
|
||||
|
||||
bool pushBlock(JSContext *cx, StaticBlockObject &block);
|
||||
void popBlock(JSContext *cx);
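
    /*
     * Sketch of the pairing described above; 'EnterLetBlock' is a hypothetical
     * helper, not part of this patch. A static block is pushed on entry to the
     * lexical block and popped on exit.
     */
    static bool EnterLetBlock(JSContext *cx, StackFrame *fp, StaticBlockObject &block) {
        if (!fp->pushBlock(cx, block))
            return false;
        bool ok = true;       /* ... execute the block's bytecode ... */
        fp->popBlock(cx);
        return ok;
    }
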
    /*
     * With
     *
     * Entering/leaving a with (or E4X filter) block pushes/pops an object
     * on the scope chain. Pushing uses pushOnScopeChain, popping should use
     * popWith.
     */

    void popWith(JSContext *cx);
    Value &localSlot(unsigned i) {
        /* Let variables can be above script->nfixed. */
        JS_ASSERT(i < script()->nslots);
        return slots()[i];
    }
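
    /*
     * Layout sketch implied by the accessors above; 'FrameLayoutSanity' is a
     * hypothetical debugging helper. Fixed slots sit directly after the
     * StackFrame, and the expression stack begins at base().
     */
    static void FrameLayoutSanity(StackFrame *fp) {
        JS_ASSERT(fp->slots() == (Value *)(fp + 1));
        JS_ASSERT(fp->base() == fp->slots() + fp->script()->nfixed);
    }
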
    /*
     * Script

    @ -688,17 +551,6 @@ class StackFrame

     * the same VMFrame. Other calls force expansion of the inlined frames.
     */

    JSScript *script() const {
        JS_ASSERT(isScriptFrame());
        return isFunctionFrame()
               ? isEvalFrame() ? u.evalScript : fun()->script()
               : exec.script;
    }

    JSScript *maybeScript() const {
        return isScriptFrame() ? script() : NULL;
    }

    /*
     * Get the frame's current bytecode, assuming |this| is in |cx|. next is
     * the frame whose prev == this, or NULL if not known or if this == cx->fp().

    @ -714,12 +566,15 @@ class StackFrame

     *
     * Using next can avoid this, but in most cases prefer ScriptFrameIter;
     * it is amortized O(1).
     *
     * When I get to the bottom I go back to the top of the stack
     * Where I stop and I turn and I go right back
     * Till I get to the bottom and I see you again...
     */

    jsbytecode *pcQuadratic(const ContextStack &stack, StackFrame *next = NULL,
                            InlinedSite **pinlined = NULL);
                            JSInlinedSite **pinlined = NULL);
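
    /*
     * Sketch of the iteration the comment above recommends instead of repeated
     * pcQuadratic() calls. 'CountScriptFrames' is a hypothetical helper; treat
     * the exact iterator API as an assumption of this sketch.
     */
    static unsigned CountScriptFrames(JSContext *cx) {
        unsigned n = 0;
        for (ScriptFrameIter i(cx); !i.done(); ++i)
            n++;
        return n;
    }
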
    jsbytecode *prevpc(InlinedSite **pinlined) {
    jsbytecode *prevpc(JSInlinedSite **pinlined) {
        if (flags_ & HAS_PREVPC) {
            if (pinlined)
                *pinlined = prevInline_;

    @ -728,11 +583,45 @@ class StackFrame

        return prevpcSlow(pinlined);
    }

    InlinedSite *prevInline() {
    JSInlinedSite *prevInline() {
        JS_ASSERT(flags_ & HAS_PREVPC);
        return prevInline_;
    }

    JSScript *script() const {
        JS_ASSERT(isScriptFrame());
        return isFunctionFrame()
               ? isEvalFrame() ? u.evalScript : fun()->script()
               : exec.script;
    }

    JSScript *functionScript() const {
        JS_ASSERT(isFunctionFrame());
        return isEvalFrame() ? u.evalScript : fun()->script();
    }

    JSScript *globalScript() const {
        JS_ASSERT(isGlobalFrame());
        return exec.script;
    }

    JSScript *maybeScript() const {
        return isScriptFrame() ? script() : NULL;
    }

    size_t numFixed() const {
        return script()->nfixed;
    }

    size_t numSlots() const {
        return script()->nslots;
    }

    size_t numGlobalVars() const {
        JS_ASSERT(isGlobalFrame());
        return exec.script->nfixed;
    }

    /*
     * Function
     *

    @ -760,6 +649,94 @@ class StackFrame

        return fp->script()->function();
    }
    /*
     * Arguments
     *
     * Only non-eval function frames have arguments. A frame follows its
     * arguments contiguously in memory. The arguments pushed by the caller are
     * the 'actual' arguments. The declared arguments of the callee are the
     * 'formal' arguments. When the caller passes no more actual arguments
     * than formals, the actual and formal arguments are the same array (but
     * with different extents). When the caller passes too many arguments, the
     * formal subset of the actual arguments is copied onto the top of the
     * stack. This allows the engine to maintain a jit-time constant offset of
     * arguments from the frame pointer. Since the formal subset of the actual
     * arguments is potentially on the stack twice, it is important for all
     * reads/writes to refer to the same canonical memory location.
     *
     * An arguments object (the object returned by the 'arguments' keyword) is
     * lazily created, so a given function frame may or may not have one.
     */

    /* True if this frame has arguments. Contrast with hasArgsObj. */
    bool hasArgs() const {
        return isNonEvalFunctionFrame();
    }

    unsigned numFormalArgs() const {
        JS_ASSERT(hasArgs());
        return fun()->nargs;
    }

    Value &formalArg(unsigned i) const {
        JS_ASSERT(i < numFormalArgs());
        return formalArgs()[i];
    }

    Value *formalArgs() const {
        JS_ASSERT(hasArgs());
        return (Value *)this - numFormalArgs();
    }

    Value *formalArgsEnd() const {
        JS_ASSERT(hasArgs());
        return (Value *)this;
    }

    Value *maybeFormalArgs() const {
        return (flags_ & (FUNCTION | EVAL)) == FUNCTION
               ? formalArgs()
               : NULL;
    }

    inline unsigned numActualArgs() const;
    inline Value *actualArgs() const;
    inline Value *actualArgsEnd() const;

    inline Value &canonicalActualArg(unsigned i) const;
    template <class Op>
    inline bool forEachCanonicalActualArg(Op op, unsigned start = 0, unsigned count = unsigned(-1));
    template <class Op> inline bool forEachFormalArg(Op op);
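
    /*
     * Sketch of the "canonical location" rule above; 'FirstActualArg' is a
     * hypothetical helper. Because overflowing actuals are copied, reads
     * should go through the canonical accessors rather than poking at
     * formalArgs()/actualArgs() directly.
     */
    static Value FirstActualArg(StackFrame *fp) {
        if (fp->numActualArgs() == 0)
            return UndefinedValue();
        return fp->canonicalActualArg(0);
    }
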
    /* XXX: all these argsObj functions will be removed with bug 659577. */

    bool hasArgsObj() const {
        /*
         * HAS_ARGS_OBJ is still technically not equivalent to
         * script()->needsArgsObj() during functionPrologue (where GC can
         * observe a frame that needsArgsObj but has not yet been given the
         * args). This can be fixed by creating and rooting the args/call
         * object before pushing the frame, which should be done eventually.
         */
        return !!(flags_ & HAS_ARGS_OBJ);
    }

    ArgumentsObject &argsObj() const {
        JS_ASSERT(hasArgsObj());
        return *argsObj_;
    }

    ArgumentsObject *maybeArgsObj() const {
        return hasArgsObj() ? &argsObj() : NULL;
    }

    void initArgsObj(ArgumentsObject &argsObj) {
        JS_ASSERT(script()->needsArgsObj());
        JS_ASSERT(!hasArgsObj());
        argsObj_ = &argsObj;
        flags_ |= HAS_ARGS_OBJ;
    }
    /*
     * This value
     *

    @ -770,25 +747,31 @@ class StackFrame

     * frames and directly before the frame for global frames. The *Args
     * members assert !isEvalFrame(), so we implement specialized inline
     * methods for accessing 'this'. When the caller has static knowledge that
     * a frame is a function, 'functionThis' allows more efficient access.
     * a frame is a function or global frame, 'functionThis' and 'globalThis',
     * respectively, allow more efficient access.
     */

    Value &functionThis() const {
        JS_ASSERT(isFunctionFrame());
        if (isEvalFrame())
            return ((Value *)this)[-1];
        return formals()[-1];
        return formalArgs()[-1];
    }

    JSObject &constructorThis() const {
        JS_ASSERT(hasArgs());
        return formals()[-1].toObject();
        return formalArgs()[-1].toObject();
    }

    Value &globalThis() const {
        JS_ASSERT(isGlobalFrame());
        return ((Value *)this)[-1];
    }

    Value &thisValue() const {
        if (flags_ & (EVAL | GLOBAL))
            return ((Value *)this)[-1];
        return formals()[-1];
        return formalArgs()[-1];
    }
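
    /*
     * Layout sketch implied by the comment above; 'ThisLayoutSanity' is a
     * hypothetical helper. |this| sits just below the formals for non-eval
     * function frames, and directly before the frame otherwise.
     */
    static void ThisLayoutSanity(StackFrame *fp) {
        if (fp->isNonEvalFunctionFrame())
            JS_ASSERT(&fp->thisValue() == fp->formalArgs() - 1);
        else
            JS_ASSERT(&fp->thisValue() == (Value *)fp - 1);
    }
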
    /*

    @ -814,7 +797,7 @@ class StackFrame

        JS_ASSERT(isScriptFrame());
        Value &calleev = flags_ & (EVAL | GLOBAL)
                         ? ((Value *)this)[-2]
                         : formals()[-2];
                         : formalArgs()[-2];
        JS_ASSERT(calleev.isObjectOrNull());
        return calleev;
    }

    @ -823,13 +806,117 @@ class StackFrame

        JS_ASSERT(isFunctionFrame());
        if (isEvalFrame())
            return ((Value *)this)[-2];
        return formals()[-2];
        return formalArgs()[-2];
    }

    CallReceiver callReceiver() const {
        return CallReceiverFromArgv(formals());
        return CallReceiverFromArgv(formalArgs());
    }
    /*
     * Scope chain
     *
     * Every frame has a scopeChain which, when traversed via the 'parent' link
     * to the root, indicates the current global object. A 'call object' is a
     * node on a scope chain representing a function's activation record. A
     * call object is used for dynamically-scoped name lookup and lexically-
     * scoped upvar access. The call object holds the values of locals and
     * arguments when a function returns (and its stack frame is popped). For
     * performance reasons, call objects are created lazily for 'lightweight'
     * functions, i.e., functions which are not statically known to require a
     * call object. Thus, a given function frame may or may not have a call
     * object. When a function does have a call object, it is found by walking
     * up the scope chain until the first call object. Thus, it is important,
     * when setting the scope chain, to indicate whether the new scope chain
     * contains a new call object and thus changes the 'hasCallObj' state.
     *
     * The method JIT requires that HAS_SCOPECHAIN be set for all frames which
     * use NAME or related opcodes that can access the scope chain (so it does
     * not have to test the bit). To ensure this, we always initialize the
     * scope chain when pushing frames in the VM, and only initialize it when
     * pushing frames in JIT code when the above situation applies.
     *
     * NB: 'fp->hasCallObj()' implies that fp->callObj() needs to be 'put' when
     * the frame is popped. Since the scope chain of a non-strict eval frame
     * contains the call object of the parent (function) frame, it is possible
     * to have:
     *   !fp->hasCallObj() && fp->scopeChain().isCall()
     */

    inline HandleObject scopeChain() const;
    inline GlobalObject &global() const;

    bool hasCallObj() const {
        bool ret = !!(flags_ & HAS_CALL_OBJ);
        JS_ASSERT_IF(ret, !isNonStrictEvalFrame());
        return ret;
    }

    inline CallObject &callObj() const;
    inline void initScopeChain(CallObject &callobj);
    inline void setScopeChain(JSObject &obj);
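
    /*
     * Sketch of the lookup rule described above: a frame's call object, when
     * it has one, is the innermost call object on its scope chain.
     * 'FindCallObj' is a hypothetical helper, and 'EnclosingScope' stands in
     * for whatever parent-link accessor the tree provides; both are
     * assumptions of this sketch rather than real APIs here.
     */
    static JSObject *FindCallObj(StackFrame *fp) {
        if (!fp->hasCallObj())
            return NULL;
        JSObject *obj = fp->scopeChain();
        while (!obj->isCall())
            obj = EnclosingScope(obj);   /* placeholder parent-link accessor */
        return obj;
    }
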
    /*
     * Variables object
     *
     * Given that a (non-dummy) StackFrame corresponds roughly to an ES5
     * Execution Context (ES5 10.3), StackFrame::varObj corresponds to the
     * VariableEnvironment component of an Execution Context. Intuitively, the
     * variables object is where new bindings (variables and functions) are
     * stored. One might expect that this is either the callObj or
     * scopeChain.globalObj for function or global code, respectively, however
     * the JSAPI allows calls of Execute to specify a variables object on the
     * scope chain other than the call/global object. This allows embeddings to
     * run multiple scripts under the same global, each time using a new
     * variables object to collect and discard the script's global variables.
     */

    inline JSObject &varObj();

    /* Block chain */

    bool hasBlockChain() const {
        return (flags_ & HAS_BLOCKCHAIN) && blockChain_;
    }

    StaticBlockObject *maybeBlockChain() {
        return (flags_ & HAS_BLOCKCHAIN) ? blockChain_ : NULL;
    }

    StaticBlockObject &blockChain() const {
        JS_ASSERT(hasBlockChain());
        return *blockChain_;
    }

    /* Enter/exit execution of a lexical block. */
    bool pushBlock(JSContext *cx, StaticBlockObject &block);
    void popBlock(JSContext *cx);

    /* Exits (via execution or exception) a with block. */
    void popWith(JSContext *cx);

    /*
     * Prologue for function frames: make a call object for heavyweight
     * functions, and maintain type nesting invariants.
     */
    inline bool functionPrologue(JSContext *cx);

    /*
     * Epilogue for function frames: put any args or call object for the frame
     * which may still be live, and maintain type nesting invariants. Note:
     * this does mark the epilogue as having been completed, since the frame is
     * about to be popped. Use updateEpilogueFlags for this.
     */
    inline void functionEpilogue(JSContext *cx);

    /*
     * If callObj() or argsObj() have already been put, update our flags
     * accordingly. This call must be followed by a later functionEpilogue.
     */
    inline void updateEpilogueFlags();

    inline bool maintainNestingState() const;
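
    /*
     * Sketch of the intended pairing; 'RunFunctionFrame' is a hypothetical
     * helper. The prologue runs before the body, the epilogue when the frame
     * is about to be popped, and updateEpilogueFlags covers the case where the
     * call/args objects were already put in between.
     */
    static bool RunFunctionFrame(JSContext *cx, StackFrame *fp) {
        if (!fp->functionPrologue(cx))
            return false;
        bool ok = true;       /* ... interpret or JIT-execute the body ... */
        fp->functionEpilogue(cx);
        return ok;
    }
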
    /*
     * Frame compartment
     *

    @ -852,11 +939,11 @@ class StackFrame

    /* JIT rejoin state */

    FrameRejoinState rejoin() const {
    JSRejoinState rejoin() const {
        return rejoin_;
    }

    void setRejoin(FrameRejoinState state) {
    void setRejoin(JSRejoinState state) {
        rejoin_ = state;
    }

    @ -931,53 +1018,35 @@ class StackFrame

    }

    /*
     * A "generator" frame is a function frame associated with a generator.
     * Since generators are not executed LIFO, the VM copies a single abstract
     * generator frame back and forth between the LIFO VM stack (when the
     * generator is active) and a snapshot stored in JSGenerator (when the
     * generator is inactive). A generator frame is comprised of a StackFrame
     * structure and the values that make up the arguments, locals, and
     * expression stack. The layout in the JSGenerator snapshot matches the
     * layout on the stack (see the "VM stack layout" comment above).
     * Generator-specific members
     *
     * A non-eval function frame may optionally be the activation of a
     * generator. For the most part, generator frames act like ordinary frames.
     * For exceptions, see js_FloatingFrameIfGenerator.
     */

    bool isGeneratorFrame() const {
        bool ret = flags_ & GENERATOR;
        JS_ASSERT_IF(ret, isNonEvalFunctionFrame());
        return ret;
        return !!(flags_ & GENERATOR);
    }
    void initGeneratorFrame() const {
        JS_ASSERT(!isGeneratorFrame());
        JS_ASSERT(isNonEvalFunctionFrame());
        flags_ |= GENERATOR;
    bool isFloatingGenerator() const {
        JS_ASSERT_IF(flags_ & FLOATING_GENERATOR, isGeneratorFrame());
        return !!(flags_ & FLOATING_GENERATOR);
    }

    Value *generatorArgsSnapshotBegin() const {
        JS_ASSERT(isGeneratorFrame());
        return actuals() - 2;
    void initFloatingGenerator() {
        JS_ASSERT(!(flags_ & GENERATOR));
        flags_ |= (GENERATOR | FLOATING_GENERATOR);
    }

    Value *generatorArgsSnapshotEnd() const {
        JS_ASSERT(isGeneratorFrame());
        return (Value *)this;
    void unsetFloatingGenerator() {
        flags_ &= ~FLOATING_GENERATOR;
    }

    Value *generatorSlotsSnapshotBegin() const {
        JS_ASSERT(isGeneratorFrame());
        return (Value *)(this + 1);
    void setFloatingGenerator() {
        flags_ |= FLOATING_GENERATOR;
    }

    enum TriggerPostBarriers {
        DoPostBarrier = true,
        NoPostBarrier = false
    };
    template <class T, class U, TriggerPostBarriers doPostBarrier>
    void copyFrameAndValues(JSContext *cx, StackFrame *fp, T *vp,
                            StackFrame *otherfp, U *othervp, Value *othersp);

    JSGenerator *maybeSuspendedGenerator(JSRuntime *rt);
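
    /*
     * Sketch of the snapshot extent described in the generator comment above;
     * 'GeneratorSnapshotVals' is a hypothetical helper. The copied values are
     * callee/this plus the actuals, followed by the frame's fixed slots and
     * expression stack.
     */
    static size_t GeneratorSnapshotVals(StackFrame *fp, Value *sp) {
        size_t argVals  = fp->generatorArgsSnapshotEnd() - fp->generatorArgsSnapshotBegin();
        size_t slotVals = sp - fp->generatorSlotsSnapshotBegin();
        return argVals + slotVals;
    }
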
    /*
     * js::Execute pushes both global and function frames (since eval() in a
     * function pushes a frame with isFunctionFrame() && isEvalFrame()). Most

    @ -1006,12 +1075,6 @@ class StackFrame

        return !!(flags_ & CONSTRUCTING);
    }

    bool beforeHeavyweightPrologue() const {
        JS_ASSERT(isNonEvalFunctionFrame());
        JS_ASSERT(fun()->isHeavyweight());
        return !(flags_ & HAS_CALL_OBJ);
    }

    /*
     * The method JIT call/apply optimization can erase Function.{call,apply}
     * invocations from the stack and push the callee frame directly. The base

    @ -1026,12 +1089,8 @@ class StackFrame

        return !!(flags_ & DEBUGGER);
    }

    bool prevUpToDate() const {
        return !!(flags_ & PREV_UP_TO_DATE);
    }

    void setPrevUpToDate() {
        flags_ |= PREV_UP_TO_DATE;
    bool hasOverflowArgs() const {
        return !!(flags_ & OVERFLOW_ARGS);
    }

    bool isYielding() {

    @ -1057,9 +1116,6 @@ class StackFrame

  public:
    /* Public, but only for JIT use: */

    inline void resetInlinePrev(StackFrame *prevfp, jsbytecode *prevpc);
    inline void initInlineFrame(JSFunction *fun, StackFrame *prevfp, jsbytecode *prevpc);

    static size_t offsetOfFlags() {
        return offsetof(StackFrame, flags_);
    }
    @ -1084,12 +1140,12 @@ class StackFrame

        return offsetof(StackFrame, rval_);
    }

    static ptrdiff_t offsetOfNcode() {
        return offsetof(StackFrame, ncode_);
    static size_t offsetOfArgsObj() {
        return offsetof(StackFrame, argsObj_);
    }

    static ptrdiff_t offsetOfArgsObj() {
        return offsetof(StackFrame, argsObj_);
    static ptrdiff_t offsetOfNcode() {
        return offsetof(StackFrame, ncode_);
    }

    static ptrdiff_t offsetOfCallee(JSFunction *fun) {

    @ -1165,11 +1221,11 @@ class FrameRegs

    Value *sp;
    jsbytecode *pc;
  private:
    InlinedSite *inlined_;
    JSInlinedSite *inlined_;
    StackFrame *fp_;
  public:
    StackFrame *fp() const { return fp_; }
    InlinedSite *inlined() const { return inlined_; }
    JSInlinedSite *inlined() const { return inlined_; }

    /* For jit use (need constant): */
    static const size_t offsetOfFp = 3 * sizeof(void *);

    @ -1180,16 +1236,6 @@ class FrameRegs

    }
    void clearInlined() { inlined_ = NULL; }

    unsigned stackDepth() const {
        JS_ASSERT(sp >= fp_->base());
        return sp - fp_->base();
    }

    Value *spForStackDepth(unsigned depth) const {
        JS_ASSERT(fp_->script()->nfixed + depth <= fp_->script()->nslots);
        return fp_->base() + depth;
    }
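
    /*
     * Sketch of the invariant behind the two helpers above; 'CheckDepthRoundTrip'
     * is a hypothetical debug helper. stackDepth() and spForStackDepth() are
     * inverses for any depth within the script's slot budget.
     */
    static void CheckDepthRoundTrip(FrameRegs &regs) {
        unsigned depth = regs.stackDepth();
        JS_ASSERT(regs.spForStackDepth(depth) == regs.sp);
    }
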
    /* For generator: */
    void rebaseFromTo(const FrameRegs &from, StackFrame &to) {
        fp_ = &to;

    @ -1436,10 +1482,6 @@ class StackSpace

    StackSegment &findContainingSegment(const StackFrame *target) const;

    bool containsFast(StackFrame *fp) {
        return (Value *)fp >= base_ && (Value *)fp <= trustedEnd_;
    }

  public:
    StackSpace();
    bool init();

    @ -1492,7 +1534,7 @@ class StackSpace

    /* Called during GC: mark segments, frames, and slots under firstUnused. */
    void mark(JSTracer *trc);
    void markFrameValues(JSTracer *trc, StackFrame *fp, Value *slotsEnd, jsbytecode *pc);
    void markFrameSlots(JSTracer *trc, StackFrame *fp, Value *slotsEnd, jsbytecode *pc);
    /* Called during GC: sets active flag on compartments with active frames. */
    void markActiveCompartments();