Bug 659577 - Don't alias stack variables (r=bhackett)

--HG--
rename : js/src/jit-test/tests/basic/testBug659577.js => js/src/jit-test/tests/basic/testBug659577-1.js
This commit is contained in:
Luke Wagner 2012-02-23 13:59:10 -08:00
Родитель 73c4e60e88
Коммит e89e33891b
61 изменённых файлов: 1674 добавлений и 2352 удалений

Просмотреть файл

@ -843,7 +843,7 @@ EmitAliasedVarOp(JSContext *cx, JSOp op, ScopeCoordinate sc, BytecodeEmitter *bc
maybeBlockIndex = bce->objectList.indexOf(bce->sc->blockChain);
bool decomposed = js_CodeSpec[op].format & JOF_DECOMPOSE;
unsigned n = 2 * sizeof(uint16_t) + sizeof(uint32_t) + sizeof(uint16_t) + (decomposed ? 1 : 0);
unsigned n = 2 * sizeof(uint16_t) + sizeof(uint32_t) + (decomposed ? 1 : 0);
JS_ASSERT(int(n) + 1 /* op */ == js_CodeSpec[op].length);
ptrdiff_t off = EmitN(cx, bce, op, n);
@ -853,11 +853,9 @@ EmitAliasedVarOp(JSContext *cx, JSOp op, ScopeCoordinate sc, BytecodeEmitter *bc
jsbytecode *pc = bce->code(off);
SET_UINT16(pc, sc.hops);
pc += sizeof(uint16_t);
SET_UINT16(pc, sc.binding);
SET_UINT16(pc, sc.slot);
pc += sizeof(uint16_t);
SET_UINT32_INDEX(pc, maybeBlockIndex);
pc += sizeof(uint32_t);
SET_UINT16(pc, sc.frameBinding);
return true;
}
@ -888,15 +886,13 @@ EmitAliasedVarOp(JSContext *cx, JSOp op, ParseNode *pn, BytecodeEmitter *bce)
if (JOF_OPTYPE(pn->getOp()) == JOF_QARG) {
JS_ASSERT(bce->sc->funIsHeavyweight());
sc.hops = ClonedBlockDepth(bce);
sc.binding = bce->sc->bindings.argToBinding(pn->pn_cookie.slot());
sc.frameBinding = sc.binding;
sc.slot = bce->sc->bindings.argToSlot(pn->pn_cookie.slot());
} else {
JS_ASSERT(JOF_OPTYPE(pn->getOp()) == JOF_LOCAL || pn->isKind(PNK_FUNCTION));
unsigned local = pn->pn_cookie.slot();
sc.frameBinding = bce->sc->bindings.localToBinding(local);
if (local < bce->sc->bindings.numVars()) {
sc.hops = ClonedBlockDepth(bce);
sc.binding = sc.frameBinding;
sc.slot = bce->sc->bindings.localToSlot(local);
} else {
unsigned depth = local - bce->sc->bindings.numVars();
unsigned hops = 0;
@ -907,7 +903,7 @@ EmitAliasedVarOp(JSContext *cx, JSOp op, ParseNode *pn, BytecodeEmitter *bce)
b = b->enclosingBlock();
}
sc.hops = hops;
sc.binding = depth - b->stackDepth();
sc.slot = depth - b->stackDepth();
}
}
@ -2637,12 +2633,11 @@ frontend::EmitFunctionScript(JSContext *cx, BytecodeEmitter *bce, ParseNode *bod
if (bce->sc->bindingsAccessedDynamically()) {
ScopeCoordinate sc;
sc.hops = 0;
sc.binding = bce->sc->bindings.localToBinding(bce->sc->argumentsLocalSlot());
sc.frameBinding = sc.binding;
sc.slot = bce->sc->bindings.localToSlot(bce->sc->argumentsLocal());
if (!EmitAliasedVarOp(cx, JSOP_SETALIASEDVAR, sc, bce))
return false;
} else {
if (!EmitUnaliasedVarOp(cx, JSOP_SETLOCAL, bce->sc->argumentsLocalSlot(), bce))
if (!EmitUnaliasedVarOp(cx, JSOP_SETLOCAL, bce->sc->argumentsLocal(), bce))
return false;
}
if (Emit1(cx, bce, JSOP_POP) < 0)

Просмотреть файл

@ -53,7 +53,8 @@ SharedContext::needStrictChecks() {
}
inline unsigned
SharedContext::argumentsLocalSlot() const {
SharedContext::argumentsLocal() const
{
PropertyName *arguments = context->runtime->atomState.argumentsAtom;
unsigned slot;
DebugOnly<BindingKind> kind = bindings.lookup(context, arguments, &slot);

Просмотреть файл

@ -187,7 +187,7 @@ struct SharedContext {
#undef INFUNC
unsigned argumentsLocalSlot() const;
unsigned argumentsLocal() const;
bool inFunction() const { return !!fun_; }

Просмотреть файл

@ -338,6 +338,7 @@ class EncapsulatedValue
bool isDouble() const { return value.isDouble(); }
bool isString() const { return value.isString(); }
bool isObject() const { return value.isObject(); }
bool isMagic() const { return value.isMagic(); }
bool isMagic(JSWhyMagic why) const { return value.isMagic(why); }
bool isGCThing() const { return value.isGCThing(); }
bool isMarkable() const { return value.isMarkable(); }
@ -463,6 +464,14 @@ Valueify(const EncapsulatedValue *array)
return (const Value *)array;
}
static inline HeapValue *
HeapValueify(Value *v)
{
JS_STATIC_ASSERT(sizeof(HeapValue) == sizeof(Value));
JS_STATIC_ASSERT(sizeof(HeapSlot) == sizeof(Value));
return (HeapValue *)v;
}
class HeapSlotArray
{
HeapSlot *array;

Просмотреть файл

@ -129,6 +129,12 @@ MarkValue(JSTracer *trc, EncapsulatedValue *v, const char *name);
void
MarkValueRange(JSTracer *trc, size_t len, EncapsulatedValue *vec, const char *name);
inline void
MarkValueRange(JSTracer *trc, HeapValue *begin, HeapValue *end, const char *name)
{
return MarkValueRange(trc, end - begin, begin, name);
}
void
MarkValueRoot(JSTracer *trc, Value *v, const char *name);

Просмотреть файл

@ -0,0 +1,11 @@
// Regression test for bug 659577: variables declared in a let-block and
// assigned through eval() must keep their values while a long-running loop
// reads them -- presumably exercising aliased (closed-over) stack slots;
// TODO confirm against the bug report.
function f() {
let (x, y, z) {
// eval writes the let-bound variables via the dynamic scope.
eval('x = 1; y = 2; z = 3');
// Hot loop so the JIT-compiled path is taken; values must not be lost.
for (var i = 0; i < 10000; ++i) {
assertEq(x, 1);
assertEq(y, 2);
assertEq(z, 3);
}
}
}
f();

Просмотреть файл

@ -0,0 +1,15 @@
// Stress test: run with aggressive GC (gczeal mode 4) while dateFormat()
// builds function source text in a loop and compiles it with a nested eval.
// NOTE(review): appears to be a GC/scope-chain regression test -- no asserts;
// passing means "does not crash or mis-collect". Confirm intent against the
// associated bug.
gczeal(4);
evaluate("\
Date.formatFunctions = {count:0};\
Date.prototype.dateFormat = function(format) {\
var funcName = 'format' + Date.formatFunctions.count++;\
var code = 'Date.prototype.' + funcName + ' = function(){return ';\
var ch = '';\
for (var i = 0; i < format.length; ++i) {\
ch = format.charAt(i);\
eval(code.substring(0, code.length - 3) + ';}');\
}\
};\
var date = new Date('1/1/2007 1:11:11');\
var shortFormat = date.dateFormat('Y-m-d');\
");

Просмотреть файл

@ -0,0 +1,6 @@
// |jit-test| error:TypeError
// The IIFE is called with no arguments, so destructuring the parameter from
// undefined must throw the TypeError the harness metaline expects. The eval()
// in the body presumably forces a heavyweight/aliased scope -- TODO confirm.
(function({
l
}) {
eval();
})()

Просмотреть файл

@ -0,0 +1,10 @@
// Check that g.apply(null, arguments) forwards all of f's *actual* arguments:
// f declares one formal but is called with two, and the sum must see both.
// Looped so the optimized fun.apply path gets compiled and taken.
function g(x,y) {
return x + y;
}
function f(x) {
// 'arguments' here must reflect both actuals (i and 1), not just the formal.
return g.apply(null, arguments);
}
for (var i = 0; i < 100; ++i)
assertEq(f(i, 1), i+1);

Просмотреть файл

@ -0,0 +1,6 @@
// |jit-test| debug
// Regression test: a legacy generator expression next to a function statement
// inside try, then a gc(). Any exception is swallowed; there are no asserts,
// so the test only checks that this sequence does not crash the engine.
try {
function f() {}
(1 for (x in []))
} catch (e) {}
gc()

Просмотреть файл

@ -0,0 +1,16 @@
// Debugger test: from the debugger hook, frame.older.eval creates a closure
// ('escaped') over 'y' in the live frame of a running generator. The captured
// value (42) must still be readable after the generator yields and after a
// gc() -- i.e. the aliased variable must outlive the stack frame.
var g = newGlobal('new-compartment');
var dbg = new Debugger(g);
var hits = 0;
dbg.onDebuggerStatement = function(frame) {
++hits;
// frame.older is the generator frame that declared 'y'.
frame.older.eval("escaped = function() { return y }");
}
g.escaped = undefined;
g.eval("function h() { debugger }");
g.eval("(function () { var y = 42; h(); yield })().next();");
assertEq(g.eval("escaped()"), 42);
gc();
assertEq(g.eval("escaped()"), 42);

Просмотреть файл

@ -3,7 +3,7 @@ setDebug(true);
x = "notset";
function main() {
/* The JSOP_STOP in main. */
a = { valueOf: function () { trap(main, 97, "success()"); } };
a = { valueOf: function () { trap(main, 95, "success()"); } };
b = "";
eval();
a + b;

Просмотреть файл

@ -225,7 +225,7 @@ MSG_DEF(JSMSG_BAD_XML_CHARACTER, 171, 0, JSEXN_SYNTAXERR, "illegal XML char
MSG_DEF(JSMSG_BAD_DEFAULT_XML_NAMESPACE,172,0,JSEXN_SYNTAXERR, "invalid default XML namespace")
MSG_DEF(JSMSG_BAD_XML_NAME_SYNTAX, 173, 0, JSEXN_SYNTAXERR, "invalid XML name")
MSG_DEF(JSMSG_BRACKET_AFTER_ATTR_EXPR,174, 0, JSEXN_SYNTAXERR, "missing ] after attribute expression")
MSG_DEF(JSMSG_NESTING_GENERATOR, 175, 1, JSEXN_TYPEERR, "already executing generator {0}")
MSG_DEF(JSMSG_NESTING_GENERATOR, 175, 0, JSEXN_TYPEERR, "already executing generator")
MSG_DEF(JSMSG_CURLY_IN_XML_EXPR, 176, 0, JSEXN_SYNTAXERR, "missing } in XML expression")
MSG_DEF(JSMSG_BAD_XML_NAMESPACE, 177, 1, JSEXN_TYPEERR, "invalid XML namespace {0}")
MSG_DEF(JSMSG_BAD_XML_ATTR_NAME, 178, 1, JSEXN_TYPEERR, "invalid XML attribute name {0}")

Просмотреть файл

@ -323,14 +323,6 @@ ScriptAnalysis::analyzeBytecode(JSContext *cx)
case JSOP_SETALIASEDVAR: {
JS_ASSERT(!isInlineable);
usesScopeChain_ = true;
/* XXX: this can be removed after bug 659577. */
ScopeCoordinate sc(pc);
if (script->bindings.bindingIsLocal(sc.frameBinding) &&
script->bindings.bindingToLocal(sc.frameBinding) >= script->nfixed)
{
localsAliasStack_ = true;
}
break;
}
@ -1925,15 +1917,15 @@ ScriptAnalysis::needsArgsObj(NeedsArgsObjState &state, SSAUseChain *use)
if (op == JSOP_POP || op == JSOP_POPN)
return false;
#ifdef JS_METHODJIT
/* SplatApplyArgs can read fp->canonicalActualArg(i) directly. */
if (state.canOptimizeApply && op == JSOP_FUNAPPLY && GET_ARGC(pc) == 2 && use->u.which == 0) {
#ifdef JS_METHODJIT
JS_ASSERT(mjit::IsLowerableFunCallOrApply(pc));
#endif
state.haveOptimizedApply = true;
state.canOptimizeApply = false;
return false;
}
#endif
/* arguments[i] can read fp->canonicalActualArg(i) directly. */
if (!state.haveOptimizedApply && op == JSOP_GETELEM && use->u.which == 1) {
@ -1973,8 +1965,11 @@ ScriptAnalysis::needsArgsObj(JSContext *cx)
* soundly perform this analysis in their presence. Also, debuggers may
* want to see 'arguments', so assume every arguments object escapes.
*/
if (script->bindingsAccessedDynamically || localsAliasStack() || cx->compartment->debugMode())
if (script->bindingsAccessedDynamically || script->numClosedArgs() > 0 ||
localsAliasStack() || cx->compartment->debugMode())
{
return true;
}
unsigned pcOff = script->argumentsBytecode() - script->code;

Просмотреть файл

@ -363,13 +363,14 @@ static inline uint32_t GetBytecodeSlot(JSScript *script, jsbytecode *pc)
case JSOP_CALLALIASEDVAR:
case JSOP_SETALIASEDVAR:
{
ScopeCoordinate sc = ScopeCoordinate(pc);
return script->bindings.bindingIsArg(sc.frameBinding)
? ArgSlot(script->bindings.bindingToArg(sc.frameBinding))
: LocalSlot(script, script->bindings.bindingToLocal(sc.frameBinding));
ScopeCoordinate sc(pc);
if (StaticBlockObject *block = ScopeCoordinateBlockChain(script, pc))
return LocalSlot(script, block->slotToFrameLocal(script, sc.slot));
if (script->bindings.slotIsArg(sc.slot))
return ArgSlot(script->bindings.slotToArg(sc.slot));
return LocalSlot(script, script->bindings.slotToLocal(sc.slot));
}
case JSOP_THIS:
return ThisSlot();

Просмотреть файл

@ -97,7 +97,7 @@ ExhaustiveTest(const char funcode[])
for (size_t i = 0; i <= ArgCount; i++) {
for (size_t j = 0; j <= ArgCount - i; j++) {
ClearElements(elems);
CHECK(argsobj.getElements(i, j, elems));
CHECK(argsobj.maybeGetElements(i, j, elems));
for (size_t k = 0; k < j; k++)
CHECK_SAME(elems[k], INT_TO_JSVAL(i + k));
for (size_t k = j; k < MAX_ELEMS - 1; k++)

Просмотреть файл

@ -399,7 +399,7 @@ GetElement(JSContext *cx, JSObject *obj, IndexType index, JSBool *hole, Value *v
return JS_TRUE;
}
if (obj->isArguments()) {
if (obj->asArguments().getElement(uint32_t(index), vp)) {
if (obj->asArguments().maybeGetElement(uint32_t(index), vp)) {
*hole = JS_FALSE;
return true;
}
@ -438,7 +438,7 @@ GetElements(JSContext *cx, HandleObject aobj, uint32_t length, Value *vp)
if (aobj->isArguments()) {
ArgumentsObject &argsobj = aobj->asArguments();
if (!argsobj.hasOverriddenLength()) {
if (argsobj.getElements(0, length, vp))
if (argsobj.maybeGetElements(0, length, vp))
return true;
}
}

Просмотреть файл

@ -994,6 +994,7 @@ JSContext::JSContext(JSRuntime *rt)
functionCallback(NULL),
#endif
enumerators(NULL),
innermostGenerator_(NULL),
#ifdef DEBUG
stackIterAssertionEnabled(true),
#endif
@ -1079,26 +1080,24 @@ JSContext::wrapPendingException()
setPendingException(v);
}
JSGenerator *
JSContext::generatorFor(StackFrame *fp) const
void
JSContext::enterGenerator(JSGenerator *gen)
{
JS_ASSERT(stack.containsSlow(fp));
JS_ASSERT(fp->isGeneratorFrame());
JS_ASSERT(!fp->isFloatingGenerator());
JS_ASSERT(!genStack.empty());
if (JS_LIKELY(fp == genStack.back()->liveFrame()))
return genStack.back();
/* General case; should only be needed for debug APIs. */
for (size_t i = 0; i < genStack.length(); ++i) {
if (genStack[i]->liveFrame() == fp)
return genStack[i];
}
JS_NOT_REACHED("no matching generator");
return NULL;
JS_ASSERT(!gen->prevGenerator);
gen->prevGenerator = innermostGenerator_;
innermostGenerator_ = gen;
}
void
JSContext::leaveGenerator(JSGenerator *gen)
{
JS_ASSERT(innermostGenerator_ == gen);
innermostGenerator_ = innermostGenerator_->prevGenerator;
gen->prevGenerator = NULL;
}
bool
JSContext::runningWithTrustedPrincipals() const
{

Просмотреть файл

@ -1242,29 +1242,12 @@ struct JSContext : js::ContextFriendFields
JSObject *enumerators;
private:
/*
* To go from a live generator frame (on the stack) to its generator object
* (see comment js_FloatingFrameIfGenerator), we maintain a stack of active
* generators, pushing and popping when entering and leaving generator
* frames, respectively.
*/
js::Vector<JSGenerator *, 2, js::SystemAllocPolicy> genStack;
/* Innermost-executing generator or null if no generator are executing. */
JSGenerator *innermostGenerator_;
public:
/* Return the generator object for the given generator frame. */
JSGenerator *generatorFor(js::StackFrame *fp) const;
/* Early OOM-check. */
inline bool ensureGeneratorStackSpace();
bool enterGenerator(JSGenerator *gen) {
return genStack.append(gen);
}
void leaveGenerator(JSGenerator *gen) {
JS_ASSERT(genStack.back() == gen);
genStack.popBack();
}
JSGenerator *innermostGenerator() const { return innermostGenerator_; }
void enterGenerator(JSGenerator *gen);
void leaveGenerator(JSGenerator *gen);
inline void* malloc_(size_t bytes) {
return runtime->malloc_(bytes, this);
@ -1296,9 +1279,6 @@ struct JSContext : js::ContextFriendFields
void purge();
/* For DEBUG. */
inline void assertValidStackDepth(unsigned depth);
bool isExceptionPending() {
return throwing;
}

Просмотреть файл

@ -541,14 +541,6 @@ JSContext::setCompileOptions(unsigned newcopts)
maybeOverrideVersion(newVersion);
}
inline void
JSContext::assertValidStackDepth(unsigned depth)
{
#ifdef DEBUG
JS_ASSERT(0 <= regs().sp - fp()->base());
JS_ASSERT(depth <= uintptr_t(regs().sp - fp()->base()));
#endif
}
inline js::LifoAlloc &
JSContext::typeLifoAlloc()
@ -556,15 +548,6 @@ JSContext::typeLifoAlloc()
return compartment->typeLifoAlloc;
}
inline bool
JSContext::ensureGeneratorStackSpace()
{
bool ok = genStack.reserve(genStack.length() + 1);
if (!ok)
js_ReportOutOfMemory(this);
return ok;
}
inline void
JSContext::setPendingException(js::Value v) {
JS_ASSERT(!IsPoisonedValue(v));

Просмотреть файл

@ -565,11 +565,8 @@ JS_GetFrameCallObject(JSContext *cx, JSStackFrame *fpArg)
*/
while (o) {
ScopeObject &scope = o->asDebugScope().scope();
if (scope.isCall()) {
JS_ASSERT_IF(cx->compartment->debugMode() && fp->isNonEvalFunctionFrame(),
fp == scope.asCall().maybeStackFrame());
if (scope.isCall())
return o;
}
o = o->enclosingScope();
}
return NULL;
@ -805,10 +802,10 @@ GetPropertyDesc(JSContext *cx, JSObject *obj_, Shape *shape, JSPropertyDesc *pd)
| (!shape->writable() ? JSPD_READONLY : 0)
| (!shape->configurable() ? JSPD_PERMANENT : 0);
pd->spare = 0;
if (shape->getter() == CallObject::getArgOp) {
if (shape->setter() == CallObject::setArgOp) {
pd->slot = shape->shortid();
pd->flags |= JSPD_ARGUMENT;
} else if (shape->getter() == CallObject::getVarOp) {
} else if (shape->setter() == CallObject::setVarOp) {
pd->slot = shape->shortid();
pd->flags |= JSPD_VARIABLE;
} else {

Просмотреть файл

@ -127,7 +127,7 @@ fun_getProperty(JSContext *cx, HandleObject obj_, HandleId id, Value *vp)
* innermost function as uninlineable to expand its frame and allow us
* to recover its callee object.
*/
JSInlinedSite *inlined;
InlinedSite *inlined;
jsbytecode *prevpc = fp->prev()->pcQuadratic(cx->stack, fp, &inlined);
if (inlined) {
mjit::JITChunk *chunk = fp->prev()->jit()->chunk(prevpc);
@ -696,7 +696,7 @@ js_fun_apply(JSContext *cx, unsigned argc, Value *vp)
args.thisv() = vp[2];
/* Steps 7-8. */
cx->fp()->forEachCanonicalActualArg(CopyTo(args.array()));
cx->fp()->forEachUnaliasedActual(CopyTo(args.array()));
} else {
/* Step 3. */
if (!vp[3].isObject()) {

Просмотреть файл

@ -244,9 +244,6 @@ js_ValueToCallableObject(JSContext *cx, js::Value *vp, unsigned flags);
extern void
js_ReportIsNotFunction(JSContext *cx, const js::Value *vp, unsigned flags);
extern void
js_PutCallObject(js::StackFrame *fp, js::CallObject &callobj);
namespace js {
/*
@ -278,9 +275,6 @@ JSFunction::toExtended() const
return static_cast<const js::FunctionExtended *>(this);
}
extern void
js_PutArgsObject(js::StackFrame *fp);
inline bool
js_IsNamedLambda(JSFunction *fun) { return (fun->flags & JSFUN_LAMBDA) && fun->atom; }

Просмотреть файл

@ -3241,7 +3241,7 @@ SweepPhase(JSRuntime *rt, JSGCInvocationKind gckind, bool *startBackgroundSweep)
/* Finalize unreachable (key,value) pairs in all weak maps. */
WeakMapBase::sweepAll(&rt->gcMarker);
rt->debugScopes->sweep();
rt->debugScopes->sweep(rt);
SweepAtomState(rt);

Просмотреть файл

@ -5192,8 +5192,8 @@ NestingPrologue(JSContext *cx, StackFrame *fp)
}
nesting->activeCall = &fp->callObj();
nesting->argArray = fp->formalArgs();
nesting->varArray = fp->slots();
nesting->argArray = Valueify(nesting->activeCall->argArray());
nesting->varArray = Valueify(nesting->activeCall->varArray());
}
/* Maintain stack frame count for the function. */

Просмотреть файл

@ -23,6 +23,9 @@ struct TypeInferenceSizes;
}
namespace js {
class CallObject;
namespace types {
/* Type set entry for either a JSObject with singleton type or a non-singleton TypeObject. */
@ -961,7 +964,7 @@ struct TypeScriptNesting
JSScript *next;
/* If this is an outer function, the most recent activation. */
JSObject *activeCall;
CallObject *activeCall;
/*
* If this is an outer function, pointers to the most recent activation's

Просмотреть файл

@ -223,6 +223,7 @@ js::RunScript(JSContext *cx, JSScript *script, StackFrame *fp)
JS_ASSERT(fp == cx->fp());
JS_ASSERT(fp->script() == script);
JS_ASSERT_IF(!fp->isGeneratorFrame(), cx->regs().pc == script->code);
JS_ASSERT_IF(fp->isEvalFrame(), script->isActiveEval);
#ifdef JS_METHODJIT_SPEW
JMCheckLogging();
#endif
@ -317,16 +318,11 @@ js::InvokeKernel(JSContext *cx, CallArgs args, MaybeConstruct construct)
if (!cx->stack.pushInvokeFrame(cx, args, initial, &ifg))
return false;
/* Now that the new frame is rooted, maybe create a call object. */
StackFrame *fp = ifg.fp();
if (!fp->functionPrologue(cx))
return false;
/* Run function until JSOP_STOP, JSOP_RETURN or error. */
JSBool ok = RunScript(cx, fun->script(), fp);
JSBool ok = RunScript(cx, fun->script(), ifg.fp());
/* Propagate the return value out. */
args.rval() = fp->returnValue();
args.rval() = ifg.fp()->returnValue();
JS_ASSERT_IF(ok && construct, !args.rval().isPrimitive());
return ok;
}
@ -453,29 +449,15 @@ js::ExecuteKernel(JSContext *cx, JSScript *script_, JSObject &scopeChain, const
if (!script->ensureRanAnalysis(cx, &scopeChain))
return false;
/* Give strict mode eval its own fresh lexical environment. */
StackFrame *fp = efg.fp();
if (fp->isStrictEvalFrame() && !CallObject::createForStrictEval(cx, fp))
return false;
TypeScript::SetThis(cx, script, efg.fp()->thisValue());
Probes::startExecution(cx, script);
TypeScript::SetThis(cx, script, fp->thisValue());
bool ok = RunScript(cx, script, fp);
if (fp->isStrictEvalFrame()) {
if (cx->compartment->debugMode())
cx->runtime->debugScopes->onPopStrictEvalScope(fp);
js_PutCallObject(fp, fp->callObj());
}
bool ok = RunScript(cx, script, efg.fp());
Probes::stopExecution(cx, script);
/* Propgate the return value out. */
if (result)
*result = fp->returnValue();
*result = efg.fp()->returnValue();
return ok;
}
@ -698,7 +680,7 @@ EnterWith(JSContext *cx, int stackIndex)
StackFrame *fp = cx->fp();
Value *sp = cx->regs().sp;
JS_ASSERT(stackIndex < 0);
JS_ASSERT(fp->base() <= sp + stackIndex);
JS_ASSERT(int(cx->regs().stackDepth()) + stackIndex >= 0);
RootedObject obj(cx);
if (sp[-1].isObject()) {
@ -706,17 +688,17 @@ EnterWith(JSContext *cx, int stackIndex)
} else {
obj = js_ValueToNonNullObject(cx, sp[-1]);
if (!obj)
return JS_FALSE;
return false;
sp[-1].setObject(*obj);
}
JSObject *withobj = WithObject::create(cx, obj, fp->scopeChain(),
sp + stackIndex - fp->base());
WithObject *withobj = WithObject::create(cx, obj, fp->scopeChain(),
cx->regs().stackDepth() + stackIndex);
if (!withobj)
return JS_FALSE;
return false;
fp->setScopeChain(*withobj);
return JS_TRUE;
fp->pushOnScopeChain(*withobj);
return true;
}
/* Unwind block and scope chains to match the given depth. */
@ -724,7 +706,7 @@ void
js::UnwindScope(JSContext *cx, uint32_t stackDepth)
{
StackFrame *fp = cx->fp();
JS_ASSERT(fp->base() + stackDepth <= cx->regs().sp);
JS_ASSERT(stackDepth <= cx->regs().stackDepth());
for (ScopeIter si(fp); !si.done(); si = si.enclosing()) {
switch (si.type()) {
@ -753,7 +735,7 @@ js::UnwindForUncatchableException(JSContext *cx, const FrameRegs &regs)
for (TryNoteIter tni(regs); !tni.done(); ++tni) {
JSTryNote *tn = *tni;
if (tn->kind == JSTRY_ITER) {
Value *sp = regs.fp()->base() + tn->stackDepth;
Value *sp = regs.spForStackDepth(tn->stackDepth);
UnwindIteratorForUncatchableException(cx, &sp[-1].toObject());
}
}
@ -813,7 +795,7 @@ TryNoteIter::settle()
* depth exceeding the current one and this condition is what we use to
* filter them out.
*/
if (tn->stackDepth <= regs.sp - regs.fp()->base())
if (tn->stackDepth <= regs.stackDepth())
break;
}
}
@ -850,36 +832,6 @@ DoIncDec(JSContext *cx, JSScript *script, jsbytecode *pc, const Value &v, Value
return true;
}
static inline void
CheckLocalAccess(StackFrame *fp, unsigned index, bool aliased = false)
{
#ifdef DEBUG
if (index < fp->numFixed()) {
JS_ASSERT(fp->script()->varIsAliased(index) == aliased);
} else {
unsigned depth = index - fp->numFixed();
for (StaticBlockObject *b = fp->maybeBlockChain(); b; b = b->enclosingBlock()) {
if (b->containsVarAtDepth(depth)) {
JS_ASSERT(b->isAliased(depth - b->stackDepth()) == aliased);
return;
}
}
/*
* Unfortunately, strange uses of JSOP_GETLOCAL (e.g., comprehensions
* and group assignment) access slots above script->nfixed and not in
* any block so we cannot use JS_NOT_REACHED here.
*/
}
#endif
}
static inline void
CheckArgAccess(StackFrame *fp, unsigned index)
{
JS_ASSERT(fp->script()->formalLivesInArgumentsObject(index) ==
fp->script()->argsObjAliasesFormals());
}
#define PUSH_COPY(v) do { *regs.sp++ = v; assertSameCompartment(cx, regs.sp[-1]); } while (0)
#define PUSH_COPY_SKIP_CHECK(v) *regs.sp++ = v
#define PUSH_NULL() regs.sp++->setNull()
@ -1222,7 +1174,6 @@ js::Interpret(JSContext *cx, StackFrame *entryFrame, InterpMode interpMode)
#define RESTORE_INTERP_VARS() \
JS_BEGIN_MACRO \
SET_SCRIPT(regs.fp()->script()); \
argv = regs.fp()->maybeFormalArgs(); \
atoms = FrameAtomBase(cx, regs.fp()); \
JS_ASSERT(&cx->regs() == &regs); \
JS_END_MACRO
@ -1285,7 +1236,6 @@ js::Interpret(JSContext *cx, StackFrame *entryFrame, InterpMode interpMode)
JSRuntime *const rt = cx->runtime;
Rooted<JSScript*> script(cx);
SET_SCRIPT(regs.fp()->script());
Value *argv = regs.fp()->maybeFormalArgs();
CHECK_INTERRUPT_HANDLER();
/*
@ -1320,8 +1270,8 @@ js::Interpret(JSContext *cx, StackFrame *entryFrame, InterpMode interpMode)
#if JS_HAS_GENERATORS
if (JS_UNLIKELY(regs.fp()->isGeneratorFrame())) {
JS_ASSERT((size_t) (regs.pc - script->code) <= script->length);
JS_ASSERT((size_t) (regs.sp - regs.fp()->base()) <= StackDepth(script));
JS_ASSERT(size_t(regs.pc - script->code) <= script->length);
JS_ASSERT(regs.stackDepth() <= script->nslots);
/*
* To support generator_throw and to catch ignored exceptions,
@ -1338,8 +1288,7 @@ js::Interpret(JSContext *cx, StackFrame *entryFrame, InterpMode interpMode)
/* Don't call the script prologue if executing between Method and Trace JIT. */
if (interpMode == JSINTERP_NORMAL) {
StackFrame *fp = regs.fp();
JS_ASSERT_IF(!fp->isGeneratorFrame(), regs.pc == script->code);
if (!ScriptPrologueOrGeneratorResume(cx, fp, UseNewTypeAtEntry(cx, fp)))
if (!fp->isGeneratorFrame() && !fp->prologue(cx, UseNewTypeAtEntry(cx, fp)))
goto error;
if (cx->compartment->debugMode()) {
JSTrapStatus status = ScriptDebugPrologue(cx, fp);
@ -1574,25 +1523,12 @@ BEGIN_CASE(JSOP_POP)
END_CASE(JSOP_POP)
BEGIN_CASE(JSOP_POPN)
{
JS_ASSERT(GET_UINT16(regs.pc) <= regs.stackDepth());
regs.sp -= GET_UINT16(regs.pc);
#ifdef DEBUG
JS_ASSERT(regs.fp()->base() <= regs.sp);
StaticBlockObject *block = regs.fp()->maybeBlockChain();
JS_ASSERT_IF(block,
block->stackDepth() + block->slotCount()
<= (size_t) (regs.sp - regs.fp()->base()));
for (JSObject *obj = regs.fp()->scopeChain(); obj; obj = obj->enclosingScope()) {
if (!obj->isBlock() || !obj->isWith())
continue;
if (obj->getPrivate() != js_FloatingFrameIfGenerator(cx, regs.fp()))
break;
JS_ASSERT(regs.fp()->base() + obj->asBlock().stackDepth()
+ (obj->isBlock() ? obj->asBlock().slotCount() : 1)
<= regs.sp);
}
if (StaticBlockObject *block = regs.fp()->maybeBlockChain())
JS_ASSERT(regs.stackDepth() >= block->stackDepth() + block->slotCount());
#endif
}
END_CASE(JSOP_POPN)
BEGIN_CASE(JSOP_SETRVAL)
@ -1639,14 +1575,13 @@ BEGIN_CASE(JSOP_STOP)
if (entryFrame != regs.fp())
inline_return:
{
AssertValidFunctionScopeChainAtExit(regs.fp());
if (cx->compartment->debugMode())
interpReturnOK = ScriptDebugEpilogue(cx, regs.fp(), interpReturnOK);
interpReturnOK = ScriptEpilogue(cx, regs.fp(), interpReturnOK);
if (!regs.fp()->isYielding())
regs.fp()->epilogue(cx);
/* The JIT inlines ScriptEpilogue. */
/* The JIT inlines the epilogue. */
#ifdef JS_METHODJIT
jit_return:
#endif
@ -1679,7 +1614,7 @@ BEGIN_CASE(JSOP_STOP)
regs.pc += JSOP_CALL_LENGTH;
goto error;
} else {
JS_ASSERT(regs.sp == regs.fp()->base());
JS_ASSERT(regs.stackDepth() == 0);
}
interpReturnOK = true;
goto exit;
@ -1796,7 +1731,7 @@ END_CASE(JSOP_IN)
BEGIN_CASE(JSOP_ITER)
{
JS_ASSERT(regs.sp > regs.fp()->base());
JS_ASSERT(regs.stackDepth() >= 1);
uint8_t flags = GET_UINT8(regs.pc);
if (!ValueToIterator(cx, flags, &regs.sp[-1]))
goto error;
@ -1807,7 +1742,7 @@ END_CASE(JSOP_ITER)
BEGIN_CASE(JSOP_MOREITER)
{
JS_ASSERT(regs.sp - 1 >= regs.fp()->base());
JS_ASSERT(regs.stackDepth() >= 1);
JS_ASSERT(regs.sp[-1].isObject());
PUSH_NULL();
bool cond;
@ -1820,8 +1755,8 @@ END_CASE(JSOP_MOREITER)
BEGIN_CASE(JSOP_ITERNEXT)
{
JS_ASSERT(regs.stackDepth() >= unsigned(GET_INT8(regs.pc)));
Value *itervp = regs.sp - GET_INT8(regs.pc);
JS_ASSERT(itervp >= regs.fp()->base());
JS_ASSERT(itervp->isObject());
PUSH_NULL();
if (!IteratorNext(cx, &itervp->toObject(), &regs.sp[-1]))
@ -1831,7 +1766,7 @@ END_CASE(JSOP_ITERNEXT)
BEGIN_CASE(JSOP_ENDITER)
{
JS_ASSERT(regs.sp - 1 >= regs.fp()->base());
JS_ASSERT(regs.stackDepth() >= 1);
bool ok = CloseIterator(cx, &regs.sp[-1].toObject());
regs.sp--;
if (!ok)
@ -1841,7 +1776,7 @@ END_CASE(JSOP_ENDITER)
BEGIN_CASE(JSOP_DUP)
{
JS_ASSERT(regs.sp > regs.fp()->base());
JS_ASSERT(regs.stackDepth() >= 1);
const Value &rref = regs.sp[-1];
PUSH_COPY(rref);
}
@ -1849,7 +1784,7 @@ END_CASE(JSOP_DUP)
BEGIN_CASE(JSOP_DUP2)
{
JS_ASSERT(regs.sp - 2 >= regs.fp()->base());
JS_ASSERT(regs.stackDepth() >= 2);
const Value &lref = regs.sp[-2];
const Value &rref = regs.sp[-1];
PUSH_COPY(lref);
@ -1859,7 +1794,7 @@ END_CASE(JSOP_DUP2)
BEGIN_CASE(JSOP_SWAP)
{
JS_ASSERT(regs.sp - 2 >= regs.fp()->base());
JS_ASSERT(regs.stackDepth() >= 2);
Value &lref = regs.sp[-2];
Value &rref = regs.sp[-1];
lref.swap(rref);
@ -1869,7 +1804,7 @@ END_CASE(JSOP_SWAP)
BEGIN_CASE(JSOP_PICK)
{
unsigned i = GET_UINT8(regs.pc);
JS_ASSERT(regs.sp - (i + 1) >= regs.fp()->base());
JS_ASSERT(regs.stackDepth() >= i + 1);
Value lval = regs.sp[-int(i + 1)];
memmove(regs.sp - (i + 1), regs.sp - i, sizeof(Value) * i);
regs.sp[-1] = lval;
@ -2355,10 +2290,17 @@ BEGIN_CASE(JSOP_INCARG)
BEGIN_CASE(JSOP_ARGINC)
{
unsigned i = GET_ARGNO(regs.pc);
CheckArgAccess(regs.fp(), i);
Value &arg = regs.fp()->formalArg(i);
if (!DoIncDec(cx, script, regs.pc, arg, &arg, &regs.sp[0]))
goto error;
if (script->argsObjAliasesFormals()) {
const Value &arg = regs.fp()->argsObj().arg(i);
Value v;
if (!DoIncDec(cx, script, regs.pc, arg, &v, &regs.sp[0]))
goto error;
regs.fp()->argsObj().setArg(i, v);
} else {
Value &arg = regs.fp()->unaliasedFormal(i);
if (!DoIncDec(cx, script, regs.pc, arg, &arg, &regs.sp[0]))
goto error;
}
regs.sp++;
}
END_CASE(JSOP_ARGINC);
@ -2369,8 +2311,7 @@ BEGIN_CASE(JSOP_INCLOCAL)
BEGIN_CASE(JSOP_LOCALINC)
{
unsigned i = GET_SLOTNO(regs.pc);
CheckLocalAccess(regs.fp(), i);
Value &local = regs.fp()->localSlot(i);
Value &local = regs.fp()->unaliasedLocal(i);
if (!DoIncDec(cx, script, regs.pc, local, &local, &regs.sp[0]))
goto error;
regs.sp++;
@ -2481,13 +2422,12 @@ BEGIN_CASE(JSOP_NEW)
BEGIN_CASE(JSOP_CALL)
BEGIN_CASE(JSOP_FUNCALL)
{
JS_ASSERT(regs.stackDepth() >= 2 + GET_ARGC(regs.pc));
CallArgs args = CallArgsFromSp(GET_ARGC(regs.pc), regs.sp);
JS_ASSERT(args.base() >= regs.fp()->base());
bool construct = (*regs.pc == JSOP_NEW);
RootedFunction &fun = rootFunction0;
/* Don't bother trying to fast-path calls to scripted non-constructors. */
if (!IsFunctionObject(args.calleev(), fun.address()) || !fun->isInterpretedConstructor()) {
if (construct) {
@ -2521,10 +2461,6 @@ BEGIN_CASE(JSOP_FUNCALL)
goto error;
RESTORE_INTERP_VARS();
if (!regs.fp()->functionPrologue(cx))
goto error;
RESET_USE_METHODJIT();
bool newType = cx->typeInferenceEnabled() && UseNewType(cx, script, regs.pc);
@ -2547,7 +2483,7 @@ BEGIN_CASE(JSOP_FUNCALL)
}
#endif
if (!ScriptPrologue(cx, regs.fp(), newType))
if (!regs.fp()->prologue(cx, newType))
goto error;
if (cx->compartment->debugMode()) {
@ -2804,7 +2740,7 @@ END_CASE(JSOP_ACTUALSFILLED)
BEGIN_CASE(JSOP_ARGUMENTS)
JS_ASSERT(!regs.fp()->fun()->hasRest());
if (script->needsArgsObj()) {
ArgumentsObject *obj = ArgumentsObject::create(cx, regs.fp());
ArgumentsObject *obj = ArgumentsObject::createExpected(cx, regs.fp());
if (!obj)
goto error;
PUSH_COPY(ObjectValue(*obj));
@ -2843,16 +2779,20 @@ BEGIN_CASE(JSOP_GETARG)
BEGIN_CASE(JSOP_CALLARG)
{
unsigned i = GET_ARGNO(regs.pc);
CheckArgAccess(regs.fp(), i);
PUSH_COPY(regs.fp()->formalArg(i));
if (script->argsObjAliasesFormals())
PUSH_COPY(regs.fp()->argsObj().arg(i));
else
PUSH_COPY(regs.fp()->unaliasedFormal(i));
}
END_CASE(JSOP_GETARG)
BEGIN_CASE(JSOP_SETARG)
{
unsigned i = GET_ARGNO(regs.pc);
CheckArgAccess(regs.fp(), i);
regs.fp()->formalArg(i) = regs.sp[-1];
if (script->argsObjAliasesFormals())
regs.fp()->argsObj().setArg(i, regs.sp[-1]);
else
regs.fp()->unaliasedFormal(i) = regs.sp[-1];
}
END_CASE(JSOP_SETARG)
@ -2860,8 +2800,7 @@ BEGIN_CASE(JSOP_GETLOCAL)
BEGIN_CASE(JSOP_CALLLOCAL)
{
unsigned i = GET_SLOTNO(regs.pc);
CheckLocalAccess(regs.fp(), i);
PUSH_COPY_SKIP_CHECK(regs.fp()->localSlot(i));
PUSH_COPY_SKIP_CHECK(regs.fp()->unaliasedLocal(i));
/*
* Skip the same-compartment assertion if the local will be immediately
@ -2877,8 +2816,7 @@ END_CASE(JSOP_GETLOCAL)
BEGIN_CASE(JSOP_SETLOCAL)
{
unsigned i = GET_SLOTNO(regs.pc);
CheckLocalAccess(regs.fp(), i);
regs.fp()->localSlot(i) = regs.sp[-1];
regs.fp()->unaliasedLocal(i) = regs.sp[-1];
}
END_CASE(JSOP_SETLOCAL)
@ -3025,7 +2963,7 @@ END_CASE(JSOP_LAMBDA)
BEGIN_CASE(JSOP_CALLEE)
JS_ASSERT(regs.fp()->isNonEvalFunctionFrame());
PUSH_COPY(argv[-2]);
PUSH_COPY(regs.fp()->calleev());
END_CASE(JSOP_CALLEE)
BEGIN_CASE(JSOP_GETTER)
@ -3057,7 +2995,7 @@ BEGIN_CASE(JSOP_SETTER)
case JSOP_INITPROP:
{
JS_ASSERT(regs.sp - regs.fp()->base() >= 2);
JS_ASSERT(regs.stackDepth() >= 2);
rval = regs.sp[-1];
i = -1;
PropertyName *name;
@ -3067,8 +3005,7 @@ BEGIN_CASE(JSOP_SETTER)
}
default:
JS_ASSERT(op2 == JSOP_INITELEM);
JS_ASSERT(regs.sp - regs.fp()->base() >= 3);
JS_ASSERT(regs.stackDepth() >= 3);
rval = regs.sp[-1];
id = JSID_VOID;
i = -2;
@ -3186,7 +3123,7 @@ END_CASE(JSOP_NEWOBJECT)
BEGIN_CASE(JSOP_ENDINIT)
{
/* FIXME remove JSOP_ENDINIT bug 588522 */
JS_ASSERT(regs.sp - regs.fp()->base() >= 1);
JS_ASSERT(regs.stackDepth() >= 1);
JS_ASSERT(regs.sp[-1].isObject());
}
END_CASE(JSOP_ENDINIT)
@ -3194,7 +3131,7 @@ END_CASE(JSOP_ENDINIT)
BEGIN_CASE(JSOP_INITPROP)
{
/* Load the property's initial value into rval. */
JS_ASSERT(regs.sp - regs.fp()->base() >= 2);
JS_ASSERT(regs.stackDepth() >= 2);
Value rval = regs.sp[-1];
/* Load the object being initialized into lval/obj. */
@ -3222,7 +3159,7 @@ END_CASE(JSOP_INITPROP);
BEGIN_CASE(JSOP_INITELEM)
{
/* Pop the element's value into rval. */
JS_ASSERT(regs.sp - regs.fp()->base() >= 3);
JS_ASSERT(regs.stackDepth() >= 3);
const Value &rref = regs.sp[-1];
RootedObject &obj = rootObject0;
@ -3708,24 +3645,17 @@ BEGIN_CASE(JSOP_ENTERLET1)
{
StaticBlockObject &blockObj = script->getObject(GET_UINT32_INDEX(regs.pc))->asStaticBlock();
if (op == JSOP_ENTERBLOCK) {
JS_ASSERT(regs.stackDepth() == blockObj.stackDepth());
JS_ASSERT(regs.stackDepth() + blockObj.slotCount() <= script->nslots);
Value *vp = regs.sp + blockObj.slotCount();
SetValueRangeToUndefined(regs.sp, vp);
regs.sp = vp;
}
/* Clone block iff there are any closed-over variables. */
if (!regs.fp()->pushBlock(cx, blockObj))
goto error;
if (op == JSOP_ENTERBLOCK) {
JS_ASSERT(regs.fp()->base() + blockObj.stackDepth() == regs.sp);
Value *vp = regs.sp + blockObj.slotCount();
JS_ASSERT(regs.sp < vp);
JS_ASSERT(vp <= regs.fp()->slots() + script->nslots);
SetValueRangeToUndefined(regs.sp, vp);
regs.sp = vp;
} else if (op == JSOP_ENTERLET0) {
JS_ASSERT(regs.fp()->base() + blockObj.stackDepth() + blockObj.slotCount()
== regs.sp);
} else if (op == JSOP_ENTERLET1) {
JS_ASSERT(regs.fp()->base() + blockObj.stackDepth() + blockObj.slotCount()
== regs.sp - 1);
}
}
END_CASE(JSOP_ENTERBLOCK)
@ -3740,12 +3670,12 @@ BEGIN_CASE(JSOP_LEAVEBLOCKEXPR)
if (op == JSOP_LEAVEBLOCK) {
/* Pop the block's slots. */
regs.sp -= GET_UINT16(regs.pc);
JS_ASSERT(regs.fp()->base() + blockDepth == regs.sp);
JS_ASSERT(regs.stackDepth() == blockDepth);
} else if (op == JSOP_LEAVEBLOCKEXPR) {
/* Pop the block's slots maintaining the topmost expr. */
Value *vp = &regs.sp[-1];
regs.sp -= GET_UINT16(regs.pc);
JS_ASSERT(regs.fp()->base() + blockDepth == regs.sp - 1);
JS_ASSERT(regs.stackDepth() == blockDepth + 1);
regs.sp[-1] = *vp;
} else {
/* Another op will pop; nothing to do here. */
@ -3759,11 +3689,13 @@ END_CASE(JSOP_LEAVEBLOCK)
BEGIN_CASE(JSOP_GENERATOR)
{
JS_ASSERT(!cx->isExceptionPending());
regs.fp()->initGeneratorFrame();
regs.pc += JSOP_GENERATOR_LENGTH;
JSObject *obj = js_NewGenerator(cx);
if (!obj)
goto error;
regs.fp()->setReturnValue(ObjectValue(*obj));
regs.fp()->setYielding();
interpReturnOK = true;
if (entryFrame != regs.fp())
goto inline_return;
@ -3773,9 +3705,9 @@ BEGIN_CASE(JSOP_GENERATOR)
BEGIN_CASE(JSOP_YIELD)
JS_ASSERT(!cx->isExceptionPending());
JS_ASSERT(regs.fp()->isNonEvalFunctionFrame());
if (cx->generatorFor(regs.fp())->state == JSGEN_CLOSING) {
js_ReportValueError(cx, JSMSG_BAD_GENERATOR_YIELD,
JSDVG_SEARCH_STACK, argv[-2], NULL);
if (cx->innermostGenerator()->state == JSGEN_CLOSING) {
js_ReportValueError(cx, JSMSG_BAD_GENERATOR_YIELD, JSDVG_SEARCH_STACK,
ObjectValue(regs.fp()->callee()), NULL);
goto error;
}
regs.fp()->setReturnValue(regs.sp[-1]);
@ -3789,9 +3721,8 @@ BEGIN_CASE(JSOP_ARRAYPUSH)
uint32_t slot = GET_UINT16(regs.pc);
JS_ASSERT(script->nfixed <= slot);
JS_ASSERT(slot < script->nslots);
CheckLocalAccess(regs.fp(), slot);
RootedObject &obj = rootObject0;
obj = &regs.fp()->slots()[slot].toObject();
obj = &regs.fp()->unaliasedLocal(slot).toObject();
if (!js_NewbornArrayPush(cx, obj, regs.sp[-1]))
goto error;
regs.sp--;
@ -3907,7 +3838,7 @@ END_CASE(JSOP_ARRAYPUSH)
* the for-in loop.
*/
regs.pc = (script)->main() + tn->start + tn->length;
regs.sp = regs.fp()->base() + tn->stackDepth;
regs.sp = regs.spForStackDepth(tn->stackDepth);
switch (tn->kind) {
case JSTRY_CATCH:
@ -3977,19 +3908,10 @@ END_CASE(JSOP_ARRAYPUSH)
exit:
if (cx->compartment->debugMode())
interpReturnOK = ScriptDebugEpilogue(cx, regs.fp(), interpReturnOK);
interpReturnOK = ScriptEpilogueOrGeneratorYield(cx, regs.fp(), interpReturnOK);
if (!regs.fp()->isYielding())
regs.fp()->epilogue(cx);
regs.fp()->setFinishedInInterpreter();
#ifdef DEBUG
JS_ASSERT(entryFrame == regs.fp());
if (regs.fp()->isFunctionFrame())
AssertValidFunctionScopeChainAtExit(regs.fp());
else if (regs.fp()->isEvalFrame())
AssertValidEvalFrameScopeChainAtExit(regs.fp());
else if (!regs.fp()->isGeneratorFrame())
JS_ASSERT(!regs.fp()->scopeChain()->isScope());
#endif
#ifdef JS_METHODJIT
/*
* This path is used when it's guaranteed the method can be finished

Просмотреть файл

@ -18,31 +18,6 @@
namespace js {
/*
* ScriptPrologue/ScriptEpilogue must be called in pairs. ScriptPrologue
* must be called before the script executes. ScriptEpilogue must be called
* after the script returns or exits via exception.
*/
inline bool
ScriptPrologue(JSContext *cx, StackFrame *fp, JSScript *script);
inline bool
ScriptEpilogue(JSContext *cx, StackFrame *fp, bool ok);
/*
* It is not valid to call ScriptPrologue when a generator is resumed or to
* call ScriptEpilogue when a generator yields. However, the debugger still
* needs LIFO notification of generator start/stop. This pair of functions does
* the right thing based on the state of 'fp'.
*/
inline bool
ScriptPrologueOrGeneratorResume(JSContext *cx, StackFrame *fp);
inline bool
ScriptEpilogueOrGeneratorYield(JSContext *cx, StackFrame *fp, bool ok);
/* Implemented in jsdbgapi: */
/*
@ -274,9 +249,6 @@ UnwindForUncatchableException(JSContext *cx, const FrameRegs &regs);
extern bool
OnUnknownMethod(JSContext *cx, HandleObject obj, Value idval, Value *vp);
inline void
AssertValidFunctionScopeChainAtExit(StackFrame *fp);
class TryNoteIter
{
const FrameRegs &regs;

Просмотреть файл

@ -421,70 +421,6 @@ DefVarOrConstOperation(JSContext *cx, HandleObject varobj, PropertyName *dn, uns
return true;
}
inline bool
FunctionNeedsPrologue(JSContext *cx, JSFunction *fun)
{
/* Heavyweight functions need call objects created. */
if (fun->isHeavyweight())
return true;
/* Outer and inner functions need to preserve nesting invariants. */
if (cx->typeInferenceEnabled() && fun->script()->nesting())
return true;
return false;
}
inline bool
ScriptPrologue(JSContext *cx, StackFrame *fp, bool newType)
{
JS_ASSERT_IF(fp->isNonEvalFunctionFrame() && fp->fun()->isHeavyweight(), fp->hasCallObj());
if (fp->isConstructing()) {
JSObject *obj = js_CreateThisForFunction(cx, RootedObject(cx, &fp->callee()), newType);
if (!obj)
return false;
fp->functionThis().setObject(*obj);
}
Probes::enterJSFun(cx, fp->maybeFun(), fp->script());
return true;
}
inline bool
ScriptEpilogue(JSContext *cx, StackFrame *fp, bool ok)
{
Probes::exitJSFun(cx, fp->maybeFun(), fp->script());
/*
* If inline-constructing, replace primitive rval with the new object
* passed in via |this|, and instrument this constructor invocation.
*/
if (fp->isConstructing() && ok) {
if (fp->returnValue().isPrimitive())
fp->setReturnValue(ObjectValue(fp->constructorThis()));
}
return ok;
}
inline bool
ScriptPrologueOrGeneratorResume(JSContext *cx, StackFrame *fp, bool newType)
{
if (!fp->isGeneratorFrame())
return ScriptPrologue(cx, fp, newType);
return true;
}
inline bool
ScriptEpilogueOrGeneratorYield(JSContext *cx, StackFrame *fp, bool ok)
{
if (!fp->isYielding())
return ScriptEpilogue(cx, fp, ok);
return ok;
}
inline void
InterpreterFrames::enableInterruptsIfRunning(JSScript *script)
{
@ -492,49 +428,6 @@ InterpreterFrames::enableInterruptsIfRunning(JSScript *script)
enabler.enableInterrupts();
}
inline void
AssertValidEvalFrameScopeChainAtExit(StackFrame *fp)
{
#ifdef DEBUG
JS_ASSERT(fp->isEvalFrame());
JS_ASSERT(!fp->hasBlockChain());
JSObject &scope = *fp->scopeChain();
if (fp->isStrictEvalFrame())
JS_ASSERT(scope.asCall().maybeStackFrame() == fp);
else if (fp->isDebuggerFrame())
JS_ASSERT(!scope.isScope());
else if (fp->isDirectEvalFrame())
JS_ASSERT(scope == *fp->prev()->scopeChain());
else
JS_ASSERT(scope.isGlobal());
#endif
}
inline void
AssertValidFunctionScopeChainAtExit(StackFrame *fp)
{
#ifdef DEBUG
JS_ASSERT(fp->isFunctionFrame());
if (fp->isGeneratorFrame() || fp->isYielding())
return;
if (fp->isEvalFrame()) {
AssertValidEvalFrameScopeChainAtExit(fp);
return;
}
JS_ASSERT(!fp->hasBlockChain());
JSObject &scope = *fp->scopeChain();
if (fp->fun()->isHeavyweight() && fp->hasCallObj())
JS_ASSERT(scope.asCall().maybeStackFrame() == fp);
else if (scope.isCall() || scope.isBlock())
JS_ASSERT(scope.asScope().maybeStackFrame() != fp);
#endif
}
static JS_ALWAYS_INLINE bool
AddOperation(JSContext *cx, const Value &lhs, const Value &rhs, Value *res)
{
@ -722,7 +615,7 @@ GetObjectElementOperation(JSContext *cx, JSOp op, HandleObject obj, const Value
break;
}
} else if (obj->isArguments()) {
if (obj->asArguments().getElement(index, res))
if (obj->asArguments().maybeGetElement(index, res))
break;
}
if (!obj->getElement(cx, index, res))
@ -888,7 +781,7 @@ GuardFunApplySpeculation(JSContext *cx, FrameRegs &regs)
if (!IsNativeFunction(args.calleev(), js_fun_apply)) {
if (!JSScript::applySpeculationFailed(cx, regs.fp()->script()))
return false;
args[1] = ObjectValue(regs.fp()->argsObj());
regs.sp[-1] = ObjectValue(regs.fp()->argsObj());
}
}
return true;

Просмотреть файл

@ -1324,32 +1324,23 @@ generator_finalize(FreeOp *fop, JSObject *obj)
JS_ASSERT(gen->state == JSGEN_NEWBORN ||
gen->state == JSGEN_CLOSED ||
gen->state == JSGEN_OPEN);
JS_POISON(gen->fp, JS_FREE_PATTERN, sizeof(StackFrame));
JS_POISON(gen, JS_FREE_PATTERN, sizeof(JSGenerator));
fop->free_(gen);
}
static void
MarkGenerator(JSTracer *trc, JSGenerator *gen)
{
StackFrame *fp = gen->floatingFrame();
/*
* MarkGenerator should only be called when regs is based on the floating frame.
* See calls to RebaseRegsFromTo.
*/
JS_ASSERT(size_t(gen->regs.sp - fp->slots()) <= fp->numSlots());
/*
* Currently, generators are not mjitted. Still, (overflow) args can be
* pushed by the mjit and need to be conservatively marked. Technically, the
* formal args and generator slots are safe for exact marking, but since the
* plan is to eventually mjit generators, it makes sense to future-proof
* this code and save someone an hour later.
*/
MarkValueRange(trc, (HeapValue *)fp->formalArgsEnd() - gen->floatingStack,
gen->floatingStack, "Generator Floating Args");
fp->mark(trc);
MarkValueRange(trc, gen->regs.sp - fp->slots(),
(HeapValue *)fp->slots(), "Generator Floating Stack");
MarkValueRange(trc,
HeapValueify(gen->fp->generatorArgsSnapshotBegin()),
HeapValueify(gen->fp->generatorArgsSnapshotEnd()),
"Generator Floating Args");
gen->fp->mark(trc);
MarkValueRange(trc,
HeapValueify(gen->fp->generatorSlotsSnapshotBegin()),
HeapValueify(gen->regs.sp),
"Generator Floating Stack");
}
static void
@ -1367,15 +1358,8 @@ generator_trace(JSTracer *trc, JSObject *obj)
if (!gen)
return;
/*
* Do not mark if the generator is running; the contents may be trash and
* will be replaced when the generator stops.
*/
if (gen->state == JSGEN_RUNNING || gen->state == JSGEN_CLOSING)
return;
JS_ASSERT(gen->liveFrame() == gen->floatingFrame());
MarkGenerator(trc, gen);
if (gen->state == JSGEN_NEWBORN || gen->state == JSGEN_OPEN)
MarkGenerator(trc, gen);
}
Class js::GeneratorClass = {
@ -1415,9 +1399,8 @@ JSObject *
js_NewGenerator(JSContext *cx)
{
FrameRegs &stackRegs = cx->regs();
JS_ASSERT(stackRegs.stackDepth() == 0);
StackFrame *stackfp = stackRegs.fp();
JS_ASSERT(stackfp->base() == cx->regs().sp);
JS_ASSERT(stackfp->actualArgs() <= stackfp->formalArgs());
Rooted<GlobalObject*> global(cx, &stackfp->global());
JSObject *proto = global->getOrCreateGeneratorPrototype(cx);
@ -1428,15 +1411,15 @@ js_NewGenerator(JSContext *cx)
return NULL;
/* Load and compute stack slot counts. */
Value *stackvp = stackfp->actualArgs() - 2;
unsigned vplen = stackfp->formalArgsEnd() - stackvp;
Value *stackvp = stackfp->generatorArgsSnapshotBegin();
unsigned vplen = stackfp->generatorArgsSnapshotEnd() - stackvp;
/* Compute JSGenerator size. */
unsigned nbytes = sizeof(JSGenerator) +
(-1 + /* one Value included in JSGenerator */
vplen +
VALUES_PER_STACK_FRAME +
stackfp->numSlots()) * sizeof(HeapValue);
stackfp->script()->nslots) * sizeof(HeapValue);
JS_ASSERT(nbytes % sizeof(Value) == 0);
JS_STATIC_ASSERT(sizeof(StackFrame) % sizeof(HeapValue) == 0);
@ -1447,35 +1430,25 @@ js_NewGenerator(JSContext *cx)
SetValueRangeToUndefined((Value *)gen, nbytes / sizeof(Value));
/* Cut up floatingStack space. */
HeapValue *genvp = gen->floatingStack;
HeapValue *genvp = gen->stackSnapshot;
StackFrame *genfp = reinterpret_cast<StackFrame *>(genvp + vplen);
/* Initialize JSGenerator. */
gen->obj.init(obj);
gen->state = JSGEN_NEWBORN;
gen->enumerators = NULL;
gen->floating = genfp;
gen->fp = genfp;
gen->prevGenerator = NULL;
/* Copy from the stack to the generator's floating frame. */
gen->regs.rebaseFromTo(stackRegs, *genfp);
genfp->stealFrameAndSlots<HeapValue, Value, StackFrame::DoPostBarrier>(
genfp->copyFrameAndValues<HeapValue, Value, StackFrame::DoPostBarrier>(
cx, genfp, genvp, stackfp, stackvp, stackRegs.sp);
genfp->initFloatingGenerator();
stackfp->setYielding(); /* XXX: to be removed */
obj->setPrivate(gen);
return obj;
}
JSGenerator *
js_FloatingFrameToGenerator(StackFrame *fp)
{
JS_ASSERT(fp->isGeneratorFrame() && fp->isFloatingGenerator());
char *floatingStackp = (char *)(fp->actualArgs() - 2);
char *p = floatingStackp - offsetof(JSGenerator, floatingStack);
return reinterpret_cast<JSGenerator *>(p);
}
typedef enum JSGeneratorOp {
JSGENOP_NEXT,
JSGENOP_SEND,
@ -1492,16 +1465,10 @@ SendToGenerator(JSContext *cx, JSGeneratorOp op, JSObject *obj,
JSGenerator *gen, const Value &arg)
{
if (gen->state == JSGEN_RUNNING || gen->state == JSGEN_CLOSING) {
js_ReportValueError(cx, JSMSG_NESTING_GENERATOR,
JSDVG_SEARCH_STACK, ObjectOrNullValue(obj),
JS_GetFunctionId(gen->floatingFrame()->fun()));
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_NESTING_GENERATOR);
return JS_FALSE;
}
/* Check for OOM errors here, where we can fail easily. */
if (!cx->ensureGeneratorStackSpace())
return JS_FALSE;
/*
* Write barrier is needed since the generator stack can be updated,
* and it's not barriered in any other way. We need to do it before
@ -1541,8 +1508,6 @@ SendToGenerator(JSContext *cx, JSGeneratorOp op, JSObject *obj,
break;
}
StackFrame *genfp = gen->floatingFrame();
JSBool ok;
{
GeneratorFrameGuard gfg;
@ -1553,7 +1518,6 @@ SendToGenerator(JSContext *cx, JSGeneratorOp op, JSObject *obj,
StackFrame *fp = gfg.fp();
gen->regs = cx->regs();
JS_ASSERT(gen->liveFrame() == fp);
cx->enterGenerator(gen); /* OOM check above. */
JSObject *enumerators = cx->enumerators;
@ -1566,18 +1530,18 @@ SendToGenerator(JSContext *cx, JSGeneratorOp op, JSObject *obj,
cx->leaveGenerator(gen);
}
if (gen->floatingFrame()->isYielding()) {
if (gen->fp->isYielding()) {
/* Yield cannot fail, throw or be called on closing. */
JS_ASSERT(ok);
JS_ASSERT(!cx->isExceptionPending());
JS_ASSERT(gen->state == JSGEN_RUNNING);
JS_ASSERT(op != JSGENOP_CLOSE);
genfp->clearYielding();
gen->fp->clearYielding();
gen->state = JSGEN_OPEN;
return JS_TRUE;
}
genfp->clearReturnValue();
gen->fp->clearReturnValue();
gen->state = JSGEN_CLOSED;
if (ok) {
/* Returned, explicitly or by falling off the end. */
@ -1669,7 +1633,7 @@ generator_op(JSContext *cx, Native native, JSGeneratorOp op, Value *vp, unsigned
if (!SendToGenerator(cx, op, obj, gen, undef ? args[0] : UndefinedValue()))
return false;
args.rval() = gen->floatingFrame()->returnValue();
args.rval() = gen->fp->returnValue();
return true;
}

Просмотреть файл

@ -262,65 +262,28 @@ ForOf(JSContext *cx, const Value &iterable, Op op)
/*
* Generator state codes.
*/
typedef enum JSGeneratorState {
enum JSGeneratorState
{
JSGEN_NEWBORN, /* not yet started */
JSGEN_OPEN, /* started by a .next() or .send(undefined) call */
JSGEN_RUNNING, /* currently executing via .next(), etc., call */
JSGEN_CLOSING, /* close method is doing asynchronous return */
JSGEN_CLOSED /* closed, cannot be started or closed again */
} JSGeneratorState;
};
struct JSGenerator {
struct JSGenerator
{
js::HeapPtrObject obj;
JSGeneratorState state;
js::FrameRegs regs;
JSObject *enumerators;
js::StackFrame *floating;
js::HeapValue floatingStack[1];
js::StackFrame *floatingFrame() {
return floating;
}
js::StackFrame *liveFrame() {
JS_ASSERT((state == JSGEN_RUNNING || state == JSGEN_CLOSING) ==
(regs.fp() != floatingFrame()));
return regs.fp();
}
JSGenerator *prevGenerator;
js::StackFrame *fp;
js::HeapValue stackSnapshot[1];
};
extern JSObject *
js_NewGenerator(JSContext *cx);
/*
* Generator stack frames do not have stable pointers since they get copied to
* and from the generator object and the stack (see SendToGenerator). This is a
* problem for Block and With objects, which need to store a pointer to the
* enclosing stack frame. The solution is for Block and With objects to store
* a pointer to the "floating" stack frame stored in the generator object,
* since it is stable, and maintain, in the generator object, a pointer to the
* "live" stack frame (either a copy on the stack or the floating frame). Thus,
* Block and With objects must "normalize" to and from the floating/live frames
* in the case of generators using the following functions.
*/
inline js::StackFrame *
js_FloatingFrameIfGenerator(JSContext *cx, js::StackFrame *fp)
{
if (JS_UNLIKELY(fp->isGeneratorFrame()))
return cx->generatorFor(fp)->floatingFrame();
return fp;
}
/* Given a floating frame, given the JSGenerator containing it. */
extern JSGenerator *
js_FloatingFrameToGenerator(js::StackFrame *fp);
inline js::StackFrame *
js_LiveFrameIfGenerator(js::StackFrame *fp)
{
return fp->isGeneratorFrame() ? js_FloatingFrameToGenerator(fp)->liveFrame() : fp;
}
#endif
extern JSObject *

Просмотреть файл

@ -2849,6 +2849,7 @@ js::NewObjectWithType(JSContext *cx, HandleTypeObject type, JSObject *parent, gc
JS_ASSERT(type->proto->hasNewType(type));
JS_ASSERT(parent);
JS_ASSERT(kind <= gc::FINALIZE_OBJECT_LAST);
if (CanBeFinalizedInBackground(kind, &ObjectClass))
kind = GetBackgroundAllocKind(kind);
@ -3871,14 +3872,6 @@ JSObject::growSlots(JSContext *cx, uint32_t oldCount, uint32_t newCount)
JS_ASSERT(newCount >= SLOT_CAPACITY_MIN);
JS_ASSERT(!isDenseArray());
/*
* Slots are only allocated for call objects when new properties are
* added to them, which can only happen while the call is still on the
* stack (and an eval, DEFFUN, etc. happens). We thus do not need to
* worry about updating any active outer function args/vars.
*/
JS_ASSERT_IF(isCall(), asCall().maybeStackFrame() != NULL);
/*
* Slot capacities are determined by the span of allocated objects. Due to
* the limited number of bits to store shape slots, object growth is
@ -6245,15 +6238,9 @@ js_DumpStackFrame(JSContext *cx, StackFrame *start)
}
}
if (fp->hasArgs()) {
fprintf(stderr, " actuals: %p (%u) ", (void *) fp->actualArgs(), (unsigned) fp->numActualArgs());
fprintf(stderr, " formals: %p (%u)\n", (void *) fp->formalArgs(), (unsigned) fp->numFormalArgs());
fprintf(stderr, " actuals: %p (%u) ", (void *) fp->actuals(), (unsigned) fp->numActualArgs());
fprintf(stderr, " formals: %p (%u)\n", (void *) fp->formals(), (unsigned) fp->numFormalArgs());
}
if (fp->hasCallObj()) {
fprintf(stderr, " has call obj: ");
dumpValue(ObjectValue(fp->callObj()));
fprintf(stderr, "\n");
}
MaybeDumpObject("argsobj", fp->maybeArgsObj());
MaybeDumpObject("blockChain", fp->maybeBlockChain());
if (!fp->isDummyFrame()) {
MaybeDumpValue("this", fp->thisValue());

Просмотреть файл

@ -514,12 +514,12 @@ js_Disassemble1(JSContext *cx, JSScript *script, jsbytecode *pc,
}
case JOF_SCOPECOORD: {
Value v = StringValue(ScopeCoordinateName(script, pc));
Value v = StringValue(ScopeCoordinateName(cx->runtime, script, pc));
JSAutoByteString bytes;
if (!ToDisassemblySource(cx, v, &bytes))
return 0;
ScopeCoordinate sc(pc);
Sprint(sp, " %s (hops = %u, slot = %u)", bytes.ptr(), sc.hops, sc.binding);
Sprint(sp, " %s (hops = %u, slot = %u)", bytes.ptr(), sc.hops, sc.slot);
break;
}
@ -1409,6 +1409,12 @@ AddParenSlop(SprintStack *ss)
ss->sprinter.reserveAndClear(PAREN_SLOP);
}
static unsigned
StackDepth(JSScript *script)
{
return script->nslots - script->nfixed;
}
static JSBool
PushOff(SprintStack *ss, ptrdiff_t off, JSOp op, jsbytecode *pc = NULL)
{
@ -1844,7 +1850,7 @@ static bool
IsVarSlot(JSPrinter *jp, jsbytecode *pc, JSAtom **varAtom, int *localSlot)
{
if (JOF_OPTYPE(*pc) == JOF_SCOPECOORD) {
*varAtom = ScopeCoordinateName(jp->script, pc);
*varAtom = ScopeCoordinateName(jp->sprinter.context->runtime, jp->script, pc);
LOCAL_ASSERT_RV(*varAtom, NULL);
return true;
}
@ -5725,7 +5731,7 @@ js_DecompileValueGenerator(JSContext *cx, int spindex, jsval v,
* calculated value matching v under assumption that it is
* it that caused exception, see bug 328664.
*/
Value *stackBase = fp->base();
Value *stackBase = cx->regs().spForStackDepth(0);
Value *sp = cx->regs().sp;
do {
if (sp == stackBase) {

Просмотреть файл

@ -341,18 +341,14 @@ OPDEF(JSOP_FINALLY, 135,"finally", NULL, 1, 0, 2, 0, JOF_BYTE)
* 'slot' does not include RESERVED_SLOTS).
* uint32 block: the index (into the script object table) of the block chain
* at the point of the variable access.
*
* XXX: there is also a temporary 2-byte index (indicating the frame slot
* aliased by the scope chain) which will be removed with the last patch of bug
* 659577.
*/
OPDEF(JSOP_GETALIASEDVAR, 136,"getaliasedvar",NULL, 11, 0, 1, 19, JOF_SCOPECOORD|JOF_NAME)
OPDEF(JSOP_CALLALIASEDVAR,137,"callaliasedvar",NULL, 11, 0, 1, 19, JOF_SCOPECOORD|JOF_NAME)
OPDEF(JSOP_SETALIASEDVAR, 138,"setaliasedvar",NULL, 11, 1, 1, 3, JOF_SCOPECOORD|JOF_NAME|JOF_SET|JOF_DETECTING)
OPDEF(JSOP_INCALIASEDVAR, 139,"incaliasedvar",NULL, 12, 0, 1, 15, JOF_SCOPECOORD|JOF_NAME|JOF_INC|JOF_TMPSLOT3|JOF_DECOMPOSE)
OPDEF(JSOP_DECALIASEDVAR, 140,"decaliasedvar",NULL, 12, 0, 1, 15, JOF_SCOPECOORD|JOF_NAME|JOF_DEC|JOF_TMPSLOT3|JOF_DECOMPOSE)
OPDEF(JSOP_ALIASEDVARINC, 141,"aliasedvarinc",NULL, 12, 0, 1, 15, JOF_SCOPECOORD|JOF_NAME|JOF_INC|JOF_POST|JOF_TMPSLOT3|JOF_DECOMPOSE)
OPDEF(JSOP_ALIASEDVARDEC, 142,"aliasedvardec",NULL, 12, 0, 1, 15, JOF_SCOPECOORD|JOF_NAME|JOF_DEC|JOF_POST|JOF_TMPSLOT3|JOF_DECOMPOSE)
OPDEF(JSOP_GETALIASEDVAR, 136,"getaliasedvar",NULL, 9, 0, 1, 19, JOF_SCOPECOORD|JOF_NAME)
OPDEF(JSOP_CALLALIASEDVAR,137,"callaliasedvar",NULL, 9, 0, 1, 19, JOF_SCOPECOORD|JOF_NAME)
OPDEF(JSOP_SETALIASEDVAR, 138,"setaliasedvar",NULL, 9, 1, 1, 3, JOF_SCOPECOORD|JOF_NAME|JOF_SET|JOF_DETECTING)
OPDEF(JSOP_INCALIASEDVAR, 139,"incaliasedvar",NULL, 10, 0, 1, 15, JOF_SCOPECOORD|JOF_NAME|JOF_INC|JOF_TMPSLOT3|JOF_DECOMPOSE)
OPDEF(JSOP_DECALIASEDVAR, 140,"decaliasedvar",NULL, 10, 0, 1, 15, JOF_SCOPECOORD|JOF_NAME|JOF_DEC|JOF_TMPSLOT3|JOF_DECOMPOSE)
OPDEF(JSOP_ALIASEDVARINC, 141,"aliasedvarinc",NULL, 10, 0, 1, 15, JOF_SCOPECOORD|JOF_NAME|JOF_INC|JOF_POST|JOF_TMPSLOT3|JOF_DECOMPOSE)
OPDEF(JSOP_ALIASEDVARDEC, 142,"aliasedvardec",NULL, 10, 0, 1, 15, JOF_SCOPECOORD|JOF_NAME|JOF_DEC|JOF_POST|JOF_TMPSLOT3|JOF_DECOMPOSE)
/* Unused. */
OPDEF(JSOP_UNUSED8, 143,"unused8", NULL, 1, 0, 0, 0, JOF_BYTE)

Просмотреть файл

@ -289,7 +289,7 @@ Shape::getChildBinding(JSContext *cx, const StackShape &child)
* the fixed slot count here, which will feed into call objects created
* off of the bindings.
*/
uint32_t slots = child.slotSpan() + 1; /* Add one for private data. */
uint32_t slots = child.slotSpan();
gc::AllocKind kind = gc::GetGCObjectKind(slots);
/*
@ -300,11 +300,11 @@ Shape::getChildBinding(JSContext *cx, const StackShape &child)
*/
uint32_t nfixed = gc::GetGCKindSlots(kind);
if (nfixed < slots) {
nfixed = CallObject::RESERVED_SLOTS + 1;
JS_ASSERT(gc::GetGCKindSlots(gc::GetGCObjectKind(nfixed)) == CallObject::RESERVED_SLOTS + 1);
nfixed = CallObject::RESERVED_SLOTS;
JS_ASSERT(gc::GetGCKindSlots(gc::GetGCObjectKind(nfixed)) == CallObject::RESERVED_SLOTS);
}
shape->setNumFixedSlots(nfixed - 1);
shape->setNumFixedSlots(nfixed);
}
return shape;
}

Просмотреть файл

@ -67,7 +67,7 @@ Bindings::lookup(JSContext *cx, JSAtom *name, unsigned *indexp) const
if (indexp)
*indexp = shape->shortid();
if (shape->getter() == CallObject::getArgOp)
if (shape->setter() == CallObject::setArgOp)
return ARGUMENT;
return shape->writable() ? VARIABLE : CONSTANT;
@ -102,14 +102,14 @@ Bindings::add(JSContext *cx, HandleAtom name, BindingKind kind)
if (kind == ARGUMENT) {
JS_ASSERT(nvars == 0);
indexp = &nargs;
getter = CallObject::getArgOp;
getter = NULL;
setter = CallObject::setArgOp;
slot += nargs;
} else {
JS_ASSERT(kind == VARIABLE || kind == CONSTANT);
indexp = &nvars;
getter = CallObject::getVarOp;
getter = NULL;
setter = CallObject::setVarOp;
if (kind == CONSTANT)
attrs |= JSPROP_READONLY;
@ -208,7 +208,7 @@ Bindings::getLocalNameArray(JSContext *cx, BindingNames *namesp)
const Shape &shape = r.front();
unsigned index = uint16_t(shape.shortid());
if (shape.getter() == CallObject::getArgOp) {
if (shape.setter() == CallObject::setArgOp) {
JS_ASSERT(index < nargs);
names[index].kind = ARGUMENT;
} else {
@ -221,7 +221,7 @@ Bindings::getLocalNameArray(JSContext *cx, BindingNames *namesp)
names[index].maybeAtom = JSID_TO_ATOM(shape.propid());
} else {
JS_ASSERT(JSID_IS_INT(shape.propid()));
JS_ASSERT(shape.getter() == CallObject::getArgOp);
JS_ASSERT(shape.setter() == CallObject::setArgOp);
names[index].maybeAtom = NULL;
}
}
@ -241,7 +241,7 @@ Bindings::lastArgument() const
const js::Shape *shape = lastVariable();
if (nvars > 0) {
while (shape->previous() && shape->getter() != CallObject::getArgOp)
while (shape->previous() && shape->setter() != CallObject::setArgOp)
shape = shape->previous();
}
return shape;
@ -604,10 +604,10 @@ js::XDRScript(XDRState<mode> *xdr, JSScript **scriptp, JSScript *parentScript)
script->bindingsAccessedDynamically = true;
if (scriptBits & (1 << ArgumentsHasLocalBinding)) {
PropertyName *arguments = cx->runtime->atomState.argumentsAtom;
unsigned slot;
DebugOnly<BindingKind> kind = script->bindings.lookup(cx, arguments, &slot);
unsigned local;
DebugOnly<BindingKind> kind = script->bindings.lookup(cx, arguments, &local);
JS_ASSERT(kind == VARIABLE || kind == CONSTANT);
script->setArgumentsHasLocalBinding(slot);
script->setArgumentsHasLocalBinding(local);
}
if (scriptBits & (1 << NeedsArgsObj))
script->setNeedsArgsObj(true);
@ -1311,9 +1311,9 @@ JSScript::NewScriptFromEmitter(JSContext *cx, BytecodeEmitter *bce)
if (bce->sc->inFunction()) {
if (bce->sc->funArgumentsHasLocalBinding()) {
// This must precede the script->bindings.transfer() call below.
script->setArgumentsHasLocalBinding(bce->sc->argumentsLocalSlot());
if (bce->sc->funDefinitelyNeedsArgsObj())
// This must precede the script->bindings.transfer() call below
script->setArgumentsHasLocalBinding(bce->sc->argumentsLocal());
if (bce->sc->funDefinitelyNeedsArgsObj())
script->setNeedsArgsObj(true);
} else {
JS_ASSERT(!bce->sc->funDefinitelyNeedsArgsObj());
@ -1799,7 +1799,7 @@ js::CloneScript(JSContext *cx, JSScript *src)
dst->nslots = src->nslots;
dst->staticLevel = src->staticLevel;
if (src->argumentsHasLocalBinding()) {
dst->setArgumentsHasLocalBinding(src->argumentsLocalSlot());
dst->setArgumentsHasLocalBinding(src->argumentsLocal());
if (src->analyzedArgsUsage())
dst->setNeedsArgsObj(src->needsArgsObj());
}
@ -2127,10 +2127,10 @@ JSScript::markChildren(JSTracer *trc)
}
void
JSScript::setArgumentsHasLocalBinding(uint16_t slot)
JSScript::setArgumentsHasLocalBinding(uint16_t local)
{
argsHasLocalBinding_ = true;
argsSlot_ = slot;
argsLocal_ = local;
needsArgsAnalysis_ = true;
}
@ -2162,7 +2162,7 @@ JSScript::applySpeculationFailed(JSContext *cx, JSScript *script_)
script->needsArgsObj_ = true;
const unsigned slot = script->argumentsLocalSlot();
const unsigned local = script->argumentsLocal();
/*
* By design, the apply-arguments optimization is only made when there
@ -2179,22 +2179,20 @@ JSScript::applySpeculationFailed(JSContext *cx, JSScript *script_)
for (AllFramesIter i(cx->stack.space()); !i.done(); ++i) {
StackFrame *fp = i.fp();
if (fp->isFunctionFrame() && fp->script() == script) {
if (!fp->hasArgsObj()) {
ArgumentsObject *obj = ArgumentsObject::create(cx, fp);
if (!obj) {
/*
* We can't leave stack frames where script->needsArgsObj
* and !fp->hasArgsObj. It is, however, safe to leave frames
* where fp->hasArgsObj and !fp->script->needsArgsObj.
*/
script->needsArgsObj_ = false;
return false;
}
/* Note: 'arguments' may have already been overwritten. */
if (fp->localSlot(slot).isMagic(JS_OPTIMIZED_ARGUMENTS))
fp->localSlot(slot) = ObjectValue(*obj);
ArgumentsObject *argsobj = ArgumentsObject::createExpected(cx, fp);
if (!argsobj) {
/*
* We can't leave stack frames with script->needsArgsObj but no
* arguments object. It is, however, safe to leave frames with
* an arguments object but !script->needsArgsObj.
*/
script->needsArgsObj_ = false;
return false;
}
/* Note: 'arguments' may have already been overwritten. */
if (fp->unaliasedLocal(local).isMagic(JS_OPTIMIZED_ARGUMENTS))
fp->unaliasedLocal(local) = ObjectValue(*argsobj);
}
}

Просмотреть файл

@ -114,12 +114,12 @@ class Bindings
* These functions map between argument/var indices [0, nargs/nvars) and
* and Bindings indices [0, nargs + nvars).
*/
bool bindingIsArg(uint16_t i) const { return i < nargs; }
bool bindingIsLocal(uint16_t i) const { return i >= nargs; }
uint16_t argToBinding(uint16_t i) { JS_ASSERT(i < nargs); return i; }
uint16_t localToBinding(uint16_t i) { return i + nargs; }
uint16_t bindingToArg(uint16_t i) { JS_ASSERT(bindingIsArg(i)); return i; }
uint16_t bindingToLocal(uint16_t i) { JS_ASSERT(bindingIsLocal(i)); return i - nargs; }
bool slotIsArg(uint16_t i) const { return i < nargs; }
bool slotIsLocal(uint16_t i) const { return i >= nargs; }
uint16_t argToSlot(uint16_t i) { JS_ASSERT(i < nargs); return i; }
uint16_t localToSlot(uint16_t i) { return i + nargs; }
uint16_t slotToArg(uint16_t i) { JS_ASSERT(slotIsArg(i)); return i; }
uint16_t slotToLocal(uint16_t i) { JS_ASSERT(slotIsLocal(i)); return i - nargs; }
/* Ensure these bindings have a shape lineage. */
inline bool ensureShape(JSContext *cx);
@ -493,7 +493,7 @@ struct JSScript : public js::gc::Cell
uint16_t staticLevel;/* static level for display maintenance */
private:
uint16_t argsSlot_; /* slot holding 'arguments' (if argumentsHasLocalBindings) */
uint16_t argsLocal_; /* local holding 'arguments' (if argumentsHasLocalBindings) */
// 8-bit fields.
@ -588,8 +588,8 @@ struct JSScript : public js::gc::Cell
/* See ContextFlags::funArgumentsHasLocalBinding comment. */
bool argumentsHasLocalBinding() const { return argsHasLocalBinding_; }
jsbytecode *argumentsBytecode() const { JS_ASSERT(code[0] == JSOP_ARGUMENTS); return code; }
unsigned argumentsLocalSlot() const { JS_ASSERT(argsHasLocalBinding_); return argsSlot_; }
void setArgumentsHasLocalBinding(uint16_t slot);
unsigned argumentsLocal() const { JS_ASSERT(argsHasLocalBinding_); return argsLocal_; }
void setArgumentsHasLocalBinding(uint16_t local);
/*
* As an optimization, even when argsHasLocalBinding, the function prologue
@ -943,12 +943,6 @@ JS_STATIC_ASSERT(sizeof(JSScript::ArrayBitsT) * 8 >= JSScript::LIMIT);
/* If this fails, add/remove padding within JSScript. */
JS_STATIC_ASSERT(sizeof(JSScript) % js::gc::Cell::CellSize == 0);
static JS_INLINE unsigned
StackDepth(JSScript *script)
{
return script->nslots - script->nfixed;
}
/*
* New-script-hook calling is factored from NewScriptFromEmitter so that it
* and callers of XDRScript can share this code. In the case of callers

Просмотреть файл

@ -61,8 +61,8 @@ Shape *
Bindings::initialShape(JSContext *cx) const
{
/* Get an allocation kind to match an empty call object. */
gc::AllocKind kind = gc::FINALIZE_OBJECT4;
JS_ASSERT(gc::GetGCKindSlots(kind) == CallObject::RESERVED_SLOTS + 1);
gc::AllocKind kind = gc::FINALIZE_OBJECT2_BACKGROUND;
JS_ASSERT(gc::GetGCKindSlots(kind) == CallObject::RESERVED_SLOTS);
return EmptyShape::getInitialShape(cx, &CallClass, NULL, NULL, kind,
BaseShape::VAROBJ);

Просмотреть файл

@ -2215,17 +2215,13 @@ LambdaIsGetElem(JSObject &lambda, JSContext *cx)
* real name lookup since this can trigger observable effects.
*/
Value b;
JSObject *scope = cx->stack.currentScriptedScopeChain();
RootedObject scope(cx);
scope = cx->stack.currentScriptedScopeChain();
while (true) {
if (scope->isCall()) {
if (scope->asCall().containsVarOrArg(bname, &b, cx))
break;
} else if (scope->isBlock()) {
if (scope->asClonedBlock().containsVar(bname, &b, cx))
break;
} else {
if (!scope->isCall() && !scope->isBlock())
return NULL;
}
if (HasDataProperty(cx, scope, bname, &b))
break;
scope = &scope->asScope().enclosingScope();
}

Просмотреть файл

@ -215,10 +215,11 @@ typedef enum JSWhyMagic
JS_ARG_POISON, /* used in debug builds to catch tracing errors */
JS_SERIALIZE_NO_NODE, /* an empty subnode in the AST serializer */
JS_LAZY_ARGUMENTS, /* lazy arguments value on the stack */
JS_UNASSIGNED_ARGUMENTS, /* the initial value of callobj.arguments */
JS_OPTIMIZED_ARGUMENTS, /* optimized-away 'arguments' value */
JS_IS_CONSTRUCTING, /* magic value passed to natives to indicate construction */
JS_OVERWRITTEN_CALLEE, /* arguments.callee has been overwritten */
JS_FORWARD_TO_CALL_OBJECT, /* args object element stored in call object */
JS_BLOCK_NEEDS_CLONE, /* value of static block object slot */
JS_GENERIC_MAGIC /* for local use */
} JSWhyMagic;

Просмотреть файл

@ -1077,60 +1077,22 @@ mjit::Compiler::generatePrologue()
markUndefinedLocals();
types::TypeScriptNesting *nesting = script->nesting();
/*
* Run the function prologue if necessary. This is always done in a
* stub for heavyweight functions (including nesting outer functions).
* Load the scope chain into the frame if it will be needed by NAME
* opcodes or by the nesting prologue below. The scope chain is always
* set for global and eval frames, and will have been set by
* HeavyweightFunctionPrologue for heavyweight function frames.
*/
JS_ASSERT_IF(nesting && nesting->children, script->function()->isHeavyweight());
if (script->function()->isHeavyweight()) {
prepareStubCall(Uses(0));
INLINE_STUBCALL(stubs::FunctionFramePrologue, REJOIN_FUNCTION_PROLOGUE);
} else {
/*
* Load the scope chain into the frame if it will be needed by NAME
* opcodes or by the nesting prologue below. The scope chain is
* always set for global and eval frames, and will have been set by
* CreateFunCallObject for heavyweight function frames.
*/
if (analysis->usesScopeChain() || nesting) {
RegisterID t0 = Registers::ReturnReg;
Jump hasScope = masm.branchTest32(Assembler::NonZero,
FrameFlagsAddress(), Imm32(StackFrame::HAS_SCOPECHAIN));
masm.loadPayload(Address(JSFrameReg, StackFrame::offsetOfCallee(script->function())), t0);
masm.loadPtr(Address(t0, JSFunction::offsetOfEnvironment()), t0);
masm.storePtr(t0, Address(JSFrameReg, StackFrame::offsetOfScopeChain()));
hasScope.linkTo(masm.label(), &masm);
}
if (nesting) {
/*
* Inline the common case for the nesting prologue: the
* function is a non-heavyweight inner function with no
* children of its own. We ensure during inference that the
* outer function does not add scope objects for 'let' or
* 'with', so that the frame's scope chain will be
* the parent's call object, and if it differs from the
* parent's current activation then the parent is reentrant.
*/
JSScript *parent = nesting->parent;
JS_ASSERT(parent);
JS_ASSERT_IF(parent->hasAnalysis() && parent->analysis()->ranBytecode(),
!parent->analysis()->addsScopeObjects());
RegisterID t0 = Registers::ReturnReg;
masm.move(ImmPtr(&parent->nesting()->activeCall), t0);
masm.loadPtr(Address(t0), t0);
Address scopeChain(JSFrameReg, StackFrame::offsetOfScopeChain());
Jump mismatch = masm.branchPtr(Assembler::NotEqual, t0, scopeChain);
masm.add32(Imm32(1), AbsoluteAddress(&nesting->activeFrames));
stubcc.linkExitDirect(mismatch, stubcc.masm.label());
OOL_STUBCALL(stubs::FunctionFramePrologue, REJOIN_FUNCTION_PROLOGUE);
stubcc.crossJump(stubcc.masm.jump(), masm.label());
}
if (!script->function()->isHeavyweight() &&
(analysis->usesScopeChain() || script->nesting()))
{
RegisterID t0 = Registers::ReturnReg;
Jump hasScope = masm.branchTest32(Assembler::NonZero,
FrameFlagsAddress(), Imm32(StackFrame::HAS_SCOPECHAIN));
masm.loadPayload(Address(JSFrameReg, StackFrame::offsetOfCallee(script->function())), t0);
masm.loadPtr(Address(t0, JSFunction::offsetOfEnvironment()), t0);
masm.storePtr(t0, Address(JSFrameReg, StackFrame::offsetOfScopeChain()));
hasScope.linkTo(masm.label(), &masm);
}
/*
@ -1161,9 +1123,50 @@ mjit::Compiler::generatePrologue()
ensureDoubleArguments();
}
if (isConstructing) {
if (!constructThis())
return Compile_Error;
/* Inline StackFrame::prologue. */
if (script->isActiveEval && script->strictModeCode) {
prepareStubCall(Uses(0));
INLINE_STUBCALL(stubs::StrictEvalPrologue, REJOIN_EVAL_PROLOGUE);
} else if (script->function()) {
if (script->function()->isHeavyweight()) {
prepareStubCall(Uses(0));
INLINE_STUBCALL(stubs::HeavyweightFunctionPrologue, REJOIN_FUNCTION_PROLOGUE);
} else if (types::TypeScriptNesting *nesting = script->nesting()) {
/*
* Inline the common case for the nesting prologue: the
* function is a non-heavyweight inner function with no
* children of its own. We ensure during inference that the
* outer function does not add scope objects for 'let' or
* 'with', so that the frame's scope chain will be
* the parent's call object, and if it differs from the
* parent's current activation then the parent is reentrant.
*/
JSScript *parent = nesting->parent;
JS_ASSERT(parent);
JS_ASSERT_IF(parent->hasAnalysis() && parent->analysis()->ranBytecode(),
!parent->analysis()->addsScopeObjects());
RegisterID t0 = Registers::ReturnReg;
masm.move(ImmPtr(&parent->nesting()->activeCall), t0);
masm.loadPtr(Address(t0), t0);
Address scopeChain(JSFrameReg, StackFrame::offsetOfScopeChain());
Jump mismatch = masm.branchPtr(Assembler::NotEqual, t0, scopeChain);
masm.add32(Imm32(1), AbsoluteAddress(&nesting->activeFrames));
masm.load32(FrameFlagsAddress(), t0);
masm.or32(Imm32(StackFrame::HAS_NESTING), t0);
masm.store32(t0, FrameFlagsAddress());
stubcc.linkExitDirect(mismatch, stubcc.masm.label());
OOL_STUBCALL(stubs::TypeNestingPrologue, REJOIN_FUNCTION_PROLOGUE);
stubcc.crossJump(stubcc.masm.jump(), masm.label());
}
if (isConstructing) {
if (!constructThis())
return Compile_Error;
}
}
if (debugMode()) {
@ -1209,8 +1212,8 @@ void
mjit::Compiler::markUndefinedLocals()
{
/*
* Set locals to undefined, as in initCallFrameLatePrologue.
* Skip locals which aren't closed and are known to be defined before used,
* Set locals to undefined. Skip locals which aren't closed and are known
* to be defined before used,
*/
for (uint32_t i = 0; i < script->nfixed; i++)
markUndefinedLocal(0, i);
@ -2803,6 +2806,8 @@ mjit::Compiler::generateMethod()
uint32_t arg = GET_SLOTNO(PC);
if (JSObject *singleton = pushedSingleton(0))
frame.push(ObjectValue(*singleton));
else if (script->argsObjAliasesFormals())
jsop_aliasedArg(arg, /* get = */ true);
else
frame.pushArg(arg);
}
@ -2816,7 +2821,13 @@ mjit::Compiler::generateMethod()
{
jsbytecode *next = &PC[JSOP_SETARG_LENGTH];
bool pop = JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next);
frame.storeArg(GET_SLOTNO(PC), pop);
uint32_t arg = GET_SLOTNO(PC);
if (script->argsObjAliasesFormals())
jsop_aliasedArg(arg, /* get = */ false, pop);
else
frame.storeArg(arg, pop);
updateVarType();
if (pop) {
@ -2827,26 +2838,11 @@ mjit::Compiler::generateMethod()
}
END_CASE(JSOP_SETARG)
BEGIN_CASE(JSOP_GETLOCAL)
BEGIN_CASE(JSOP_CALLLOCAL)
BEGIN_CASE(JSOP_GETALIASEDVAR)
BEGIN_CASE(JSOP_CALLALIASEDVAR)
{
/* This is all temporary until bug 659577. */
if (JSObject *singleton = pushedSingleton(0)) {
frame.push(ObjectValue(*singleton));
} else {
ScopeCoordinate sc = ScopeCoordinate(PC);
if (script->bindings.bindingIsArg(sc.frameBinding))
frame.pushArg(script->bindings.bindingToArg(sc.frameBinding));
else
frame.pushLocal(script->bindings.bindingToLocal(sc.frameBinding));
}
}
END_CASE(JSOP_GETALIASEDVAR)
BEGIN_CASE(JSOP_GETLOCAL)
BEGIN_CASE(JSOP_CALLLOCAL)
{
/*
* Update the var type unless we are about to pop the variable.
* Sync is not guaranteed for types of dead locals, and GETLOCAL
@ -2855,46 +2851,37 @@ mjit::Compiler::generateMethod()
jsbytecode *next = &PC[JSOP_GETLOCAL_LENGTH];
if (JSOp(*next) != JSOP_POP || analysis->jumpTarget(next))
restoreVarType();
uint32_t slot = GET_SLOTNO(PC);
if (JSObject *singleton = pushedSingleton(0))
frame.push(ObjectValue(*singleton));
else if (JOF_OPTYPE(*PC) == JOF_SCOPECOORD)
jsop_aliasedVar(ScopeCoordinate(PC), /* get = */ true);
else
frame.pushLocal(slot);
frame.pushLocal(GET_SLOTNO(PC));
PC += GetBytecodeLength(PC);
break;
}
END_CASE(JSOP_GETLOCAL)
BEGIN_CASE(JSOP_SETLOCAL)
BEGIN_CASE(JSOP_SETALIASEDVAR)
{
/* This is all temporary until bug 659577. */
jsbytecode *next = &PC[JSOP_SETALIASEDVAR_LENGTH];
jsbytecode *next = &PC[GetBytecodeLength(PC)];
bool pop = JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next);
ScopeCoordinate sc = ScopeCoordinate(PC);
if (script->bindings.bindingIsArg(sc.frameBinding))
frame.storeArg(script->bindings.bindingToArg(sc.frameBinding), pop);
if (JOF_OPTYPE(*PC) == JOF_SCOPECOORD)
jsop_aliasedVar(ScopeCoordinate(PC), /* get = */ false, pop);
else
frame.storeLocal(script->bindings.bindingToLocal(sc.frameBinding), pop);
frame.storeLocal(GET_SLOTNO(PC), pop);
updateVarType();
if (pop) {
frame.pop();
PC += JSOP_SETALIASEDVAR_LENGTH + JSOP_POP_LENGTH;
PC = next + JSOP_POP_LENGTH;
break;
}
}
END_CASE(JSOP_SETALIASEDVAR)
BEGIN_CASE(JSOP_SETLOCAL)
{
jsbytecode *next = &PC[JSOP_SETLOCAL_LENGTH];
bool pop = JSOp(*next) == JSOP_POP && !analysis->jumpTarget(next);
frame.storeLocal(GET_SLOTNO(PC), pop);
updateVarType();
if (pop) {
frame.pop();
PC += JSOP_SETLOCAL_LENGTH + JSOP_POP_LENGTH;
break;
}
PC = next;
break;
}
END_CASE(JSOP_SETLOCAL)
@ -3792,47 +3779,12 @@ mjit::Compiler::emitReturn(FrameEntry *fe)
return;
}
/*
* Outside the mjit, activation objects (call objects and arguments objects) are put
* by ContextStack::pop* members. For JSOP_RETURN, the interpreter only calls
* popInlineFrame if fp != entryFrame since the VM protocol is that Invoke/Execute are
* responsible for pushing/popping the initial frame. However, an mjit function
* epilogue doesn't treat the initial StackFrame of its VMFrame specially: it always
* puts activation objects. And furthermore, if the last mjit frame throws, the mjit
* does *not* put the activation objects. So we can't assume any particular state of
* puttedness upon exit from the mjit.
*
* To avoid double-putting, EnterMethodJIT calls updateEpilogueFlags to clear the
* entry frame's hasArgsObj() and hasCallObj() flags if the given objects have already
* been put.
*/
if (script->function()) {
types::TypeScriptNesting *nesting = script->nesting();
if (script->function()->isHeavyweight() || script->needsArgsObj() ||
(nesting && nesting->children) || debugMode())
{
prepareStubCall(Uses(fe ? 1 : 0));
INLINE_STUBCALL(stubs::FunctionFrameEpilogue, REJOIN_NONE);
} else {
/* if hasCallObj() */
Jump putObjs = masm.branchTest32(Assembler::NonZero,
Address(JSFrameReg, StackFrame::offsetOfFlags()),
Imm32(StackFrame::HAS_CALL_OBJ));
stubcc.linkExit(putObjs, Uses(frame.frameSlots()));
stubcc.leave();
OOL_STUBCALL(stubs::FunctionFrameEpilogue, REJOIN_NONE);
emitReturnValue(&stubcc.masm, fe);
emitFinalReturn(stubcc.masm);
/*
* Do frame count balancing inline for inner functions in a nesting
* with no children of their own.
*/
if (nesting)
masm.sub32(Imm32(1), AbsoluteAddress(&nesting->activeFrames));
}
/* Inline StackFrame::epilogue. */
if (debugMode()) {
prepareStubCall(Uses(0));
INLINE_STUBCALL(stubs::Epilogue, REJOIN_NONE);
} else if (script->function() && script->nesting()) {
masm.sub32(Imm32(1), AbsoluteAddress(&script->nesting()->activeFrames));
}
emitReturnValue(&masm, fe);
@ -5718,7 +5670,7 @@ mjit::Compiler::jsop_bindname(PropertyName *name)
analysis->resolveNameAccess(cx, NameToId(name), true);
if (access.nesting) {
RegisterID reg = frame.allocReg();
JSObject **pobj = &access.nesting->activeCall;
CallObject **pobj = &access.nesting->activeCall;
masm.move(ImmPtr(pobj), reg);
masm.loadPtr(Address(reg), reg);
frame.pushTypedPayload(JSVAL_TYPE_OBJECT, reg);
@ -5829,6 +5781,100 @@ mjit::Compiler::jsop_bindname(PropertyName *name)
}
#endif
/*
 * Emit code to read (get = true) or write (get = false) a formal argument
 * that is aliased by the frame's arguments object. The value lives in the
 * ArgumentsData vector hanging off the args object, so we compute that
 * element's address and load/store through it rather than touching the
 * frame's own argument slot.
 */
void
mjit::Compiler::jsop_aliasedArg(unsigned arg, bool get, bool poppedAfter)
{
    RegisterID reg = frame.allocReg();

    /* Load the frame's arguments object, then its private ArgumentsData. */
    masm.loadPtr(Address(JSFrameReg, StackFrame::offsetOfArgsObj()), reg);
    size_t dataOff = ArgumentsObject::getDataSlotOffset();
    masm.loadPrivate(Address(reg, dataOff), reg);

    /* reg now points at the aliased argument's Value. */
    int32_t argsOff = ArgumentsData::offsetOfArgs() + arg * sizeof(Value);
    masm.addPtr(Imm32(argsOff), reg, reg);

    if (get) {
        /* Reuse any type knowledge tracked for the arg's frame entry. */
        FrameEntry *fe = frame.getArg(arg);
        JSValueType type = fe->isTypeKnown() ? fe->getKnownType() : JSVAL_TYPE_UNKNOWN;
        frame.push(Address(reg), type, true /* = reuseBase */);
    } else {
#ifdef JSGC_INCREMENTAL_MJ
        /*
         * Incremental GC requires a write barrier on the overwritten value;
         * take an OOL path to the barrier stub when the compartment needs it.
         */
        if (cx->compartment->needsBarrier()) {
            /* Write barrier. */
            stubcc.linkExit(masm.testGCThing(Address(reg)), Uses(0));
            stubcc.leave();
            stubcc.masm.move(reg, Registers::ArgReg1);
            OOL_STUBCALL(stubs::GCThingWriteBarrier, REJOIN_NONE);
            stubcc.rejoin(Changes(0));
        }
#endif
        /* Store the top of the simulated stack into the args object slot. */
        frame.storeTo(frame.peek(-1), Address(reg), poppedAfter);
        frame.freeReg(reg);
    }
}
/*
 * Emit code to read (get = true) or write (get = false) a variable that
 * lives in a scope object (CallObject or ClonedBlock) rather than in the
 * stack frame. |sc.hops| counts enclosing-scope links to walk from the
 * frame's scope chain; |sc.slot| selects the variable within the target
 * scope object.
 */
void
mjit::Compiler::jsop_aliasedVar(ScopeCoordinate sc, bool get, bool poppedAfter)
{
    RegisterID reg = frame.allocReg();

    /* Walk sc.hops links up the scope chain to the owning scope object. */
    masm.loadPtr(Address(JSFrameReg, StackFrame::offsetOfScopeChain()), reg);
    for (unsigned i = 0; i < sc.hops; i++)
        masm.loadPayload(Address(reg, ScopeObject::offsetOfEnclosingScope()), reg);

    /* Skip the scope object's reserved slots to reach the variable slots. */
    unsigned slot = ScopeObject::CALL_BLOCK_RESERVED_SLOTS + sc.slot;

    /*
     * TODO bug 753158: Call and Block objects should use the same layout
     * strategy: up to the maximum numFixedSlots and overflow (if any) in
     * dynamic slots. For now, we special case for different layouts:
     */
    Address addr;
    if (ScopeCoordinateBlockChain(script, PC)) {
        /*
         * Block objects use a fixed AllocKind which means an invariant number
         * of fixed slots. Any slot below the fixed slot count is inline, any
         * slot over is in the dynamic slots.
         */
        uint32_t nfixed = gc::GetGCKindSlots(BlockObject::FINALIZE_KIND);
        if (nfixed <= slot) {
            masm.loadPtr(Address(reg, JSObject::offsetOfSlots()), reg);
            addr = Address(reg, (slot - nfixed) * sizeof(Value));
        } else {
            addr = Address(reg, JSObject::getFixedSlotOffset(slot));
        }
    } else {
        /*
         * Using special-case hackery in Shape::getChildBinding, CallObject
         * slots are either altogether in fixed slots or altogether in dynamic
         * slots (by having numFixed == RESERVED_SLOTS). In the dynamic case
         * the first dynamic slot corresponds to slot RESERVED_SLOTS, so the
         * dynamic index collapses back to sc.slot.
         */
        if (script->bindings.lastShape()->numFixedSlots() <= slot) {
            masm.loadPtr(Address(reg, JSObject::offsetOfSlots()), reg);
            addr = Address(reg, sc.slot * sizeof(Value));
        } else {
            addr = Address(reg, JSObject::getFixedSlotOffset(slot));
        }
    }

    if (get) {
        /*
         * Borrow the type known for the corresponding frame-local/arg entry;
         * fall back to unknown when the frame tracks no type.
         */
        FrameEntry *fe = script->bindings.slotIsLocal(sc.slot)
                         ? frame.getLocal(script->bindings.slotToLocal(sc.slot))
                         : frame.getArg(script->bindings.slotToArg(sc.slot));
        JSValueType type = fe->isTypeKnown() ? fe->getKnownType() : JSVAL_TYPE_UNKNOWN;
        frame.push(addr, type, true /* = reuseBase */);
    } else {
#ifdef JSGC_INCREMENTAL_MJ
        /* Incremental GC needs a barrier on the value being overwritten. */
        if (cx->compartment->needsBarrier()) {
            /* Write barrier. */
            stubcc.linkExit(masm.testGCThing(addr), Uses(0));
            stubcc.leave();
            stubcc.masm.addPtr(Imm32(addr.offset), addr.base, Registers::ArgReg1);
            OOL_STUBCALL(stubs::GCThingWriteBarrier, REJOIN_NONE);
            stubcc.rejoin(Changes(0));
        }
#endif
        /* Store the top of the simulated stack into the scope object slot. */
        frame.storeTo(frame.peek(-1), addr, poppedAfter);
        frame.freeReg(reg);
    }
}
void
mjit::Compiler::jsop_this()
{

Просмотреть файл

@ -626,8 +626,8 @@ private:
void jsop_bindname(PropertyName *name);
void jsop_setglobal(uint32_t index);
void jsop_getprop_slow(PropertyName *name, bool forPrototype = false);
void jsop_getarg(uint32_t slot);
void jsop_setarg(uint32_t slot, bool popped);
void jsop_aliasedArg(unsigned i, bool get, bool poppedAfter = false);
void jsop_aliasedVar(ScopeCoordinate sc, bool get, bool poppedAfter = false);
void jsop_this();
void emitReturn(FrameEntry *fe);
void emitFinalReturn(Assembler &masm);

Просмотреть файл

@ -62,7 +62,7 @@ FindExceptionHandler(JSContext *cx)
*/
jsbytecode *pc = script->main() + tn->start + tn->length;
cx->regs().pc = pc;
cx->regs().sp = fp->base() + tn->stackDepth;
cx->regs().sp = cx->regs().spForStackDepth(tn->stackDepth);
switch (tn->kind) {
case JSTRY_CATCH:
@ -119,22 +119,6 @@ FindExceptionHandler(JSContext *cx)
/*
* Clean up a frame and return.
*/
static void
InlineReturn(VMFrame &f)
{
JS_ASSERT(f.fp() != f.entryfp);
AssertValidFunctionScopeChainAtExit(f.fp());
f.cx->stack.popInlineFrame(f.regs);
DebugOnly<JSOp> op = JSOp(*f.regs.pc);
JS_ASSERT(op == JSOP_CALL ||
op == JSOP_NEW ||
op == JSOP_EVAL ||
op == JSOP_FUNCALL ||
op == JSOP_FUNAPPLY);
f.regs.pc += JSOP_CALL_LENGTH;
}
void JS_FASTCALL
stubs::SlowCall(VMFrame &f, uint32_t argc)
@ -162,7 +146,7 @@ stubs::SlowNew(VMFrame &f, uint32_t argc)
static inline bool
CheckStackQuota(VMFrame &f)
{
JS_ASSERT(f.regs.sp == f.fp()->base());
JS_ASSERT(f.regs.stackDepth() == 0);
f.stackLimit = f.cx->stack.space().getStackLimit(f.cx, DONT_REPORT_ERROR);
if (f.stackLimit)
@ -306,10 +290,6 @@ UncachedInlineCall(VMFrame &f, InitialFrameFlags initial,
/* Finish the handoff to the new frame regs. */
PreserveRegsGuard regsGuard(cx, regs);
/* Scope with a call object parented by callee's parent. */
if (!regs.fp()->functionPrologue(cx))
return false;
/*
* If newscript was successfully compiled, run it. Skip for calls which
* will be constructing a new type object for 'this'.
@ -543,7 +523,7 @@ js_InternalThrow(VMFrame &f)
}
ScriptEpilogue(f.cx, f.fp(), false);
f.fp()->epilogue(f.cx);
// Don't remove the last frame, this is the responsibility of
// JaegerShot()'s caller. We only guarantee that ScriptEpilogue()
@ -551,8 +531,14 @@ js_InternalThrow(VMFrame &f)
if (f.entryfp == f.fp())
break;
JS_ASSERT(&cx->regs() == &f.regs);
InlineReturn(f);
f.cx->stack.popInlineFrame(f.regs);
DebugOnly<JSOp> op = JSOp(*f.regs.pc);
JS_ASSERT(op == JSOP_CALL ||
op == JSOP_NEW ||
op == JSOP_EVAL ||
op == JSOP_FUNCALL ||
op == JSOP_FUNAPPLY);
f.regs.pc += JSOP_CALL_LENGTH;
}
JS_ASSERT(&cx->regs() == &f.regs);
@ -587,11 +573,11 @@ js_InternalThrow(VMFrame &f)
if (cx->isExceptionPending()) {
JS_ASSERT(JSOp(*pc) == JSOP_ENTERBLOCK);
StaticBlockObject &blockObj = script->getObject(GET_UINT32_INDEX(pc))->asStaticBlock();
if (!cx->regs().fp()->pushBlock(cx, blockObj))
return NULL;
Value *vp = cx->regs().sp + blockObj.slotCount();
SetValueRangeToUndefined(cx->regs().sp, vp);
cx->regs().sp = vp;
if (!cx->regs().fp()->pushBlock(cx, blockObj))
return NULL;
JS_ASSERT(JSOp(pc[JSOP_ENTERBLOCK_LENGTH]) == JSOP_EXCEPTION);
cx->regs().sp[0] = cx->getPendingException();
@ -615,7 +601,7 @@ stubs::CreateThis(VMFrame &f, JSObject *proto)
JSObject *obj = js_CreateThisForFunctionWithProto(cx, callee, proto);
if (!obj)
THROW();
fp->formalArgs()[-1].setObject(*obj);
fp->thisValue() = ObjectValue(*obj);
}
void JS_FASTCALL
@ -706,7 +692,9 @@ FinishVarIncOp(VMFrame &f, RejoinState rejoin, Value ov, Value nv, Value *vp)
const JSCodeSpec *cs = &js_CodeSpec[op];
unsigned i = GET_SLOTNO(f.pc());
Value *var = (JOF_TYPE(cs->format) == JOF_LOCAL) ? f.fp()->slots() + i : &f.fp()->formalArg(i);
Value *var = (JOF_TYPE(cs->format) == JOF_LOCAL)
? &f.fp()->unaliasedLocal(i)
: &f.fp()->unaliasedFormal(i);
if (rejoin == REJOIN_POS) {
double d = ov.toNumber();
@ -722,7 +710,7 @@ FinishVarIncOp(VMFrame &f, RejoinState rejoin, Value ov, Value nv, Value *vp)
extern "C" void *
js_InternalInterpret(void *returnData, void *returnType, void *returnReg, js::VMFrame &f)
{
JSRejoinState jsrejoin = f.fp()->rejoin();
FrameRejoinState jsrejoin = f.fp()->rejoin();
RejoinState rejoin;
if (jsrejoin & 0x1) {
/* Rejoin after a scripted call finished. Restore f.regs.pc and f.regs.inlined (NULL) */
@ -757,12 +745,12 @@ js_InternalInterpret(void *returnData, void *returnType, void *returnReg, js::VM
* here. Update it to its value at the start of the opcode.
*/
Value *oldsp = f.regs.sp;
f.regs.sp = fp->base() + analysis->getCode(pc).stackDepth;
f.regs.sp = f.regs.spForStackDepth(analysis->getCode(pc).stackDepth);
jsbytecode *nextpc = pc + GetBytecodeLength(pc);
Value *nextsp = NULL;
if (nextpc != script->code + script->length && analysis->maybeCode(nextpc))
nextsp = fp->base() + analysis->getCode(nextpc).stackDepth;
nextsp = f.regs.spForStackDepth(analysis->getCode(nextpc).stackDepth);
JS_ASSERT(&cx->regs() == &f.regs);
@ -867,18 +855,13 @@ js_InternalInterpret(void *returnData, void *returnType, void *returnReg, js::VM
f.regs.pc = nextpc;
break;
case REJOIN_DEFLOCALFUN:
fp->slots()[GET_SLOTNO(pc)].setObject(* (JSObject *) returnReg);
f.regs.pc = nextpc;
break;
case REJOIN_THIS_PROTOTYPE: {
RootedObject callee(cx, &fp->callee());
JSObject *proto = f.regs.sp[0].isObject() ? &f.regs.sp[0].toObject() : NULL;
JSObject *obj = js_CreateThisForFunctionWithProto(cx, callee, proto);
if (!obj)
return js_InternalThrow(f);
fp->formalArgs()[-1].setObject(*obj);
fp->thisValue() = ObjectValue(*obj);
if (Probes::callTrackingActive(cx))
Probes::enterJSFun(f.cx, f.fp()->maybeFun(), f.fp()->script());
@ -902,42 +885,56 @@ js_InternalInterpret(void *returnData, void *returnType, void *returnReg, js::VM
break;
}
/*
* Each of these cases indicates a point of progress through
* generatePrologue. Execute the rest of the prologue here.
*/
case REJOIN_CHECK_ARGUMENTS:
/*
* Do all the work needed in arity check JIT prologues after the
* arguments check occurs (FixupArity has been called if needed, but
* the stack check and late prologue have not been performed.
*/
if (!CheckStackQuota(f))
return js_InternalThrow(f);
SetValueRangeToUndefined(fp->slots(), script->nfixed);
if (!fp->functionPrologue(cx))
return js_InternalThrow(f);
/* FALLTHROUGH */
case REJOIN_FUNCTION_PROLOGUE:
fp->initVarsToUndefined();
fp->scopeChain();
/* Construct the 'this' object for the frame if necessary. */
if (!ScriptPrologueOrGeneratorResume(cx, fp, types::UseNewTypeAtEntry(cx, fp)))
if (!fp->prologue(cx, types::UseNewTypeAtEntry(cx, fp)))
return js_InternalThrow(f);
/*
* Having called ScriptPrologueOrGeneratorResume, we would normally call
* ScriptDebugPrologue here. But in debug mode, we only use JITted
* functions' invokeEntry entry point, whereas CheckArgumentTypes
* (REJOIN_CHECK_ARGUMENTS) and FunctionFramePrologue
* (REJOIN_FUNCTION_PROLOGUE) are only reachable via the other entry
* points. So we should never need either of these rejoin tails in debug
* mode.
* We would normally call ScriptDebugPrologue here. But in debug mode,
* we only use JITted functions' invokeEntry entry point, whereas
* CheckArgumentTypes (REJOIN_CHECK_ARGUMENTS) is only reachable via
* the other entry points.
*
* If we fix bug 699196 ("Debug mode code could use inline caches
* now"), then these cases will become reachable again.
* now"), then this case will become reachable again.
*/
JS_ASSERT(!cx->compartment->debugMode());
break;
/* Finish executing the tail of generatePrologue. */
case REJOIN_FUNCTION_PROLOGUE:
if (fp->isConstructing()) {
RootedObject callee(cx, &fp->callee());
JSObject *obj = js_CreateThisForFunction(cx, callee, types::UseNewTypeAtEntry(cx, fp));
if (!obj)
return js_InternalThrow(f);
fp->functionThis() = ObjectValue(*obj);
}
/* FALLTHROUGH */
case REJOIN_EVAL_PROLOGUE:
if (cx->compartment->debugMode()) {
Probes::enterJSFun(cx, fp->maybeFun(), fp->script());
JSTrapStatus status = ScriptDebugPrologue(cx, fp);
switch (status) {
case JSTRAP_CONTINUE:
break;
case JSTRAP_RETURN:
return f.cx->jaegerRuntime().forceReturnFromFastCall();
case JSTRAP_ERROR:
case JSTRAP_THROW:
return js_InternalThrow(f);
default:
JS_NOT_REACHED("bad ScriptDebugPrologue status");
}
}
break;
case REJOIN_CALL_PROLOGUE:
@ -1060,7 +1057,7 @@ js_InternalInterpret(void *returnData, void *returnType, void *returnReg, js::VM
if (nextDepth == UINT32_MAX)
nextDepth = analysis->getCode(f.regs.pc).stackDepth;
f.regs.sp = fp->base() + nextDepth;
f.regs.sp = f.regs.spForStackDepth(nextDepth);
/*
* Monitor the result of the previous op when finishing a JOF_TYPESET op.

Просмотреть файл

@ -1056,10 +1056,6 @@ mjit::EnterMethodJIT(JSContext *cx, StackFrame *fp, void *code, Value *stackLimi
fp->markReturnValue();
}
/* See comment in mjit::Compiler::emitReturn. */
if (fp->isFunctionFrame())
fp->updateEpilogueFlags();
return ok ? Jaeger_Returned : Jaeger_Throwing;
}

Просмотреть файл

@ -109,7 +109,7 @@ struct VMFrame
Value *stackLimit;
StackFrame *entryfp;
FrameRegs *oldregs;
JSRejoinState stubRejoin; /* How to rejoin if inside a call from an IC stub. */
FrameRejoinState stubRejoin; /* How to rejoin if inside a call from an IC stub. */
#if defined(JS_CPU_X86)
void *unused0, *unused1; /* For 16 byte alignment */
@ -294,9 +294,6 @@ enum RejoinState {
REJOIN_PUSH_BOOLEAN,
REJOIN_PUSH_OBJECT,
/* Call returns an object, which should be assigned to a local per the current bytecode. */
REJOIN_DEFLOCALFUN,
/*
* During the prologue of constructing scripts, after the function's
* .prototype property has been fetched.
@ -310,9 +307,10 @@ enum RejoinState {
REJOIN_CHECK_ARGUMENTS,
/*
* The script's jitcode was discarded after marking an outer function as
* reentrant or due to a GC while creating a call object.
* The script's jitcode was discarded during one of the following steps of
* a frame's prologue.
*/
REJOIN_EVAL_PROLOGUE,
REJOIN_FUNCTION_PROLOGUE,
/*
@ -339,14 +337,14 @@ enum RejoinState {
};
/* Get the rejoin state for a StackFrame after returning from a scripted call. */
static inline JSRejoinState
static inline FrameRejoinState
ScriptedRejoin(uint32_t pcOffset)
{
return REJOIN_SCRIPTED | (pcOffset << 1);
}
/* Get the rejoin state for a StackFrame after returning from a stub call. */
static inline JSRejoinState
static inline FrameRejoinState
StubRejoin(RejoinState rejoin)
{
return rejoin << 1;

Просмотреть файл

@ -1054,7 +1054,7 @@ ic::SplatApplyArgs(VMFrame &f)
THROWV(false);
/* Steps 7-8. */
f.regs.fp()->forEachCanonicalActualArg(CopyTo(f.regs.sp));
f.regs.fp()->forEachUnaliasedActual(CopyTo(f.regs.sp));
f.regs.sp += length;
f.u.call.dynamicArgc = length;

Просмотреть файл

@ -294,8 +294,6 @@ class SetPropCompiler : public PICStubCompiler
JS_ASSERT_IF(!shape->hasDefaultSetter(), obj->isCall());
MaybeJump skipOver;
if (adding) {
JS_ASSERT(shape->hasSlot());
pic.shapeRegHasBaseShape = false;
@ -353,29 +351,11 @@ class SetPropCompiler : public PICStubCompiler
// then we can rely on fun->nargs remaining invariant.
JSFunction *fun = obj->asCall().getCalleeFunction();
uint16_t slot = uint16_t(shape->shortid());
/* Guard that the call object has a frame. */
masm.loadObjPrivate(pic.objReg, pic.shapeReg, obj->numFixedSlots());
Jump escapedFrame = masm.branchTestPtr(Assembler::Zero, pic.shapeReg, pic.shapeReg);
{
Address addr(pic.shapeReg, shape->setterOp() == CallObject::setArgOp
? StackFrame::offsetOfFormalArg(fun, slot)
: StackFrame::offsetOfFixed(slot));
masm.storeValue(pic.u.vr, addr);
skipOver = masm.jump();
}
escapedFrame.linkTo(masm.label(), &masm);
{
if (shape->setterOp() == CallObject::setVarOp)
slot += fun->nargs;
slot += CallObject::RESERVED_SLOTS;
Address address = masm.objPropAddress(obj, pic.objReg, slot);
masm.storeValue(pic.u.vr, address);
}
if (shape->setterOp() == CallObject::setVarOp)
slot += fun->nargs;
slot += CallObject::RESERVED_SLOTS;
Address address = masm.objPropAddress(obj, pic.objReg, slot);
masm.storeValue(pic.u.vr, address);
pic.shapeRegHasBaseShape = false;
}
@ -410,8 +390,6 @@ class SetPropCompiler : public PICStubCompiler
for (Jump *pj = slowExits.begin(); pj != slowExits.end(); ++pj)
buffer.link(*pj, pic.slowPathStart);
buffer.link(done, pic.fastPathRejoin);
if (skipOver.isSet())
buffer.link(skipOver.get(), pic.fastPathRejoin);
CodeLocationLabel cs = buffer.finalize(f);
JaegerSpew(JSpew_PICs, "generate setprop stub %p %p %d at %p\n",
(void*)&pic,
@ -762,6 +740,9 @@ struct GetPropHelper {
}
};
namespace js {
namespace mjit {
class GetPropCompiler : public PICStubCompiler
{
JSObject *obj;
@ -1392,6 +1373,9 @@ class GetPropCompiler : public PICStubCompiler
}
};
} // namespace mjit
} // namespace js
class ScopeNameCompiler : public PICStubCompiler
{
private:
@ -1570,9 +1554,9 @@ class ScopeNameCompiler : public PICStubCompiler
CallObjPropKind kind;
const Shape *shape = getprop.shape;
if (shape->getterOp() == CallObject::getArgOp) {
if (shape->setterOp() == CallObject::setArgOp) {
kind = ARG;
} else if (shape->getterOp() == CallObject::getVarOp) {
} else if (shape->setterOp() == CallObject::setVarOp) {
kind = VAR;
} else {
return disable("unhandled callobj sprop getter");
@ -1590,38 +1574,16 @@ class ScopeNameCompiler : public PICStubCompiler
Jump finalShape = masm.branchPtr(Assembler::NotEqual, pic.shapeReg,
ImmPtr(getprop.holder->lastProperty()));
/* Get callobj's stack frame. */
masm.loadObjPrivate(pic.objReg, pic.shapeReg, getprop.holder->numFixedSlots());
JSFunction *fun = getprop.holder->asCall().getCalleeFunction();
uint16_t slot = uint16_t(shape->shortid());
unsigned slot = shape->shortid();
if (kind == VAR)
slot += fun->nargs;
slot += CallObject::RESERVED_SLOTS;
Address address = masm.objPropAddress(obj, pic.objReg, slot);
Jump skipOver;
Jump escapedFrame = masm.branchTestPtr(Assembler::Zero, pic.shapeReg, pic.shapeReg);
/* Safe because type is loaded first. */
masm.loadValueAsComponents(address, pic.shapeReg, pic.objReg);
/* Not-escaped case. */
{
Address addr(pic.shapeReg, kind == ARG ? StackFrame::offsetOfFormalArg(fun, slot)
: StackFrame::offsetOfFixed(slot));
masm.loadPayload(addr, pic.objReg);
masm.loadTypeTag(addr, pic.shapeReg);
skipOver = masm.jump();
}
escapedFrame.linkTo(masm.label(), &masm);
{
if (kind == VAR)
slot += fun->nargs;
slot += CallObject::RESERVED_SLOTS;
Address address = masm.objPropAddress(obj, pic.objReg, slot);
/* Safe because type is loaded first. */
masm.loadValueAsComponents(address, pic.shapeReg, pic.objReg);
}
skipOver.linkTo(masm.label(), &masm);
Jump done = masm.jump();
// All failures flow to here, so there is a common point to patch.

Просмотреть файл

@ -912,7 +912,7 @@ stubs::InitElem(VMFrame &f, uint32_t last)
FrameRegs &regs = f.regs;
/* Pop the element's value into rval. */
JS_ASSERT(regs.sp - f.fp()->base() >= 3);
JS_ASSERT(regs.stackDepth() >= 3);
const Value &rref = regs.sp[-1];
/* Find the object being initialized at top of stack. */
@ -1020,7 +1020,7 @@ InitPropOrMethod(VMFrame &f, PropertyName *name, JSOp op)
FrameRegs &regs = f.regs;
/* Load the property's initial value into rval. */
JS_ASSERT(regs.sp - f.fp()->base() >= 2);
JS_ASSERT(regs.stackDepth() >= 2);
Value rval;
rval = regs.sp[-1];
@ -1048,7 +1048,7 @@ stubs::InitProp(VMFrame &f, PropertyName *name)
void JS_FASTCALL
stubs::IterNext(VMFrame &f, int32_t offset)
{
JS_ASSERT(f.regs.sp - offset >= f.fp()->base());
JS_ASSERT(f.regs.stackDepth() >= unsigned(offset));
JS_ASSERT(f.regs.sp[-offset].isObject());
JSObject *iterobj = &f.regs.sp[-offset].toObject();
@ -1061,7 +1061,7 @@ stubs::IterNext(VMFrame &f, int32_t offset)
JSBool JS_FASTCALL
stubs::IterMore(VMFrame &f)
{
JS_ASSERT(f.regs.sp - 1 >= f.fp()->base());
JS_ASSERT(f.regs.stackDepth() >= 1);
JS_ASSERT(f.regs.sp[-1].isObject());
Value v;
@ -1075,7 +1075,7 @@ stubs::IterMore(VMFrame &f)
void JS_FASTCALL
stubs::EndIter(VMFrame &f)
{
JS_ASSERT(f.regs.sp - 1 >= f.fp()->base());
JS_ASSERT(f.regs.stackDepth() >= 1);
if (!CloseIterator(f.cx, &f.regs.sp[-1].toObject()))
THROW();
}
@ -1125,7 +1125,7 @@ stubs::Throw(VMFrame &f)
void JS_FASTCALL
stubs::Arguments(VMFrame &f)
{
ArgumentsObject *obj = ArgumentsObject::create(f.cx, f.fp());
ArgumentsObject *obj = ArgumentsObject::createExpected(f.cx, f.fp());
if (!obj)
THROW();
f.regs.sp[0] = ObjectValue(*obj);
@ -1173,27 +1173,21 @@ void JS_FASTCALL
stubs::EnterBlock(VMFrame &f, JSObject *obj)
{
FrameRegs &regs = f.regs;
StackFrame *fp = f.fp();
JS_ASSERT(!f.regs.inlined());
StaticBlockObject &blockObj = obj->asStaticBlock();
if (!fp->pushBlock(f.cx, blockObj))
THROW();
if (*regs.pc == JSOP_ENTERBLOCK) {
JS_ASSERT(fp->base() + blockObj.stackDepth() == regs.sp);
JS_ASSERT(regs.stackDepth() == blockObj.stackDepth());
JS_ASSERT(regs.stackDepth() + blockObj.slotCount() <= f.fp()->script()->nslots);
Value *vp = regs.sp + blockObj.slotCount();
JS_ASSERT(regs.sp < vp);
JS_ASSERT(vp <= fp->slots() + fp->script()->nslots);
SetValueRangeToUndefined(regs.sp, vp);
regs.sp = vp;
} else if (*regs.pc == JSOP_ENTERLET0) {
JS_ASSERT(regs.fp()->base() + blockObj.stackDepth() + blockObj.slotCount()
== regs.sp);
} else if (*regs.pc == JSOP_ENTERLET1) {
JS_ASSERT(regs.fp()->base() + blockObj.stackDepth() + blockObj.slotCount()
== regs.sp - 1);
}
/* Clone block iff there are any closed-over variables. */
if (!regs.fp()->pushBlock(f.cx, blockObj))
THROW();
}
void JS_FASTCALL
@ -1522,7 +1516,7 @@ stubs::CheckArgumentTypes(VMFrame &f)
if (!f.fp()->isConstructing())
TypeScript::SetThis(f.cx, script, fp->thisValue());
for (unsigned i = 0; i < fun->nargs; i++)
TypeScript::SetArgument(f.cx, script, i, fp->formalArg(i));
TypeScript::SetArgument(f.cx, script, i, fp->unaliasedFormal(i, DONT_CHECK_ALIASING));
}
if (monitor.recompiled())
@ -1552,7 +1546,7 @@ stubs::AssertArgumentTypes(VMFrame &f)
}
for (unsigned i = 0; i < fun->nargs; i++) {
Type type = GetValueType(f.cx, fp->formalArg(i));
Type type = GetValueType(f.cx, fp->unaliasedFormal(i, DONT_CHECK_ALIASING));
if (!TypeScript::ArgTypes(script, i)->hasType(type))
TypeFailure(f.cx, "Missing type for arg %d: %s", i, TypeString(type));
}
@ -1609,16 +1603,29 @@ stubs::Exception(VMFrame &f)
}
void JS_FASTCALL
stubs::FunctionFramePrologue(VMFrame &f)
stubs::StrictEvalPrologue(VMFrame &f)
{
if (!f.fp()->functionPrologue(f.cx))
if (!f.fp()->jitStrictEvalPrologue(f.cx))
THROW();
}
void JS_FASTCALL
stubs::FunctionFrameEpilogue(VMFrame &f)
stubs::HeavyweightFunctionPrologue(VMFrame &f)
{
f.fp()->functionEpilogue(f.cx);
if (!f.fp()->jitHeavyweightFunctionPrologue(f.cx))
THROW();
}
void JS_FASTCALL
stubs::TypeNestingPrologue(VMFrame &f)
{
f.fp()->jitTypeNestingPrologue(f.cx);
}
void JS_FASTCALL
stubs::Epilogue(VMFrame &f)
{
f.fp()->epilogue(f.cx);
}
void JS_FASTCALL
@ -1626,17 +1633,15 @@ stubs::AnyFrameEpilogue(VMFrame &f)
{
/*
* On the normal execution path, emitReturn calls ScriptDebugEpilogue
* and inlines ScriptEpilogue. This function implements forced early
* and inlines epilogue. This function implements forced early
* returns, so it must have the same effect.
*/
bool ok = true;
if (f.cx->compartment->debugMode())
ok = js::ScriptDebugEpilogue(f.cx, f.fp(), ok);
ok = ScriptEpilogue(f.cx, f.fp(), ok);
f.fp()->epilogue(f.cx);
if (!ok)
THROW();
if (f.fp()->isNonEvalFunctionFrame())
f.fp()->functionEpilogue(f.cx);
}
template <bool Clamped>

Просмотреть файл

@ -168,10 +168,12 @@ void JS_FASTCALL ConvertToTypedFloat(JSContext *cx, Value *vp);
void JS_FASTCALL Exception(VMFrame &f);
void JS_FASTCALL FunctionFramePrologue(VMFrame &f);
void JS_FASTCALL FunctionFrameEpilogue(VMFrame &f);
void JS_FASTCALL StrictEvalPrologue(VMFrame &f);
void JS_FASTCALL HeavyweightFunctionPrologue(VMFrame &f);
void JS_FASTCALL TypeNestingPrologue(VMFrame &f);
void JS_FASTCALL AnyFrameEpilogue(VMFrame &f);
void JS_FASTCALL Epilogue(VMFrame &f);
JSObject * JS_FASTCALL
NewDenseUnallocatedArray(VMFrame &f, uint32_t length);

Просмотреть файл

@ -1281,31 +1281,14 @@ AssertJit(JSContext *cx, unsigned argc, jsval *vp)
static JSScript *
ValueToScript(JSContext *cx, jsval v, JSFunction **funp = NULL)
{
JSScript *script = NULL;
JSFunction *fun = NULL;
JSFunction *fun = JS_ValueToFunction(cx, v);
if (!fun)
return NULL;
if (!JSVAL_IS_PRIMITIVE(v)) {
JSObject *obj = JSVAL_TO_OBJECT(v);
JSClass *clasp = JS_GetClass(obj);
JSScript *script = fun->maybeScript();
if (!script)
JS_ReportErrorNumber(cx, my_GetErrorMessage, NULL, JSSMSG_SCRIPTS_ONLY);
if (clasp == Jsvalify(&GeneratorClass)) {
if (JSGenerator *gen = (JSGenerator *) JS_GetPrivate(obj)) {
fun = gen->floatingFrame()->fun();
script = fun->script();
}
}
}
if (!script) {
fun = JS_ValueToFunction(cx, v);
if (!fun)
return NULL;
script = fun->maybeScript();
if (!script) {
JS_ReportErrorNumber(cx, my_GetErrorMessage, NULL,
JSSMSG_SCRIPTS_ONLY);
}
}
if (fun && funp)
*funp = fun;

Просмотреть файл

@ -10,16 +10,9 @@
#include "ArgumentsObject.h"
namespace js {
#include "ScopeObject-inl.h"
inline void
ArgumentsObject::initInitialLength(uint32_t length)
{
JS_ASSERT(getFixedSlot(INITIAL_LENGTH_SLOT).isUndefined());
initFixedSlot(INITIAL_LENGTH_SLOT, Int32Value(length << PACKED_BITS_COUNT));
JS_ASSERT((getFixedSlot(INITIAL_LENGTH_SLOT).toInt32() >> PACKED_BITS_COUNT) == int32_t(length));
JS_ASSERT(!hasOverriddenLength());
}
namespace js {
inline uint32_t
ArgumentsObject::initialLength() const
@ -39,26 +32,67 @@ ArgumentsObject::markLengthOverridden()
inline bool
ArgumentsObject::hasOverriddenLength() const
{
const js::Value &v = getFixedSlot(INITIAL_LENGTH_SLOT);
const Value &v = getFixedSlot(INITIAL_LENGTH_SLOT);
return v.toInt32() & LENGTH_OVERRIDDEN_BIT;
}
inline void
ArgumentsObject::initData(ArgumentsData *data)
{
JS_ASSERT(getFixedSlot(DATA_SLOT).isUndefined());
initFixedSlot(DATA_SLOT, PrivateValue(data));
}
inline ArgumentsData *
ArgumentsObject::data() const
{
return reinterpret_cast<js::ArgumentsData *>(getFixedSlot(DATA_SLOT).toPrivate());
return reinterpret_cast<ArgumentsData *>(getFixedSlot(DATA_SLOT).toPrivate());
}
inline JSScript *
ArgumentsObject::containingScript() const
{
return data()->script;
}
inline const Value &
ArgumentsObject::arg(unsigned i) const
{
JS_ASSERT(i < data()->numArgs);
const Value &v = data()->args[i];
JS_ASSERT(!v.isMagic(JS_FORWARD_TO_CALL_OBJECT));
return v;
}
inline void
ArgumentsObject::setArg(unsigned i, const Value &v)
{
JS_ASSERT(i < data()->numArgs);
HeapValue &lhs = data()->args[i];
JS_ASSERT(!lhs.isMagic(JS_FORWARD_TO_CALL_OBJECT));
lhs = v;
}
inline const Value &
ArgumentsObject::element(uint32_t i) const
{
JS_ASSERT(!isElementDeleted(i));
const Value &v = data()->args[i];
if (v.isMagic(JS_FORWARD_TO_CALL_OBJECT))
return getFixedSlot(MAYBE_CALL_SLOT).toObject().asCall().arg(i);
return v;
}
inline void
ArgumentsObject::setElement(uint32_t i, const Value &v)
{
JS_ASSERT(!isElementDeleted(i));
HeapValue &lhs = data()->args[i];
if (lhs.isMagic(JS_FORWARD_TO_CALL_OBJECT))
getFixedSlot(MAYBE_CALL_SLOT).toObject().asCall().setArg(i, v);
else
lhs = v;
}
inline bool
ArgumentsObject::isElementDeleted(uint32_t i) const
{
JS_ASSERT(i < data()->numArgs);
if (i >= initialLength())
return false;
return IsBitArrayElementSet(data()->deletedBits, initialLength(), i);
}
@ -74,57 +108,17 @@ ArgumentsObject::markElementDeleted(uint32_t i)
SetBitArrayElement(data()->deletedBits, initialLength(), i);
}
inline const Value &
ArgumentsObject::element(uint32_t i) const
{
JS_ASSERT(!isElementDeleted(i));
return data()->slots[i];
}
inline void
ArgumentsObject::setElement(uint32_t i, const js::Value &v)
{
JS_ASSERT(!isElementDeleted(i));
data()->slots[i] = v;
}
inline bool
ArgumentsObject::getElement(uint32_t i, Value *vp)
ArgumentsObject::maybeGetElement(uint32_t i, Value *vp)
{
if (i >= initialLength() || isElementDeleted(i))
return false;
/*
* If this arguments object has an associated stack frame, that contains
* the canonical argument value. Note that strict arguments objects do not
* alias named arguments and never have a stack frame.
*/
StackFrame *fp = maybeStackFrame();
JS_ASSERT_IF(isStrictArguments(), !fp);
if (fp)
*vp = fp->canonicalActualArg(i);
else
*vp = element(i);
*vp = element(i);
return true;
}
namespace detail {
struct STATIC_SKIP_INFERENCE CopyNonHoleArgsTo
{
CopyNonHoleArgsTo(ArgumentsObject *argsobj, Value *dst) : argsobj(*argsobj), dst(dst) {}
ArgumentsObject &argsobj;
Value *dst;
bool operator()(uint32_t argi, Value *src) {
*dst++ = *src;
return true;
}
};
} /* namespace detail */
inline bool
ArgumentsObject::getElements(uint32_t start, uint32_t count, Value *vp)
ArgumentsObject::maybeGetElements(uint32_t start, uint32_t count, Value *vp)
{
JS_ASSERT(start + count >= start);
@ -132,33 +126,9 @@ ArgumentsObject::getElements(uint32_t start, uint32_t count, Value *vp)
if (start > length || start + count > length || isAnyElementDeleted())
return false;
StackFrame *fp = maybeStackFrame();
/* If there's no stack frame for this, argument values are in elements(). */
if (!fp) {
const Value *srcbeg = Valueify(data()->slots) + start;
const Value *srcend = srcbeg + count;
const Value *src = srcbeg;
for (Value *dst = vp; src < srcend; ++dst, ++src)
*dst = *src;
return true;
}
/* Otherwise, element values are on the stack. */
JS_ASSERT(fp->numActualArgs() <= StackSpace::ARGS_LENGTH_MAX);
return fp->forEachCanonicalActualArg(detail::CopyNonHoleArgsTo(this, vp), start, count);
}
inline js::StackFrame *
ArgumentsObject::maybeStackFrame() const
{
return reinterpret_cast<js::StackFrame *>(getFixedSlot(STACK_FRAME_SLOT).toPrivate());
}
inline void
ArgumentsObject::setStackFrame(StackFrame *frame)
{
setFixedSlot(STACK_FRAME_SLOT, PrivateValue(frame));
for (uint32_t i = start, end = start + count; i < end; ++i, ++vp)
*vp = element(i);
return true;
}
inline size_t
@ -167,7 +137,7 @@ ArgumentsObject::sizeOfMisc(JSMallocSizeOfFun mallocSizeOf) const
return mallocSizeOf(data());
}
inline const js::Value &
inline const Value &
NormalArgumentsObject::callee() const
{
return data()->callee;
@ -179,6 +149,6 @@ NormalArgumentsObject::clearCallee()
data()->callee.set(compartment(), MagicValue(JS_OVERWRITTEN_CALLEE));
}
} // namespace js
} /* namespace js */
#endif /* ArgumentsObject_inl_h___ */

Просмотреть файл

@ -22,53 +22,20 @@
using namespace js;
using namespace js::gc;
struct PutArg
{
PutArg(JSCompartment *comp, ArgumentsObject &argsobj)
: compartment(comp), argsobj(argsobj), dst(argsobj.data()->slots) {}
JSCompartment *compartment;
ArgumentsObject &argsobj;
HeapValue *dst;
bool operator()(unsigned i, Value *src) {
JS_ASSERT(dst->isUndefined());
if (!argsobj.isElementDeleted(i))
dst->set(compartment, *src);
++dst;
return true;
}
};
void
js_PutArgsObject(StackFrame *fp)
{
ArgumentsObject &argsobj = fp->argsObj();
if (argsobj.isNormalArguments()) {
JS_ASSERT(argsobj.maybeStackFrame() == fp);
JSCompartment *comp = fp->compartment();
fp->forEachCanonicalActualArg(PutArg(comp, argsobj));
argsobj.setStackFrame(NULL);
} else {
JS_ASSERT(!argsobj.maybeStackFrame());
}
}
ArgumentsObject *
ArgumentsObject::create(JSContext *cx, uint32_t argc, HandleObject callee)
ArgumentsObject::create(JSContext *cx, StackFrame *fp)
{
JS_ASSERT(argc <= StackSpace::ARGS_LENGTH_MAX);
JS_ASSERT(!callee->toFunction()->hasRest());
RootedObject proto(cx, callee->global().getOrCreateObjectPrototype(cx));
JSFunction &callee = fp->callee();
RootedObject proto(cx, callee.global().getOrCreateObjectPrototype(cx));
if (!proto)
return NULL;
RootedTypeObject type(cx);
type = proto->getNewType(cx);
if (!type)
return NULL;
bool strict = callee->toFunction()->inStrictMode();
bool strict = callee.inStrictMode();
Class *clasp = strict ? &StrictArgumentsObjectClass : &NormalArgumentsObjectClass;
RootedShape emptyArgumentsShape(cx);
@ -79,59 +46,76 @@ ArgumentsObject::create(JSContext *cx, uint32_t argc, HandleObject callee)
if (!emptyArgumentsShape)
return NULL;
unsigned numDeletedWords = NumWordsForBitArrayOfLength(argc);
unsigned numBytes = offsetof(ArgumentsData, slots) +
unsigned numActuals = fp->numActualArgs();
unsigned numFormals = fp->numFormalArgs();
unsigned numDeletedWords = NumWordsForBitArrayOfLength(numActuals);
unsigned numArgs = Max(numActuals, numFormals);
unsigned numBytes = offsetof(ArgumentsData, args) +
numDeletedWords * sizeof(size_t) +
argc * sizeof(Value);
numArgs * sizeof(Value);
ArgumentsData *data = (ArgumentsData *)cx->malloc_(numBytes);
if (!data)
return NULL;
data->callee.init(ObjectValue(*callee));
for (HeapValue *vp = data->slots; vp != data->slots + argc; vp++)
vp->init(UndefinedValue());
data->deletedBits = (size_t *)(data->slots + argc);
data->numArgs = numArgs;
data->callee.init(ObjectValue(callee));
data->script = fp->script();
/* Copy [0, numArgs) into data->slots. */
HeapValue *dst = data->args, *dstEnd = data->args + numArgs;
for (Value *src = fp->formals(), *end = src + numFormals; src != end; ++src, ++dst)
dst->init(*src);
if (numActuals > numFormals) {
for (Value *src = fp->actuals() + numFormals; dst != dstEnd; ++src, ++dst)
dst->init(*src);
} else if (numActuals < numFormals) {
for (; dst != dstEnd; ++dst)
dst->init(UndefinedValue());
}
data->deletedBits = reinterpret_cast<size_t *>(dstEnd);
ClearAllBitArrayElements(data->deletedBits, numDeletedWords);
/* We have everything needed to fill in the object, so make the object. */
JSObject *obj = JSObject::create(cx, FINALIZE_KIND, emptyArgumentsShape, type, NULL);
if (!obj)
return NULL;
obj->initFixedSlot(INITIAL_LENGTH_SLOT, Int32Value(numActuals << PACKED_BITS_COUNT));
obj->initFixedSlot(DATA_SLOT, PrivateValue(data));
/*
* If it exists and the arguments object aliases formals, the call object
* is the canonical location for formals.
*/
JSScript *script = fp->script();
if (fp->fun()->isHeavyweight() && script->argsObjAliasesFormals()) {
obj->initFixedSlot(MAYBE_CALL_SLOT, ObjectValue(fp->callObj()));
/* Flag each slot that canonically lives in the callObj. */
if (script->bindingsAccessedDynamically) {
for (unsigned i = 0; i < numFormals; ++i)
data->args[i] = MagicValue(JS_FORWARD_TO_CALL_OBJECT);
} else {
for (unsigned i = 0; i < script->numClosedArgs(); ++i)
data->args[script->getClosedArg(i)] = MagicValue(JS_FORWARD_TO_CALL_OBJECT);
}
}
ArgumentsObject &argsobj = obj->asArguments();
JS_ASSERT(UINT32_MAX > (uint64_t(argc) << PACKED_BITS_COUNT));
argsobj.initInitialLength(argc);
argsobj.initData(data);
argsobj.setStackFrame(NULL);
JS_ASSERT(argsobj.numFixedSlots() >= NormalArgumentsObject::RESERVED_SLOTS);
JS_ASSERT(argsobj.numFixedSlots() >= StrictArgumentsObject::RESERVED_SLOTS);
JS_ASSERT(argsobj.initialLength() == numActuals);
JS_ASSERT(!argsobj.hasOverriddenLength());
return &argsobj;
}
ArgumentsObject *
ArgumentsObject::create(JSContext *cx, StackFrame *fp)
ArgumentsObject::createExpected(JSContext *cx, StackFrame *fp)
{
JS_ASSERT(fp->script()->needsArgsObj());
ArgumentsObject *argsobj = ArgumentsObject::create(cx, fp->numActualArgs(),
RootedObject(cx, &fp->callee()));
ArgumentsObject *argsobj = create(cx, fp);
if (!argsobj)
return NULL;
/*
* Strict mode functions have arguments objects that copy the initial
* actual parameter values. Non-strict mode arguments use the frame pointer
* to retrieve up-to-date parameter values.
*/
if (argsobj->isStrictArguments())
fp->forEachCanonicalActualArg(PutArg(cx->compartment, *argsobj));
else
argsobj->setStackFrame(fp);
fp->initArgsObj(*argsobj);
return argsobj;
}
@ -139,12 +123,7 @@ ArgumentsObject::create(JSContext *cx, StackFrame *fp)
ArgumentsObject *
ArgumentsObject::createUnexpected(JSContext *cx, StackFrame *fp)
{
ArgumentsObject *argsobj = create(cx, fp->numActualArgs(), RootedObject(cx, &fp->callee()));
if (!argsobj)
return NULL;
fp->forEachCanonicalActualArg(PutArg(cx->compartment, *argsobj));
return argsobj;
return create(cx, fp);
}
static JSBool
@ -153,10 +132,8 @@ args_delProperty(JSContext *cx, HandleObject obj, HandleId id, Value *vp)
ArgumentsObject &argsobj = obj->asArguments();
if (JSID_IS_INT(id)) {
unsigned arg = unsigned(JSID_TO_INT(id));
if (arg < argsobj.initialLength() && !argsobj.isElementDeleted(arg)) {
argsobj.setElement(arg, UndefinedValue());
if (arg < argsobj.initialLength() && !argsobj.isElementDeleted(arg))
argsobj.markElementDeleted(arg);
}
} else if (JSID_IS_ATOM(id, cx->runtime->atomState.lengthAtom)) {
argsobj.markLengthOverridden();
} else if (JSID_IS_ATOM(id, cx->runtime->atomState.calleeAtom)) {
@ -178,22 +155,15 @@ ArgGetter(JSContext *cx, HandleObject obj, HandleId id, Value *vp)
* prototype to point to another Arguments object with a bigger argc.
*/
unsigned arg = unsigned(JSID_TO_INT(id));
if (arg < argsobj.initialLength() && !argsobj.isElementDeleted(arg)) {
if (StackFrame *fp = argsobj.maybeStackFrame()) {
JS_ASSERT_IF(arg < fp->numFormalArgs(), fp->script()->formalIsAliased(arg));
*vp = fp->canonicalActualArg(arg);
} else {
*vp = argsobj.element(arg);
}
}
if (arg < argsobj.initialLength() && !argsobj.isElementDeleted(arg))
*vp = argsobj.element(arg);
} else if (JSID_IS_ATOM(id, cx->runtime->atomState.lengthAtom)) {
if (!argsobj.hasOverriddenLength())
vp->setInt32(argsobj.initialLength());
*vp = Int32Value(argsobj.initialLength());
} else {
JS_ASSERT(JSID_IS_ATOM(id, cx->runtime->atomState.calleeAtom));
const Value &v = argsobj.callee();
if (!v.isMagic(JS_OVERWRITTEN_CALLEE))
*vp = v;
if (!argsobj.callee().isMagic(JS_OVERWRITTEN_CALLEE))
*vp = argsobj.callee();
}
return true;
}
@ -205,20 +175,15 @@ ArgSetter(JSContext *cx, HandleObject obj, HandleId id, JSBool strict, Value *vp
return true;
NormalArgumentsObject &argsobj = obj->asNormalArguments();
JSScript *script = argsobj.containingScript();
if (JSID_IS_INT(id)) {
unsigned arg = unsigned(JSID_TO_INT(id));
if (arg < argsobj.initialLength()) {
if (StackFrame *fp = argsobj.maybeStackFrame()) {
JSScript *script = fp->functionScript();
JS_ASSERT(script->needsArgsObj());
if (arg < fp->numFormalArgs()) {
JS_ASSERT(fp->script()->formalIsAliased(arg));
types::TypeScript::SetArgument(cx, script, arg, *vp);
}
fp->canonicalActualArg(arg) = *vp;
return true;
}
if (arg < argsobj.initialLength() && !argsobj.isElementDeleted(arg)) {
argsobj.setElement(arg, *vp);
if (arg < script->function()->nargs)
types::TypeScript::SetArgument(cx, script, arg, *vp);
return true;
}
} else {
JS_ASSERT(JSID_IS_ATOM(id, cx->runtime->atomState.lengthAtom) ||
@ -275,13 +240,13 @@ args_resolve(JSContext *cx, HandleObject obj, HandleId id, unsigned flags,
bool
NormalArgumentsObject::optimizedGetElem(JSContext *cx, StackFrame *fp, const Value &elem, Value *vp)
{
JS_ASSERT(!fp->hasArgsObj());
JS_ASSERT(!fp->script()->needsArgsObj());
/* Fast path: no need to convert to id when elem is already an int in range. */
if (elem.isInt32()) {
int32_t i = elem.toInt32();
if (i >= 0 && uint32_t(i) < fp->numActualArgs()) {
*vp = fp->canonicalActualArg(i);
*vp = fp->unaliasedActual(i);
return true;
}
}
@ -295,7 +260,7 @@ NormalArgumentsObject::optimizedGetElem(JSContext *cx, StackFrame *fp, const Val
if (JSID_IS_INT(id)) {
int32_t i = JSID_TO_INT(id);
if (i >= 0 && uint32_t(i) < fp->numActualArgs()) {
*vp = fp->canonicalActualArg(i);
*vp = fp->unaliasedActual(i);
return true;
}
}
@ -472,34 +437,20 @@ strictargs_enumerate(JSContext *cx, HandleObject obj)
return true;
}
static void
args_finalize(FreeOp *fop, JSObject *obj)
void
ArgumentsObject::finalize(FreeOp *fop, JSObject *obj)
{
fop->free_(reinterpret_cast<void *>(obj->asArguments().data()));
}
static void
args_trace(JSTracer *trc, JSObject *obj)
void
ArgumentsObject::trace(JSTracer *trc, JSObject *obj)
{
ArgumentsObject &argsobj = obj->asArguments();
ArgumentsData *data = argsobj.data();
MarkValue(trc, &data->callee, js_callee_str);
MarkValueRange(trc, argsobj.initialLength(), data->slots, js_arguments_str);
/*
* If a generator's arguments or call object escapes, and the generator
* frame is not executing, the generator object needs to be marked because
* it is not otherwise reachable. An executing generator is rooted by its
* invocation. To distinguish the two cases (which imply different access
* paths to the generator object), we use the JSFRAME_FLOATING_GENERATOR
* flag, which is only set on the StackFrame kept in the generator object's
* JSGenerator.
*/
#if JS_HAS_GENERATORS
StackFrame *fp = argsobj.maybeStackFrame();
if (fp && fp->isFloatingGenerator())
MarkObject(trc, &js_FloatingFrameToGenerator(fp)->obj, "generator object");
#endif
MarkValueRange(trc, data->numArgs, data->args, js_arguments_str);
MarkScriptUnbarriered(trc, &data->script, "script");
}
/*
@ -521,12 +472,12 @@ Class js::NormalArgumentsObjectClass = {
args_enumerate,
reinterpret_cast<JSResolveOp>(args_resolve),
JS_ConvertStub,
args_finalize, /* finalize */
ArgumentsObject::finalize,
NULL, /* checkAccess */
NULL, /* call */
NULL, /* construct */
NULL, /* hasInstance */
args_trace,
ArgumentsObject::trace,
{
NULL, /* equality */
NULL, /* outerObject */
@ -555,12 +506,12 @@ Class js::StrictArgumentsObjectClass = {
strictargs_enumerate,
reinterpret_cast<JSResolveOp>(strictargs_resolve),
JS_ConvertStub,
args_finalize, /* finalize */
ArgumentsObject::finalize,
NULL, /* checkAccess */
NULL, /* call */
NULL, /* construct */
NULL, /* hasInstance */
args_trace,
ArgumentsObject::trace,
{
NULL, /* equality */
NULL, /* outerObject */

Просмотреть файл

@ -16,17 +16,25 @@ namespace js {
* ArgumentsData stores the initial indexed arguments provided to the
* corresponding and that function itself. It is used to store arguments[i]
* and arguments.callee -- up until the corresponding property is modified,
* when the relevant value is overwritten with MagicValue(JS_ARGS_HOLE) to
* memorialize the modification.
* when the relevant value is flagged to memorialize the modification.
*/
struct ArgumentsData
{
/*
* arguments.callee, or MagicValue(JS_ARGS_HOLE) if arguments.callee has
* been modified.
* numArgs = Max(numFormalArgs, numActualArgs)
* The array 'args' has numArgs elements.
*/
unsigned numArgs;
/*
* arguments.callee, or MagicValue(JS_OVERWRITTEN_CALLEE) if
* arguments.callee has been modified.
*/
HeapValue callee;
/* The script for the function containing this arguments object. */
JSScript *script;
/*
* Pointer to an array of bits indicating, for every argument in 'slots',
* whether the element has been deleted. See isElementDeleted comment.
@ -34,17 +42,25 @@ struct ArgumentsData
size_t *deletedBits;
/*
* Values of the arguments for this object, or MagicValue(JS_ARGS_HOLE) if
* the indexed argument has been modified.
* This array holds either the current argument value or the magic value
* JS_FORWARD_TO_CALL_OBJECT. The latter means that the function has both a
* CallObject and an ArgumentsObject AND the particular formal variable is
* aliased by the CallObject. In such cases, the CallObject holds the
* canonical value so any element access to the arguments object should
* load the value out of the CallObject (which is pointed to by
* MAYBE_CALL_SLOT).
*/
HeapValue slots[1];
HeapValue args[1];
/* For jit use: */
static ptrdiff_t offsetOfArgs() { return offsetof(ArgumentsData, args); }
};
/*
* ArgumentsObject instances represent |arguments| objects created to store
* function arguments when a function is called. It's expensive to create such
* objects if they're never used, so they're only created lazily. (See
* js::StackFrame::setArgsObj and friends.)
* objects if they're never used, so they're only created when they are
* potentially used.
*
* Arguments objects are complicated because, for non-strict mode code, they
* must alias any named arguments which were provided to the function. Gnarly
@ -75,43 +91,27 @@ struct ArgumentsData
* been modified, then the current value of arguments.length is stored in
* another slot associated with a new property.
* DATA_SLOT
* Stores an ArgumentsData* storing argument values and the callee, or
* sentinels for any of these if the corresponding property is modified.
* Use callee() to access the callee/sentinel, and use
* element/addressOfElement/setElement to access the values stored in
* the ArgumentsData. If you're simply looking to get arguments[i],
* however, use getElement or getElements to avoid spreading arguments
* object implementation details around too much.
* STACK_FRAME_SLOT
* Stores the function's stack frame for non-strict arguments objects until
* the function returns, when it is replaced with null. When an arguments
* object is created on-trace its private is JS_ARGUMENTS_OBJECT_ON_TRACE,
* and when the trace exits its private is replaced with the stack frame or
* null, as appropriate. This slot is used by strict arguments objects as
* well, but the slot is always null. Conceptually it would be better to
* remove this oddity, but preserving it allows us to work with arguments
* objects of either kind more abstractly, so we keep it for now.
* Stores an ArgumentsData*, described above.
*/
class ArgumentsObject : public JSObject
{
protected:
static const uint32_t INITIAL_LENGTH_SLOT = 0;
static const uint32_t DATA_SLOT = 1;
static const uint32_t STACK_FRAME_SLOT = 2;
static const uint32_t MAYBE_CALL_SLOT = 2;
/* Lower-order bit stolen from the length slot. */
static const uint32_t LENGTH_OVERRIDDEN_BIT = 0x1;
static const uint32_t PACKED_BITS_COUNT = 1;
void initInitialLength(uint32_t length);
void initData(ArgumentsData *data);
static ArgumentsObject *create(JSContext *cx, uint32_t argc, HandleObject callee);
static ArgumentsObject *create(JSContext *cx, StackFrame *fp);
inline ArgumentsData *data() const;
public:
static const uint32_t RESERVED_SLOTS = 3;
static const gc::AllocKind FINALIZE_KIND = gc::FINALIZE_OBJECT4;
/* Create an arguments object for a frame that is expecting them. */
static ArgumentsObject *create(JSContext *cx, StackFrame *fp);
static ArgumentsObject *createExpected(JSContext *cx, StackFrame *fp);
/*
* Purposefully disconnect the returned arguments object from the frame
@ -127,33 +127,13 @@ class ArgumentsObject : public JSObject
*/
inline uint32_t initialLength() const;
/* The script for the function containing this arguments object. */
JSScript *containingScript() const;
/* True iff arguments.length has been assigned or its attributes changed. */
inline bool hasOverriddenLength() const;
inline void markLengthOverridden();
/*
* Attempt to speedily and efficiently access the i-th element of this
* arguments object. Return true if the element was speedily returned.
* Return false if the element must be looked up more slowly using
* getProperty or some similar method.
*
* NB: Returning false does not indicate error!
*/
inline bool getElement(uint32_t i, js::Value *vp);
/*
* Attempt to speedily and efficiently get elements [start, start + count)
* of this arguments object into the locations starting at |vp|. Return
* true if all elements were copied. Return false if the elements must be
* gotten more slowly, perhaps using a getProperty or some similar method
* in a loop.
*
* NB: Returning false does not indicate error!
*/
inline bool getElements(uint32_t start, uint32_t count, js::Value *vp);
inline js::ArgumentsData *data() const;
/*
* Because the arguments object is a real object, its elements may be
* deleted. This is implemented by setting a 'deleted' flag for the arg
@ -172,18 +152,51 @@ class ArgumentsObject : public JSObject
inline bool isAnyElementDeleted() const;
inline void markElementDeleted(uint32_t i);
inline const js::Value &element(uint32_t i) const;
inline void setElement(uint32_t i, const js::Value &v);
/*
* An ArgumentsObject serves two roles:
* - a real object, accessed through regular object operations, e.g..,
* JSObject::getElement corresponding to 'arguments[i]';
* - a VM-internal data structure, storing the value of arguments (formal
* and actual) that are accessed directly by the VM when a reading the
* value of a formal parameter.
* There are two ways to access the ArgumentsData::args corresponding to
* these two use cases:
* - object access should use elements(i) which will take care of
* forwarding when the value is JS_FORWARD_TO_CALL_OBJECT;
* - VM argument access should use arg(i) which will assert that the
* value is not JS_FORWARD_TO_CALL_OBJECT (since, if such forwarding was
* needed, the frontend should have emitted JSOP_GETALIASEDVAR.
*/
inline const Value &element(uint32_t i) const;
inline void setElement(uint32_t i, const Value &v);
inline const Value &arg(unsigned i) const;
inline void setArg(unsigned i, const Value &v);
/* The stack frame for this ArgumentsObject, if the frame is still active. */
inline js::StackFrame *maybeStackFrame() const;
inline void setStackFrame(js::StackFrame *frame);
/*
* Attempt to speedily and efficiently access the i-th element of this
* arguments object. Return true if the element was speedily returned.
* Return false if the element must be looked up more slowly using
* getProperty or some similar method. The second overload copies the
* elements [start, start + count) into the locations starting at 'vp'.
*
* NB: Returning false does not indicate error!
*/
inline bool maybeGetElement(uint32_t i, Value *vp);
inline bool maybeGetElements(uint32_t start, uint32_t count, js::Value *vp);
/*
* Measures things hanging off this ArgumentsObject that are counted by the
* |miscSize| argument in JSObject::sizeOfExcludingThis().
*/
inline size_t sizeOfMisc(JSMallocSizeOfFun mallocSizeOf) const;
static void finalize(FreeOp *fop, JSObject *obj);
static void trace(JSTracer *trc, JSObject *obj);
/* For jit use: */
static size_t getDataSlotOffset() {
return getFixedSlotOffset(DATA_SLOT);
}
};
class NormalArgumentsObject : public ArgumentsObject

Просмотреть файл

@ -3137,10 +3137,16 @@ DebuggerArguments_getArg(JSContext *cx, unsigned argc, Value *vp)
*/
JS_ASSERT(i >= 0);
Value arg;
if (unsigned(i) < fp->numActualArgs())
arg = fp->canonicalActualArg(i);
else
if (unsigned(i) < fp->numActualArgs()) {
if (unsigned(i) < fp->numFormalArgs() && fp->script()->formalLivesInCallObject(i))
arg = fp->callObj().arg(i);
else if (fp->script()->argsObjAliasesFormals())
arg = fp->argsObj().arg(i);
else
arg = fp->unaliasedActual(i);
} else {
arg.setUndefined();
}
if (!Debugger::fromChildJSObject(thisobj)->wrapDebuggeeValue(cx, &arg))
return false;
@ -3370,6 +3376,7 @@ js::EvaluateInEnv(JSContext *cx, Handle<Env*> env, StackFrame *fp, const jschar
if (!script)
return false;
script->isActiveEval = true;
return ExecuteKernel(cx, script, *env, fp->thisValue(), EXECUTE_DEBUG, fp, rval);
}

Просмотреть файл

@ -14,8 +14,7 @@ namespace js {
inline
ScopeCoordinate::ScopeCoordinate(jsbytecode *pc)
: hops(GET_UINT16(pc)), binding(GET_UINT16(pc + 2)),
frameBinding(GET_UINT16(pc + 8))
: hops(GET_UINT16(pc)), slot(GET_UINT16(pc + 2))
{
JS_ASSERT(JOF_OPTYPE(*pc) == JOF_SCOPECOORD);
}
@ -36,69 +35,22 @@ ScopeObject::setEnclosingScope(JSContext *cx, HandleObject obj)
return true;
}
inline StackFrame *
ScopeObject::maybeStackFrame() const
{
JS_ASSERT(!isStaticBlock() && !isWith());
return reinterpret_cast<StackFrame *>(JSObject::getPrivate());
}
inline void
ScopeObject::setStackFrame(StackFrame *frame)
{
return setPrivate(frame);
}
inline const Value &
ScopeObject::aliasedVar(ScopeCoordinate sc)
{
/* XXX: all this is temporary until the last patch of 659577 */
StackFrame *fp = maybeStackFrame();
Bindings &bindings = fp->script()->bindings;
if (isCall()) {
JS_ASSERT(sc.binding == sc.frameBinding);
if (bindings.bindingIsArg(sc.binding)) {
unsigned arg = bindings.bindingToArg(sc.binding);
JS_ASSERT(fp->script()->formalLivesInCallObject(arg));
return fp->formalArg(arg);
}
unsigned var = bindings.bindingToLocal(sc.binding);
JS_ASSERT(fp->script()->varIsAliased(var));
return fp->localSlot(var);
}
unsigned var = bindings.bindingToLocal(sc.frameBinding);
fp = js_LiveFrameIfGenerator(fp);
JS_ASSERT(var == sc.binding + asClonedBlock().staticBlock().stackDepth() + fp->numFixed());
JS_ASSERT(asClonedBlock().staticBlock().isAliased(sc.binding));
return fp->localSlot(var);
JS_ASSERT(isCall() || isClonedBlock());
JS_STATIC_ASSERT(CALL_BLOCK_RESERVED_SLOTS == CallObject::RESERVED_SLOTS);
JS_STATIC_ASSERT(CALL_BLOCK_RESERVED_SLOTS == BlockObject::RESERVED_SLOTS);
return getSlot(CALL_BLOCK_RESERVED_SLOTS + sc.slot);
}
inline void
ScopeObject::setAliasedVar(ScopeCoordinate sc, const Value &v)
{
/* XXX: all this is temporary until the last patch of 659577 */
StackFrame *fp = maybeStackFrame();
Bindings &bindings = fp->script()->bindings;
if (isCall()) {
JS_ASSERT(sc.binding == sc.frameBinding);
if (bindings.bindingIsArg(sc.binding)) {
unsigned arg = bindings.bindingToArg(sc.binding);
JS_ASSERT(fp->script()->formalLivesInCallObject(arg));
fp->formalArg(arg) = v;
} else {
unsigned var = bindings.bindingToLocal(sc.binding);
JS_ASSERT(fp->script()->varIsAliased(var));
fp->localSlot(var) = v;
}
} else {
unsigned var = bindings.bindingToLocal(sc.frameBinding);
fp = js_LiveFrameIfGenerator(fp);
JS_ASSERT(var == sc.binding + asClonedBlock().staticBlock().stackDepth() + fp->numFixed());
JS_ASSERT(asClonedBlock().staticBlock().isAliased(sc.binding));
fp->localSlot(var) = v;
}
JS_ASSERT(isCall() || isClonedBlock());
JS_STATIC_ASSERT(CALL_BLOCK_RESERVED_SLOTS == CallObject::RESERVED_SLOTS);
JS_STATIC_ASSERT(CALL_BLOCK_RESERVED_SLOTS == BlockObject::RESERVED_SLOTS);
setSlot(CALL_BLOCK_RESERVED_SLOTS + sc.slot, v);
}
/*static*/ inline size_t
@ -136,61 +88,35 @@ CallObject::getCalleeFunction() const
}
inline const Value &
CallObject::arg(unsigned i) const
CallObject::arg(unsigned i, MaybeCheckAliasing checkAliasing) const
{
JS_ASSERT(i < getCalleeFunction()->nargs);
JS_ASSERT_IF(checkAliasing, getCalleeFunction()->script()->formalLivesInCallObject(i));
return getSlot(RESERVED_SLOTS + i);
}
inline void
CallObject::setArg(unsigned i, const Value &v)
CallObject::setArg(unsigned i, const Value &v, MaybeCheckAliasing checkAliasing)
{
JS_ASSERT(i < getCalleeFunction()->nargs);
JS_ASSERT_IF(checkAliasing, getCalleeFunction()->script()->formalLivesInCallObject(i));
setSlot(RESERVED_SLOTS + i, v);
}
inline void
CallObject::initArgUnchecked(unsigned i, const Value &v)
{
JS_ASSERT(i < getCalleeFunction()->nargs);
initSlotUnchecked(RESERVED_SLOTS + i, v);
}
inline const Value &
CallObject::var(unsigned i) const
CallObject::var(unsigned i, MaybeCheckAliasing checkAliasing) const
{
JSFunction *fun = getCalleeFunction();
JS_ASSERT(fun->nargs == fun->script()->bindings.numArgs());
JS_ASSERT(i < fun->script()->bindings.numVars());
JS_ASSERT_IF(checkAliasing, fun->script()->varIsAliased(i));
return getSlot(RESERVED_SLOTS + fun->nargs + i);
}
inline void
CallObject::setVar(unsigned i, const Value &v)
CallObject::setVar(unsigned i, const Value &v, MaybeCheckAliasing checkAliasing)
{
JSFunction *fun = getCalleeFunction();
JS_ASSERT(fun->nargs == fun->script()->bindings.numArgs());
JS_ASSERT(i < fun->script()->bindings.numVars());
JS_ASSERT_IF(checkAliasing, fun->script()->varIsAliased(i));
setSlot(RESERVED_SLOTS + fun->nargs + i, v);
}
inline void
CallObject::initVarUnchecked(unsigned i, const Value &v)
{
JSFunction *fun = getCalleeFunction();
JS_ASSERT(fun->nargs == fun->script()->bindings.numArgs());
JS_ASSERT(i < fun->script()->bindings.numVars());
initSlotUnchecked(RESERVED_SLOTS + fun->nargs + i, v);
}
inline void
CallObject::copyValues(unsigned nargs, Value *argv, unsigned nvars, Value *slots)
{
JS_ASSERT(slotInRange(RESERVED_SLOTS + nargs + nvars, SENTINEL_ALLOWED));
copySlotRange(RESERVED_SLOTS, argv, nargs);
copySlotRange(RESERVED_SLOTS + nargs, slots, nvars);
}
inline HeapSlotArray
CallObject::argArray()
{
@ -232,6 +158,13 @@ BlockObject::slotCount() const
return propertyCount();
}
inline unsigned
BlockObject::slotToFrameLocal(JSScript *script, unsigned i)
{
JS_ASSERT(i < slotCount());
return script->nfixed + stackDepth() + i;
}
inline const Value &
BlockObject::slotValue(unsigned i)
{
@ -283,9 +216,12 @@ StaticBlockObject::maybeDefinitionParseNode(unsigned i)
inline void
StaticBlockObject::setAliased(unsigned i, bool aliased)
{
JS_ASSERT_IF(i > 0, slotValue(i-1).isBoolean());
setSlotValue(i, BooleanValue(aliased));
if (aliased)
JSObject::setPrivate(reinterpret_cast<void *>(1));
if (aliased && !needsClone()) {
setSlotValue(0, MagicValue(JS_BLOCK_NEEDS_CLONE));
JS_ASSERT(needsClone());
}
}
inline bool
@ -295,9 +231,9 @@ StaticBlockObject::isAliased(unsigned i)
}
inline bool
StaticBlockObject::needsClone() const
StaticBlockObject::needsClone()
{
return JSObject::getPrivate() != NULL;
return !slotValue(0).isFalse();
}
inline bool
@ -313,15 +249,16 @@ ClonedBlockObject::staticBlock() const
}
inline const Value &
ClonedBlockObject::var(unsigned i)
ClonedBlockObject::var(unsigned i, MaybeCheckAliasing checkAliasing)
{
JS_ASSERT(!maybeStackFrame());
JS_ASSERT_IF(checkAliasing, staticBlock().isAliased(i));
return slotValue(i);
}
inline void
ClonedBlockObject::setVar(unsigned i, const Value &v)
ClonedBlockObject::setVar(unsigned i, const Value &v, MaybeCheckAliasing checkAliasing)
{
JS_ASSERT_IF(checkAliasing, staticBlock().isAliased(i));
setSlotValue(i, v);
}

Просмотреть файл

@ -45,115 +45,24 @@ js::ScopeCoordinateBlockChain(JSScript *script, jsbytecode *pc)
}
PropertyName *
js::ScopeCoordinateName(JSScript *script, jsbytecode *pc)
js::ScopeCoordinateName(JSRuntime *rt, JSScript *script, jsbytecode *pc)
{
StaticBlockObject *maybeBlock = ScopeCoordinateBlockChain(script, pc);
ScopeCoordinate sc(pc);
uint32_t targetSlot;
Shape *shape;
if (maybeBlock) {
targetSlot = BlockObject::RESERVED_SLOTS + sc.binding;
shape = maybeBlock->lastProperty();
} else {
targetSlot = CallObject::RESERVED_SLOTS + sc.binding;
shape = script->bindings.lastShape();
}
uint32_t targetSlot = ScopeObject::CALL_BLOCK_RESERVED_SLOTS + sc.slot;
Shape *shape = maybeBlock ? maybeBlock->lastProperty() : script->bindings.lastShape();
Shape::Range r = shape->all();
while (r.front().slot() != targetSlot)
r.popFront();
return JSID_TO_ATOM(r.front().propid())->asPropertyName();
jsid id = r.front().propid();
/* Beware nameless destructuring formal. */
if (!JSID_IS_ATOM(id))
return rt->atomState.emptyAtom;
return JSID_TO_ATOM(id)->asPropertyName();
}
/*****************************************************************************/
void
js_PutCallObject(StackFrame *fp, CallObject &callobj)
{
JS_ASSERT(callobj.maybeStackFrame() == fp);
JS_ASSERT_IF(fp->isEvalFrame(), fp->isStrictEvalFrame());
JS_ASSERT(fp->isEvalFrame() == callobj.isForEval());
JSScript *script = fp->script();
Bindings &bindings = script->bindings;
if (callobj.isForEval()) {
JS_ASSERT(script->strictModeCode);
JS_ASSERT(bindings.numArgs() == 0);
/* This could be optimized as below, but keep it simple for now. */
callobj.copyValues(0, NULL, bindings.numVars(), fp->slots());
} else {
JSFunction *fun = fp->fun();
JS_ASSERT(script == callobj.getCalleeFunction()->script());
JS_ASSERT(script == fun->script());
unsigned n = bindings.count();
if (n > 0) {
uint32_t nvars = bindings.numVars();
uint32_t nargs = bindings.numArgs();
JS_ASSERT(fun->nargs == nargs);
JS_ASSERT(nvars + nargs == n);
JSScript *script = fun->script();
if (script->bindingsAccessedDynamically
#ifdef JS_METHODJIT
|| script->debugMode
#endif
) {
callobj.copyValues(nargs, fp->formalArgs(), nvars, fp->slots());
} else {
/*
* For each arg & var that is closed over, copy it from the stack
* into the call object. We use initArg/VarUnchecked because,
* when you call a getter on a call object, js_NativeGetInline
* caches the return value in the slot, so we can't assert that
* it's undefined.
*/
uint32_t nclosed = script->numClosedArgs();
for (uint32_t i = 0; i < nclosed; i++) {
uint32_t e = script->getClosedArg(i);
#ifdef JS_GC_ZEAL
callobj.setArg(e, fp->formalArg(e));
#else
callobj.initArgUnchecked(e, fp->formalArg(e));
#endif
}
nclosed = script->numClosedVars();
for (uint32_t i = 0; i < nclosed; i++) {
uint32_t e = script->getClosedVar(i);
#ifdef JS_GC_ZEAL
callobj.setVar(e, fp->slots()[e]);
#else
callobj.initVarUnchecked(e, fp->slots()[e]);
#endif
}
}
/*
* Update the args and vars for the active call if this is an outer
* function in a script nesting.
*/
types::TypeScriptNesting *nesting = script->nesting();
if (nesting && script->isOuterFunction) {
nesting->argArray = callobj.argArray();
nesting->varArray = callobj.varArray();
}
}
/* Clear private pointers to fp, which is about to go away. */
if (js_IsNamedLambda(fun)) {
JSObject &env = callobj.enclosingScope();
JS_ASSERT(env.asDeclEnv().maybeStackFrame() == fp);
env.setPrivate(NULL);
}
}
callobj.setStackFrame(NULL);
}
/*
* Construct a call object for the given bindings. If this is a call object
* for a function invocation, callee should be the function being called.
@ -161,14 +70,18 @@ js_PutCallObject(StackFrame *fp, CallObject &callobj)
* must be null.
*/
CallObject *
CallObject::create(JSContext *cx, JSScript *script, HandleObject enclosing, HandleObject callee)
CallObject::create(JSContext *cx, JSScript *script, HandleObject enclosing, HandleFunction callee)
{
RootedShape shape(cx);
shape = script->bindings.callObjectShape(cx);
if (shape == NULL)
return NULL;
gc::AllocKind kind = gc::GetGCObjectKind(shape->numFixedSlots() + 1);
gc::AllocKind kind = gc::GetGCObjectKind(shape->numFixedSlots());
#ifdef JS_THREADSAFE
JS_ASSERT(CanBeFinalizedInBackground(kind, &CallClass));
kind = gc::GetBackgroundAllocKind(kind);
#endif
RootedTypeObject type(cx);
type = cx->compartment->getEmptyType(cx);
@ -194,21 +107,9 @@ CallObject::create(JSContext *cx, JSScript *script, HandleObject enclosing, Hand
return NULL;
}
#ifdef DEBUG
JS_ASSERT(!obj->inDictionaryMode());
for (Shape::Range r = obj->lastProperty(); !r.empty(); r.popFront()) {
const Shape &s = r.front();
if (s.hasSlot()) {
JS_ASSERT(s.slot() + 1 == obj->slotSpan());
break;
}
}
#endif
if (!obj->asScope().setEnclosingScope(cx, enclosing))
return NULL;
JS_ASSERT_IF(callee, callee->isFunction());
obj->initFixedSlot(CALLEE_SLOT, ObjectOrNullValue(callee));
/*
@ -227,7 +128,6 @@ CallObject *
CallObject::createForFunction(JSContext *cx, StackFrame *fp)
{
JS_ASSERT(fp->isNonEvalFunctionFrame());
JS_ASSERT(!fp->hasCallObj());
RootedObject scopeChain(cx, fp->scopeChain());
@ -241,11 +141,24 @@ CallObject::createForFunction(JSContext *cx, StackFrame *fp)
return NULL;
}
CallObject *callobj = create(cx, fp->script(), scopeChain, RootedObject(cx, &fp->callee()));
JSScript *script = fp->script();
CallObject *callobj = create(cx, script, scopeChain, RootedFunction(cx, &fp->callee()));
if (!callobj)
return NULL;
callobj->setStackFrame(fp);
/* Copy in the closed-over formal arguments. */
if (script->bindingsAccessedDynamically) {
Value *formals = fp->formals();
for (unsigned slot = 0, n = fp->fun()->nargs; slot < n; ++slot)
callobj->setArg(slot, formals[slot]);
} else if (unsigned n = script->numClosedArgs()) {
Value *formals = fp->formals();
for (unsigned i = 0; i < n; ++i) {
uint32_t slot = script->getClosedArg(i);
callobj->setArg(slot, formals[slot]);
}
}
return callobj;
}
@ -261,45 +174,29 @@ CallObject::copyUnaliasedValues(StackFrame *fp)
/* Copy the unaliased formals. */
for (unsigned i = 0; i < script->bindings.numArgs(); ++i) {
if (!script->formalLivesInCallObject(i))
setArg(i, fp->formalArg(i));
if (!script->formalLivesInCallObject(i)) {
if (script->argsObjAliasesFormals())
setArg(i, fp->argsObj().arg(i), DONT_CHECK_ALIASING);
else
setArg(i, fp->unaliasedFormal(i), DONT_CHECK_ALIASING);
}
}
/* Copy the unaliased var/let bindings. */
for (unsigned i = 0; i < script->bindings.numVars(); ++i) {
if (!script->varIsAliased(i))
setVar(i, fp->localSlot(i));
setVar(i, fp->unaliasedLocal(i), DONT_CHECK_ALIASING);
}
}
CallObject *
CallObject::createForStrictEval(JSContext *cx, StackFrame *fp)
{
CallObject *callobj = create(cx, fp->script(), fp->scopeChain(), RootedObject(cx));
if (!callobj)
return NULL;
JS_ASSERT(fp->isStrictEvalFrame());
JS_ASSERT(cx->fp() == fp);
JS_ASSERT(cx->regs().pc == fp->script()->code);
callobj->setStackFrame(fp);
fp->initScopeChain(*callobj);
return callobj;
}
JSBool
CallObject::getArgOp(JSContext *cx, HandleObject obj, HandleId id, Value *vp)
{
CallObject &callobj = obj->asCall();
JS_ASSERT((int16_t) JSID_TO_INT(id) == JSID_TO_INT(id));
unsigned i = (uint16_t) JSID_TO_INT(id);
DebugOnly<JSScript *> script = callobj.getCalleeFunction()->script();
JS_ASSERT(script->formalLivesInCallObject(i));
if (StackFrame *fp = callobj.maybeStackFrame())
*vp = fp->formalArg(i);
else
*vp = callobj.arg(i);
return true;
return create(cx, fp->script(), fp->scopeChain(), RootedFunction(cx));
}
JSBool
@ -313,36 +210,12 @@ CallObject::setArgOp(JSContext *cx, HandleObject obj, HandleId id, JSBool strict
JSScript *script = callobj.getCalleeFunction()->script();
JS_ASSERT(script->formalLivesInCallObject(i));
if (StackFrame *fp = callobj.maybeStackFrame())
fp->formalArg(i) = *vp;
else
callobj.setArg(i, *vp);
callobj.setArg(i, *vp);
if (!script->ensureHasTypes(cx))
return false;
TypeScript::SetArgument(cx, script, i, *vp);
return true;
}
JSBool
CallObject::getVarOp(JSContext *cx, HandleObject obj, HandleId id, Value *vp)
{
CallObject &callobj = obj->asCall();
JS_ASSERT((int16_t) JSID_TO_INT(id) == JSID_TO_INT(id));
unsigned i = (uint16_t) JSID_TO_INT(id);
DebugOnly<JSScript *> script = callobj.getCalleeFunction()->script();
JS_ASSERT(script->varIsAliased(i));
if (StackFrame *fp = callobj.maybeStackFrame())
*vp = fp->varSlot(i);
else
*vp = callobj.var(i);
JS_ASSERT(!vp->isMagic(JS_OPTIMIZED_ARGUMENTS));
return true;
}
@ -357,10 +230,7 @@ CallObject::setVarOp(JSContext *cx, HandleObject obj, HandleId id, JSBool strict
JSScript *script = callobj.getCalleeFunction()->script();
JS_ASSERT(script->varIsAliased(i));
if (StackFrame *fp = callobj.maybeStackFrame())
fp->varSlot(i) = *vp;
else
callobj.setVar(i, *vp);
callobj.setVar(i, *vp);
if (!script->ensureHasTypes(cx))
return false;
@ -369,52 +239,16 @@ CallObject::setVarOp(JSContext *cx, HandleObject obj, HandleId id, JSBool strict
return true;
}
bool
CallObject::containsVarOrArg(PropertyName *name, Value *vp, JSContext *cx)
{
jsid id = NameToId(name);
const Shape *shape = nativeLookup(cx, id);
if (!shape)
return false;
PropertyOp op = shape->getterOp();
if (op != getVarOp && op != getArgOp)
return false;
JS_ALWAYS_TRUE(op(cx, RootedObject(cx, this), RootedId(cx, INT_TO_JSID(shape->shortid())), vp));
return true;
}
static void
call_trace(JSTracer *trc, JSObject *obj)
{
JS_ASSERT(obj->isCall());
/* Mark any generator frame, as for arguments objects. */
#if JS_HAS_GENERATORS
StackFrame *fp = (StackFrame *) obj->getPrivate();
if (fp && fp->isFloatingGenerator())
MarkObject(trc, &js_FloatingFrameToGenerator(fp)->obj, "generator object");
#endif
}
JS_PUBLIC_DATA(Class) js::CallClass = {
"Call",
JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS | JSCLASS_IS_ANONYMOUS |
JSCLASS_HAS_RESERVED_SLOTS(CallObject::RESERVED_SLOTS),
JSCLASS_IS_ANONYMOUS | JSCLASS_HAS_RESERVED_SLOTS(CallObject::RESERVED_SLOTS),
JS_PropertyStub, /* addProperty */
JS_PropertyStub, /* delProperty */
JS_PropertyStub, /* getProperty */
JS_StrictPropertyStub, /* setProperty */
JS_EnumerateStub,
JS_ResolveStub,
NULL, /* convert: Leave it NULL so we notice if calls ever escape */
NULL, /* finalize */
NULL, /* checkAccess */
NULL, /* call */
NULL, /* construct */
NULL, /* hasInstance */
call_trace
NULL /* convert: Leave it NULL so we notice if calls ever escape */
};
Class js::DeclEnvClass = {
@ -449,7 +283,6 @@ DeclEnvObject::create(JSContext *cx, StackFrame *fp)
if (!obj)
return NULL;
obj->setPrivate(fp);
if (!obj->asScope().setEnclosingScope(cx, fp->scopeChain()))
return NULL;
@ -753,103 +586,35 @@ ClonedBlockObject::create(JSContext *cx, Handle<StaticBlockObject *> block, Stac
obj->setReservedSlot(SCOPE_CHAIN_SLOT, ObjectValue(*fp->scopeChain()));
obj->setReservedSlot(DEPTH_SLOT, PrivateUint32Value(block->stackDepth()));
obj->setPrivate(js_FloatingFrameIfGenerator(cx, fp));
if (obj->lastProperty()->extensibleParents() && !obj->generateOwnShape(cx))
return NULL;
/*
* Copy in the closed-over locals. Closed-over locals don't need
* any fixup since the initial value is 'undefined'.
*/
Value *src = fp->base() + block->stackDepth();
unsigned nslots = block->slotCount();
for (unsigned i = 0; i < nslots; ++i, ++src) {
if (block->isAliased(i))
obj->asClonedBlock().setVar(i, *src);
}
return &obj->asClonedBlock();
}
void
ClonedBlockObject::put(StackFrame *fp)
{
uint32_t count = slotCount();
uint32_t depth = stackDepth();
/* See comments in CheckDestructuring in frontend/Parser.cpp. */
JS_ASSERT(count >= 1);
copySlotRange(RESERVED_SLOTS, fp->base() + depth, count);
/* We must clear the private slot even with errors. */
setPrivate(NULL);
}
void
ClonedBlockObject::copyUnaliasedValues(StackFrame *fp)
{
StaticBlockObject &block = staticBlock();
unsigned base = fp->script()->nfixed + stackDepth();
unsigned base = block.slotToFrameLocal(fp->script(), 0);
for (unsigned i = 0; i < slotCount(); ++i) {
if (!block.isAliased(i))
setVar(i, fp->localSlot(base + i));
setVar(i, fp->unaliasedLocal(base + i), DONT_CHECK_ALIASING);
}
}
static JSBool
block_getProperty(JSContext *cx, HandleObject obj, HandleId id, Value *vp)
{
/*
* Block objects are never exposed to script, and the engine handles them
* with care. So unlike other getters, this one can assert (rather than
* check) certain invariants about obj.
*/
ClonedBlockObject &block = obj->asClonedBlock();
unsigned index = (unsigned) JSID_TO_INT(id);
JS_ASSERT_IF(!block.compartment()->debugMode(), block.staticBlock().isAliased(index));
if (StackFrame *fp = block.maybeStackFrame()) {
fp = js_LiveFrameIfGenerator(fp);
index += fp->numFixed() + block.stackDepth();
JS_ASSERT(index < fp->numSlots());
*vp = fp->slots()[index];
return true;
}
/* Values are in slots immediately following the class-reserved ones. */
JS_ASSERT(block.var(index) == *vp);
return true;
}
static JSBool
block_setProperty(JSContext *cx, HandleObject obj, HandleId id, JSBool strict, Value *vp)
{
ClonedBlockObject &block = obj->asClonedBlock();
unsigned index = (unsigned) JSID_TO_INT(id);
JS_ASSERT_IF(!block.compartment()->debugMode(), block.staticBlock().isAliased(index));
if (StackFrame *fp = block.maybeStackFrame()) {
fp = js_LiveFrameIfGenerator(fp);
index += fp->numFixed() + block.stackDepth();
JS_ASSERT(index < fp->numSlots());
fp->slots()[index] = *vp;
return true;
}
/*
* The value in *vp will be written back to the slot in obj that was
* allocated when this let binding was defined.
*/
return true;
}
bool
ClonedBlockObject::containsVar(PropertyName *name, Value *vp, JSContext *cx)
{
RootedObject self(cx, this);
const Shape *shape = nativeLookup(cx, NameToId(name));
if (!shape)
return false;
JS_ASSERT(shape->getterOp() == block_getProperty);
JS_ALWAYS_TRUE(block_getProperty(cx, self, RootedId(cx, INT_TO_JSID(shape->shortid())), vp));
return true;
}
StaticBlockObject *
StaticBlockObject::create(JSContext *cx)
{
@ -867,7 +632,6 @@ StaticBlockObject::create(JSContext *cx)
if (!obj)
return NULL;
obj->setPrivate(NULL);
return &obj->asStaticBlock();
}
@ -890,29 +654,15 @@ StaticBlockObject::addVar(JSContext *cx, jsid id, int index, bool *redeclared)
* block's shape later.
*/
uint32_t slot = JSSLOT_FREE(&BlockClass) + index;
return addPropertyInternal(cx, id, block_getProperty, block_setProperty,
return addPropertyInternal(cx, id, /* getter = */ NULL, /* setter = */ NULL,
slot, JSPROP_ENUMERATE | JSPROP_PERMANENT,
Shape::HAS_SHORTID, index, spp,
/* allowDictionary = */ false);
}
static void
block_trace(JSTracer *trc, JSObject *obj)
{
if (obj->isStaticBlock())
return;
/* XXX: this will be removed again with bug 659577. */
#if JS_HAS_GENERATORS
StackFrame *fp = obj->asClonedBlock().maybeStackFrame();
if (fp && fp->isFloatingGenerator())
MarkObject(trc, &js_FloatingFrameToGenerator(fp)->obj, "generator object");
#endif
}
Class js::BlockClass = {
"Block",
JSCLASS_HAS_PRIVATE | JSCLASS_IMPLEMENTS_BARRIERS |
JSCLASS_IMPLEMENTS_BARRIERS |
JSCLASS_HAS_RESERVED_SLOTS(BlockObject::RESERVED_SLOTS) |
JSCLASS_IS_ANONYMOUS,
JS_PropertyStub, /* addProperty */
@ -921,13 +671,7 @@ Class js::BlockClass = {
JS_StrictPropertyStub, /* setProperty */
JS_EnumerateStub,
JS_ResolveStub,
JS_ConvertStub,
NULL, /* finalize */
NULL, /* checkAccess */
NULL, /* call */
NULL, /* construct */
NULL, /* hasInstance */
block_trace
JS_ConvertStub
};
#define NO_PARENT_INDEX UINT32_MAX
@ -1044,7 +788,7 @@ js::XDRStaticBlockObject(XDRState<mode> *xdr, JSScript *script, StaticBlockObjec
*/
for (unsigned i = 0; i < count; i++) {
const Shape *shape = shapes[i];
JS_ASSERT(shape->getter() == block_getProperty);
JS_ASSERT(shape->hasDefaultGetter());
JS_ASSERT(unsigned(shape->shortid()) == i);
jsid propid = shape->propid();
@ -1263,6 +1007,9 @@ ScopeIter::settle()
} else if (fp_->isNonEvalFunctionFrame() && !fp_->hasCallObj()) {
JS_ASSERT(cur_ == fp_->fun()->environment());
fp_ = NULL;
} else if (fp_->isStrictEvalFrame() && !fp_->hasCallObj()) {
JS_ASSERT(cur_ == fp_->prev()->scopeChain());
fp_ = NULL;
} else if (cur_->isWith()) {
JS_ASSERT_IF(fp_->isFunctionFrame(), fp_->fun()->isHeavyweight());
JS_ASSERT_IF(block_, block_->needsClone());
@ -1359,14 +1106,14 @@ class DebugScopeProxy : public BaseProxyHandler
if (maybefp) {
if (action == GET)
*vp = maybefp->varSlot(i);
*vp = maybefp->unaliasedVar(i);
else
maybefp->varSlot(i) = *vp;
maybefp->unaliasedVar(i) = *vp;
} else {
if (action == GET)
*vp = callobj.var(i);
*vp = callobj.var(i, DONT_CHECK_ALIASING);
else
callobj.setVar(i, *vp);
callobj.setVar(i, *vp, DONT_CHECK_ALIASING);
}
if (action == SET)
@ -1381,15 +1128,22 @@ class DebugScopeProxy : public BaseProxyHandler
return false;
if (maybefp) {
if (action == GET)
*vp = maybefp->formalArg(i);
else
maybefp->formalArg(i) = *vp;
if (script->argsObjAliasesFormals()) {
if (action == GET)
*vp = maybefp->argsObj().arg(i);
else
maybefp->argsObj().setArg(i, *vp);
} else {
if (action == GET)
*vp = maybefp->unaliasedFormal(i);
else
maybefp->unaliasedFormal(i) = *vp;
}
} else {
if (action == GET)
*vp = callobj.arg(i);
*vp = callobj.arg(i, DONT_CHECK_ALIASING);
else
callobj.setArg(i, *vp);
callobj.setArg(i, *vp, DONT_CHECK_ALIASING);
}
if (action == SET)
@ -1409,17 +1163,17 @@ class DebugScopeProxy : public BaseProxyHandler
if (maybefp) {
JSScript *script = maybefp->script();
unsigned local = i + script->nfixed + block.stackDepth();
unsigned local = block.slotToFrameLocal(maybefp->script(), i);
if (action == GET)
*vp = maybefp->localSlot(local);
*vp = maybefp->unaliasedLocal(local);
else
maybefp->localSlot(local) = *vp;
maybefp->unaliasedLocal(local) = *vp;
JS_ASSERT(analyze::LocalSlot(script, local) >= analyze::TotalSlots(script));
} else {
if (action == GET)
*vp = block.var(i);
*vp = block.var(i, DONT_CHECK_ALIASING);
else
block.setVar(i, *vp);
block.setVar(i, *vp, DONT_CHECK_ALIASING);
}
return true;
@ -1693,7 +1447,7 @@ DebugScopes::mark(JSTracer *trc)
}
void
DebugScopes::sweep()
DebugScopes::sweep(JSRuntime *rt)
{
/*
* Note: missingScopes points to debug scopes weakly not just so that debug
@ -1705,16 +1459,36 @@ DebugScopes::sweep()
e.removeFront();
}
/*
* Scopes can be finalized when a suspended generator becomes garbage or
* when a debugger-synthesized ScopeObject is no longer rooted by its
* DebugScopeObject.
*/
for (LiveScopeMap::Enum e(liveScopes); !e.empty(); e.popFront()) {
ScopeObject &scope = *e.front().key;
if (JS_IsAboutToBeFinalized(&scope)) {
JS_ASSERT(!scope.maybeStackFrame() || scope.maybeStackFrame()->isGeneratorFrame());
ScopeObject *scope = e.front().key;
StackFrame *fp = e.front().value;
/*
* Scopes can be finalized when a debugger-synthesized ScopeObject is
* no longer reachable via its DebugScopeObject.
*/
if (JS_IsAboutToBeFinalized(scope)) {
e.removeFront();
continue;
}
/*
* As explained in onGeneratorFrameChange, liveScopes includes
* suspended generator frames. Since a generator can be finalized while
* its scope is live, we must explicitly detect finalized generators.
* Since the scope is still live, we simulate the onPop* call by
* copying unaliased variables into the scope object.
*/
if (JSGenerator *gen = fp->maybeSuspendedGenerator(rt)) {
JS_ASSERT(gen->state == JSGEN_NEWBORN || gen->state == JSGEN_OPEN);
if (!IsMarked(&gen->obj)) {
if (scope->isCall())
scope->asCall().copyUnaliasedValues(fp);
else if (scope->isBlock())
scope->asClonedBlock().copyUnaliasedValues(fp);
e.removeFront();
continue;
}
}
}
}
@ -1791,14 +1565,11 @@ DebugScopes::addDebugScope(JSContext *cx, ScopeIter si, DebugScopeObject &debugS
void
DebugScopes::onPopCall(StackFrame *fp)
{
if (fp->isYielding())
return;
JS_ASSERT(!fp->isYielding());
if (fp->fun()->isHeavyweight()) {
/*
* When a frame finishes executing in mjit code, the epilogue is called
* once from the return and once when the frame is popped.
* TODO: bug 659577 will remove this (with HAS_CALL_OBJ).
* The StackFrame may be observed before the prologue has created the
* CallObject. See ScopeIter::settle.
*/
if (fp->hasCallObj()) {
CallObject &callobj = fp->scopeChain()->asCall();
@ -1806,7 +1577,6 @@ DebugScopes::onPopCall(StackFrame *fp)
liveScopes.remove(&callobj);
}
} else {
JS_ASSERT(!fp->hasCallObj());
if (MissingScopeMap::Ptr p = missingScopes.lookup(ScopeIter(fp))) {
CallObject &callobj = p->value->scope().asCall();
callobj.copyUnaliasedValues(fp);
@ -1825,8 +1595,6 @@ DebugScopes::onPopBlock(JSContext *cx, StackFrame *fp)
clone.copyUnaliasedValues(fp);
liveScopes.remove(&clone);
} else {
JS_ASSERT(!fp->scopeChain()->isBlock() ||
fp->scopeChain()->asClonedBlock().staticBlock() != staticBlock);
if (MissingScopeMap::Ptr p = missingScopes.lookup(ScopeIter(fp))) {
ClonedBlockObject &clone = p->value->scope().asClonedBlock();
clone.copyUnaliasedValues(fp);
@ -1845,7 +1613,12 @@ DebugScopes::onPopWith(StackFrame *fp)
void
DebugScopes::onPopStrictEvalScope(StackFrame *fp)
{
liveScopes.remove(&fp->scopeChain()->asCall());
/*
* The StackFrame may be observed before the prologue has created the
* CallObject. See ScopeIter::settle.
*/
if (fp->hasCallObj())
liveScopes.remove(&fp->scopeChain()->asCall());
}
void
@ -1993,13 +1766,11 @@ GetDebugScopeForMissing(JSContext *cx, ScopeIter si)
if (callobj->enclosingScope().isDeclEnv()) {
JS_ASSERT(CallObjectLambdaName(callobj->getCalleeFunction()));
DeclEnvObject &declenv = callobj->enclosingScope().asDeclEnv();
declenv.setStackFrame(NULL);
enclosingDebug = DebugScopeObject::create(cx, declenv, *enclosingDebug);
if (!enclosingDebug)
return NULL;
}
callobj->setStackFrame(NULL);
debugScope = DebugScopeObject::create(cx, *callobj, *enclosingDebug);
break;
}
@ -2009,7 +1780,6 @@ GetDebugScopeForMissing(JSContext *cx, ScopeIter si)
if (!block)
return NULL;
block->setStackFrame(NULL);
debugScope = DebugScopeObject::create(cx, *block, *enclosingDebug);
break;
}

Просмотреть файл

@ -9,7 +9,6 @@
#define ScopeObject_h___
#include "jscntxt.h"
#include "jsiter.h"
#include "jsobj.h"
#include "jsweakmap.h"
@ -29,10 +28,7 @@ namespace js {
struct ScopeCoordinate
{
uint16_t hops;
uint16_t binding;
/* XXX this will be removed with the last patch of bug 659577. */
uint16_t frameBinding;
uint16_t slot;
inline ScopeCoordinate(jsbytecode *pc);
inline ScopeCoordinate() {}
@ -44,7 +40,7 @@ ScopeCoordinateBlockChain(JSScript *script, jsbytecode *pc);
/* Return the name being accessed by the given ALIASEDVAR op. */
extern PropertyName *
ScopeCoordinateName(JSScript *script, jsbytecode *pc);
ScopeCoordinateName(JSRuntime *rt, JSScript *script, jsbytecode *pc);
/*****************************************************************************/
@ -88,13 +84,13 @@ ScopeCoordinateName(JSScript *script, jsbytecode *pc);
class ScopeObject : public JSObject
{
/* Use maybeStackFrame() instead. */
void *getPrivate() const;
protected:
static const uint32_t SCOPE_CHAIN_SLOT = 0;
public:
/* Number of reserved slots for both CallObject and BlockObject. */
static const uint32_t CALL_BLOCK_RESERVED_SLOTS = 2;
/*
* Since every scope chain terminates with a global object and GlobalObject
* does not derive ScopeObject (it has a completely different layout), the
@ -112,14 +108,6 @@ class ScopeObject : public JSObject
inline const Value &aliasedVar(ScopeCoordinate sc);
inline void setAliasedVar(ScopeCoordinate sc, const Value &v);
/*
* The stack frame for this scope object, if the frame is still active.
* Note: these members may not be called for a StaticBlockObject or
* WithObject.
*/
inline StackFrame *maybeStackFrame() const;
inline void setStackFrame(StackFrame *frame);
/* For jit access. */
static inline size_t offsetOfEnclosingScope();
};
@ -129,10 +117,10 @@ class CallObject : public ScopeObject
static const uint32_t CALLEE_SLOT = 1;
static CallObject *
create(JSContext *cx, JSScript *script, HandleObject enclosing, HandleObject callee);
create(JSContext *cx, JSScript *script, HandleObject enclosing, HandleFunction callee);
public:
static const uint32_t RESERVED_SLOTS = 3;
static const uint32_t RESERVED_SLOTS = CALL_BLOCK_RESERVED_SLOTS;
static CallObject *createForFunction(JSContext *cx, StackFrame *fp);
static CallObject *createForStrictEval(JSContext *cx, StackFrame *fp);
@ -149,14 +137,12 @@ class CallObject : public ScopeObject
inline void setCallee(JSObject *callee);
/* Returns the formal argument at the given index. */
inline const Value &arg(unsigned i) const;
inline void setArg(unsigned i, const Value &v);
inline void initArgUnchecked(unsigned i, const Value &v);
inline const Value &arg(unsigned i, MaybeCheckAliasing = CHECK_ALIASING) const;
inline void setArg(unsigned i, const Value &v, MaybeCheckAliasing = CHECK_ALIASING);
/* Returns the variable at the given index. */
inline const Value &var(unsigned i) const;
inline void setVar(unsigned i, const Value &v);
inline void initVarUnchecked(unsigned i, const Value &v);
inline const Value &var(unsigned i, MaybeCheckAliasing = CHECK_ALIASING) const;
inline void setVar(unsigned i, const Value &v, MaybeCheckAliasing = CHECK_ALIASING);
/*
* Get the actual arrays of arguments and variables. Only call if type
@ -166,16 +152,9 @@ class CallObject : public ScopeObject
inline HeapSlotArray argArray();
inline HeapSlotArray varArray();
inline void copyValues(unsigned nargs, Value *argv, unsigned nvars, Value *slots);
static JSBool getArgOp(JSContext *cx, HandleObject obj, HandleId id, Value *vp);
static JSBool getVarOp(JSContext *cx, HandleObject obj, HandleId id, Value *vp);
static JSBool setArgOp(JSContext *cx, HandleObject obj, HandleId id, JSBool strict, Value *vp);
static JSBool setVarOp(JSContext *cx, HandleObject obj, HandleId id, JSBool strict, Value *vp);
/* Return whether this environment contains 'name' and, if so, its value. */
bool containsVarOrArg(PropertyName *name, Value *vp, JSContext *cx);
/* Copy in all the unaliased formals and locals. */
void copyUnaliasedValues(StackFrame *fp);
};
@ -202,10 +181,6 @@ class NestedScopeObject : public ScopeObject
class WithObject : public NestedScopeObject
{
/* These ScopeObject operations are not valid on a with object. */
js::StackFrame *maybeStackFrame() const;
void setStackFrame(StackFrame *frame);
static const unsigned THIS_SLOT = 2;
/* Use WithObject::object() instead. */
@ -213,7 +188,11 @@ class WithObject : public NestedScopeObject
public:
static const unsigned RESERVED_SLOTS = 3;
#ifdef JS_THREADSAFE
static const gc::AllocKind FINALIZE_KIND = gc::FINALIZE_OBJECT4_BACKGROUND;
#else
static const gc::AllocKind FINALIZE_KIND = gc::FINALIZE_OBJECT4;
#endif
static WithObject *
create(JSContext *cx, HandleObject proto, HandleObject enclosing, uint32_t depth);
@ -228,12 +207,23 @@ class WithObject : public NestedScopeObject
class BlockObject : public NestedScopeObject
{
public:
static const unsigned RESERVED_SLOTS = 2;
static const unsigned RESERVED_SLOTS = CALL_BLOCK_RESERVED_SLOTS;
#ifdef JS_THREADSAFE
static const gc::AllocKind FINALIZE_KIND = gc::FINALIZE_OBJECT4_BACKGROUND;
#else
static const gc::AllocKind FINALIZE_KIND = gc::FINALIZE_OBJECT4;
#endif
/* Return the number of variables associated with this block. */
inline uint32_t slotCount() const;
/*
* Return the local corresponding to the ith binding where i is in the
* range [0, slotCount()) and the return local index is in the range
* [script->nfixed, script->nfixed + script->nslots).
*/
unsigned slotToFrameLocal(JSScript *script, unsigned i);
protected:
/* Blocks contain an object slot for each slot i: 0 <= i < slotCount. */
inline const Value &slotValue(unsigned i);
@ -242,10 +232,6 @@ class BlockObject : public NestedScopeObject
class StaticBlockObject : public BlockObject
{
/* These ScopeObject operations are not valid on a static block object. */
StackFrame *maybeStackFrame() const;
void setStackFrame(StackFrame *frame);
public:
static StaticBlockObject *create(JSContext *cx);
@ -273,7 +259,7 @@ class StaticBlockObject : public BlockObject
* A static block object is cloned (when entering the block) iff some
* variable of the block isAliased.
*/
bool needsClone() const;
bool needsClone();
const Shape *addVar(JSContext *cx, jsid id, int index, bool *redeclared);
};
@ -287,18 +273,9 @@ class ClonedBlockObject : public BlockObject
/* The static block from which this block was cloned. */
StaticBlockObject &staticBlock() const;
/*
* When this block's stack slots are about to be popped, 'put' must be
* called to copy the slot values into this block's object slots.
*/
void put(StackFrame *fp);
/* Assuming 'put' has been called, return the value of the ith let var. */
const Value &var(unsigned i);
void setVar(unsigned i, const Value &v);
/* Return whether this environment contains 'name' and, if so, its value. */
bool containsVar(PropertyName *name, Value *vp, JSContext *cx);
const Value &var(unsigned i, MaybeCheckAliasing = CHECK_ALIASING);
void setVar(unsigned i, const Value &v, MaybeCheckAliasing = CHECK_ALIASING);
/* Copy in all the unaliased formals and locals. */
void copyUnaliasedValues(StackFrame *fp);
@ -470,7 +447,7 @@ class DebugScopes
bool init();
void mark(JSTracer *trc);
void sweep();
void sweep(JSRuntime *rt);
DebugScopeObject *hasDebugScope(JSContext *cx, ScopeObject &scope) const;
bool addDebugScope(JSContext *cx, ScopeObject &scope, DebugScopeObject &debugScope);

Просмотреть файл

@ -92,7 +92,7 @@ StackFrame::initPrev(JSContext *cx)
prev_ = NULL;
#ifdef DEBUG
prevpc_ = (jsbytecode *)0xbadc;
prevInline_ = (JSInlinedSite *)0xbadc;
prevInline_ = (InlinedSite *)0xbadc;
#endif
}
}
@ -147,9 +147,8 @@ StackFrame::initCallFrame(JSContext *cx, JSFunction &callee,
JS_ASSERT(!hasBlockChain());
JS_ASSERT(!hasHookData());
JS_ASSERT(annotation() == NULL);
JS_ASSERT(!hasCallObj());
SetValueRangeToUndefined(slots(), script->nfixed);
initVarsToUndefined();
}
/*
@ -171,85 +170,137 @@ StackFrame::initFixupFrame(StackFrame *prev, StackFrame::Flags flags, void *ncod
u.nactual = nactual;
}
/*
 * Method-JIT entry point for the heavyweight-function part of
 * StackFrame::prologue (per the 'jit' prefix): create this frame's
 * CallObject, push it on the scope chain, and, if the script participates
 * in type nesting, run the nesting prologue.
 *
 * Returns false only when CallObject creation fails (OOM); the frame is
 * left without HAS_CALL_OBJ in that case.
 */
inline bool
StackFrame::jitHeavyweightFunctionPrologue(JSContext *cx)
{
JS_ASSERT(isNonEvalFunctionFrame());
JS_ASSERT(fun()->isHeavyweight());
/* The call object captures this frame's aliased formals/locals. */
CallObject *callobj = CallObject::createForFunction(cx, this);
if (!callobj)
return false;
pushOnScopeChain(*callobj);
flags_ |= HAS_CALL_OBJ;
/* HAS_NESTING tells the epilogue that NestingEpilogue must be run. */
if (script()->nesting()) {
types::NestingPrologue(cx, this);
flags_ |= HAS_NESTING;
}
return true;
}
/*
 * Method-JIT helper: run only the type-nesting half of the prologue for a
 * frame whose script has nesting. Sets HAS_NESTING so the epilogue knows
 * to balance with types::NestingEpilogue.
 */
inline void
StackFrame::jitTypeNestingPrologue(JSContext *cx)
{
types::NestingPrologue(cx, this);
flags_ |= HAS_NESTING;
}
/*
 * Initialize the frame's fixed slots (the script's declared vars/locals,
 * script()->nfixed of them, starting at slots()) to undefined.
 */
inline void
StackFrame::initVarsToUndefined()
{
SetValueRangeToUndefined(slots(), script()->nfixed);
}
inline JSObject *
StackFrame::createRestParameter(JSContext *cx)
{
JS_ASSERT(fun()->hasRest());
unsigned nformal = fun()->nargs - 1, nactual = numActualArgs();
unsigned nrest = (nactual > nformal) ? nactual - nformal : 0;
return NewDenseCopiedArray(cx, nrest, actualArgs() + nformal);
return NewDenseCopiedArray(cx, nrest, actuals() + nformal);
}
inline Value &
StackFrame::canonicalActualArg(unsigned i) const
StackFrame::unaliasedVar(unsigned i, MaybeCheckAliasing checkAliasing)
{
JS_ASSERT_IF(checkAliasing, !script()->varIsAliased(i));
JS_ASSERT(i < script()->nfixed);
return slots()[i];
}
/*
 * Return a reference to local slot i, which may be a fixed var
 * (i < script()->nfixed) or a let/block local above the fixed slots.
 * Callers must only use this for locals that are not aliased by a scope
 * object; with CHECK_ALIASING (the default elsewhere) this is asserted
 * in DEBUG builds below.
 */
inline Value &
StackFrame::unaliasedLocal(unsigned i, MaybeCheckAliasing checkAliasing)
{
#ifdef DEBUG
if (checkAliasing) {
JS_ASSERT(i < script()->nslots);
if (i < script()->nfixed) {
/* Fixed slot: the script records per-var aliasing directly. */
JS_ASSERT(!script()->varIsAliased(i));
} else {
/*
 * Block-scoped slot: find the innermost static block on the
 * block chain that contains this stack depth and check that the
 * corresponding block var is unaliased.
 */
unsigned depth = i - script()->nfixed;
for (StaticBlockObject *b = maybeBlockChain(); b; b = b->enclosingBlock()) {
if (b->containsVarAtDepth(depth)) {
JS_ASSERT(!b->isAliased(depth - b->stackDepth()));
break;
}
}
}
}
#endif
return slots()[i];
}
/*
 * Return a reference to formal argument i stored in the frame's argument
 * slots. Must only be used for formals that are not aliased by the call
 * object (asserted unless DONT_CHECK_ALIASING is passed).
 */
inline Value &
StackFrame::unaliasedFormal(unsigned i, MaybeCheckAliasing checkAliasing)
{
JS_ASSERT(i < numFormalArgs());
JS_ASSERT_IF(checkAliasing, !script()->formalIsAliased(i));
return formals()[i];
}
inline Value &
StackFrame::unaliasedActual(unsigned i)
{
if (i < numFormalArgs())
return formalArg(i);
JS_ASSERT(i < numActualArgs());
return actualArgs()[i];
JS_ASSERT(!script()->formalIsAliased(i));
return i < numFormalArgs() ? formals()[i] : actuals()[i];
}
template <class Op>
inline bool
StackFrame::forEachCanonicalActualArg(Op op, unsigned start /* = 0 */, unsigned count /* = unsigned(-1) */)
inline void
StackFrame::forEachUnaliasedActual(Op op)
{
unsigned nformal = fun()->nargs;
JS_ASSERT(start <= nformal);
JS_ASSERT(script()->numClosedArgs() == 0);
JS_ASSERT(!script()->needsArgsObj());
Value *formals = formalArgsEnd() - nformal;
unsigned nformal = numFormalArgs();
unsigned nactual = numActualArgs();
if (count == unsigned(-1))
count = nactual - start;
unsigned end = start + count;
JS_ASSERT(end >= start);
JS_ASSERT(end <= nactual);
const Value *formalsEnd = (const Value *)this;
const Value *formals = formalsEnd - nformal;
if (end <= nformal) {
Value *p = formals + start;
for (; start < end; ++p, ++start) {
if (!op(start, p))
return false;
}
if (nactual <= nformal) {
const Value *actualsEnd = formals + nactual;
for (const Value *p = formals; p < actualsEnd; ++p)
op(*p);
} else {
for (Value *p = formals + start; start < nformal; ++p, ++start) {
if (!op(start, p))
return false;
}
JS_ASSERT(start >= nformal);
Value *actuals = formals - (nactual + 2) + start;
for (Value *p = actuals; start < end; ++p, ++start) {
if (!op(start, p))
return false;
}
}
return true;
}
for (const Value *p = formals; p < formalsEnd; ++p)
op(*p);
template <class Op>
inline bool
StackFrame::forEachFormalArg(Op op)
{
Value *formals = formalArgsEnd() - fun()->nargs;
Value *formalsEnd = formalArgsEnd();
unsigned i = 0;
for (Value *p = formals; p != formalsEnd; ++p, ++i) {
if (!op(i, p))
return false;
const Value *actualsEnd = formals - 2;
const Value *actuals = actualsEnd - nactual;
for (const Value *p = actuals + nformal; p < actualsEnd; ++p)
op(*p);
}
return true;
}
/*
 * Functor that copies values sequentially into a caller-provided buffer;
 * 'dst' advances by one Value per invocation. Used with the frame's
 * argument-iteration helpers.
 *
 * NOTE(review): two call operators appear here — the (unsigned, Value *)
 * form returning bool and the (const Value &) form returning void. This
 * commit view interleaves pre- and post-patch lines, so the indexed
 * overload looks like residue of the old forEachCanonicalActualArg
 * protocol — confirm against the applied tree.
 */
struct CopyTo
{
Value *dst;
CopyTo(Value *dst) : dst(dst) {}
bool operator()(unsigned, Value *src) {
*dst++ = *src;
return true;
}
void operator()(const Value &src) { *dst++ = src; }
};
/*
 * Number of formal (declared) parameters of the callee function.
 * Only valid on frames that have arguments (function frames).
 */
inline unsigned
StackFrame::numFormalArgs() const
{
JS_ASSERT(hasArgs());
return fun()->nargs;
}
inline unsigned
StackFrame::numActualArgs() const
{
@ -267,23 +318,20 @@ StackFrame::numActualArgs() const
return numFormalArgs();
}
inline Value *
StackFrame::actualArgs() const
inline ArgumentsObject &
StackFrame::argsObj() const
{
JS_ASSERT(hasArgs());
Value *argv = formalArgs();
if (JS_UNLIKELY(flags_ & OVERFLOW_ARGS))
return argv - (2 + u.nactual);
return argv;
JS_ASSERT(script()->needsArgsObj());
JS_ASSERT(flags_ & HAS_ARGS_OBJ);
return *argsObj_;
}
inline Value *
StackFrame::actualArgsEnd() const
inline void
StackFrame::initArgsObj(ArgumentsObject &argsobj)
{
JS_ASSERT(hasArgs());
if (JS_UNLIKELY(flags_ & OVERFLOW_ARGS))
return formalArgs() - 2;
return formalArgs() + numActualArgs();
JS_ASSERT(script()->needsArgsObj());
flags_ |= HAS_ARGS_OBJ;
argsObj_ = &argsobj;
}
inline ScopeObject &
@ -292,54 +340,29 @@ StackFrame::aliasedVarScope(ScopeCoordinate sc) const
JSObject *scope = &scopeChain()->asScope();
for (unsigned i = sc.hops; i; i--)
scope = &scope->asScope().enclosingScope();
#ifdef DEBUG
if (scope->isCall()) {
JS_ASSERT(scope->asCall() == callObj());
JS_ASSERT(scope->asCall().maybeStackFrame() == this);
} else {
StaticBlockObject &target = scope->asClonedBlock().staticBlock();
StaticBlockObject *b = &blockChain();
while (b != &target)
b = b->enclosingBlock();
}
#endif
return scope->asScope();
}
inline void
StackFrame::setScopeChain(JSObject &obj)
StackFrame::pushOnScopeChain(ScopeObject &scope)
{
#ifdef DEBUG
JS_ASSERT(&obj != NULL);
if (hasCallObj()) {
JSObject *pobj = &obj;
while (pobj && !pobj->isWith() && pobj->asScope().maybeStackFrame() != this)
pobj = pobj->enclosingScope();
JS_ASSERT(pobj);
} else {
for (JSObject *pobj = &obj; pobj->isScope() && !pobj->isWith(); pobj = pobj->enclosingScope())
JS_ASSERT_IF(pobj->isCall(), pobj->asScope().maybeStackFrame() != this);
}
#endif
scopeChain_ = &obj;
JS_ASSERT(*scopeChain() == scope.enclosingScope() ||
*scopeChain() == scope.asCall().enclosingScope().asDeclEnv().enclosingScope());
scopeChain_ = &scope;
flags_ |= HAS_SCOPECHAIN;
}
inline void
StackFrame::initScopeChain(CallObject &obj)
StackFrame::popOffScopeChain()
{
JS_ASSERT(&obj != NULL);
JS_ASSERT(!hasCallObj() && obj.maybeStackFrame() == this);
scopeChain_ = &obj;
flags_ |= HAS_SCOPECHAIN | HAS_CALL_OBJ;
JS_ASSERT(flags_ & HAS_SCOPECHAIN);
scopeChain_ = &scopeChain_->asScope().enclosingScope();
}
inline CallObject &
StackFrame::callObj() const
{
JS_ASSERT_IF(isNonEvalFunctionFrame() || isStrictEvalFrame(), hasCallObj());
JS_ASSERT(fun()->isHeavyweight());
JSObject *pobj = scopeChain();
while (JS_UNLIKELY(!pobj->isCall()))
@ -347,89 +370,6 @@ StackFrame::callObj() const
return pobj->asCall();
}
inline bool
StackFrame::maintainNestingState() const
{
/*
* Whether to invoke the nesting epilogue/prologue to maintain active
* frame counts and check for reentrant outer functions.
*/
return isNonEvalFunctionFrame() && !isGeneratorFrame() && script()->nesting();
}
inline bool
StackFrame::functionPrologue(JSContext *cx)
{
JS_ASSERT(isNonEvalFunctionFrame());
JS_ASSERT(!isGeneratorFrame());
if (fun()->isHeavyweight()) {
CallObject *callobj = CallObject::createForFunction(cx, this);
if (!callobj)
return false;
initScopeChain(*callobj);
} else {
/* Force instantiation of the scope chain, for JIT frames. */
scopeChain();
}
if (script()->nesting()) {
JS_ASSERT(maintainNestingState());
types::NestingPrologue(cx, this);
}
return true;
}
inline void
StackFrame::functionEpilogue(JSContext *cx)
{
JS_ASSERT(isNonEvalFunctionFrame());
if (cx->compartment->debugMode())
cx->runtime->debugScopes->onPopCall(this);
if (flags_ & (HAS_ARGS_OBJ | HAS_CALL_OBJ)) {
if (hasCallObj())
js_PutCallObject(this, scopeChain_->asCall());
if (hasArgsObj())
js_PutArgsObject(this);
}
if (maintainNestingState())
types::NestingEpilogue(this);
}
inline void
StackFrame::updateEpilogueFlags()
{
if (flags_ & (HAS_ARGS_OBJ | HAS_CALL_OBJ)) {
if (hasArgsObj() && !argsObj().maybeStackFrame())
flags_ &= ~HAS_ARGS_OBJ;
if (hasCallObj() && !callObj().maybeStackFrame()) {
/*
* For function frames, the call object may or may not have have an
* enclosing DeclEnv object, so we use the callee's parent, since
* it was the initial scope chain. For global (strict) eval frames,
* there is no callee, but the call object's parent is the initial
* scope chain.
*/
scopeChain_ = isFunctionFrame()
? callee().environment()
: &scopeChain_->asScope().enclosingScope();
flags_ &= ~HAS_CALL_OBJ;
}
}
/*
* For outer/inner function frames, undo the active frame balancing so that
* when we redo it in the epilogue we get the right final value. The other
* nesting epilogue changes (update active args/vars) are idempotent.
*/
if (maintainNestingState())
script()->nesting()->activeFrames++;
}
/*****************************************************************************/
STATIC_POSTCONDITION(!return || ubound(from) >= nvals)
@ -451,7 +391,7 @@ inline Value *
StackSpace::getStackLimit(JSContext *cx, MaybeReportError report)
{
FrameRegs &regs = cx->regs();
unsigned nvals = regs.fp()->numSlots() + STACK_JIT_EXTRA;
unsigned nvals = regs.fp()->script()->nslots + STACK_JIT_EXTRA;
return ensureSpace(cx, report, regs.sp, nvals)
? conservativeEnd_
: NULL;
@ -472,7 +412,7 @@ ContextStack::getCallFrame(JSContext *cx, MaybeReportError report, const CallArg
/* Include extra space to satisfy the method-jit stackLimit invariant. */
unsigned nvals = VALUES_PER_STACK_FRAME + script->nslots + StackSpace::STACK_JIT_EXTRA;
/* Maintain layout invariant: &formalArgs[0] == ((Value *)fp) - nformal. */
/* Maintain layout invariant: &formals[0] == ((Value *)fp) - nformal. */
if (args.length() == nformal) {
if (!space().ensureSpace(cx, report, firstUnused, nvals))
@ -564,9 +504,7 @@ ContextStack::popInlineFrame(FrameRegs &regs)
JS_ASSERT(&regs == &seg_->regs());
StackFrame *fp = regs.fp();
fp->functionEpilogue(cx_);
Value *newsp = fp->actualArgs() - 1;
Value *newsp = fp->actuals() - 1;
JS_ASSERT(newsp >= fp->prev()->base());
newsp[-1] = fp->returnValue();
@ -579,7 +517,7 @@ ContextStack::popFrameAfterOverflow()
/* Restore the regs to what they were on entry to JSOP_CALL. */
FrameRegs &regs = seg_->regs();
StackFrame *fp = regs.fp();
regs.popFrame(fp->actualArgsEnd());
regs.popFrame(fp->actuals() + fp->numActualArgs());
}
inline JSScript *

Просмотреть файл

@ -90,22 +90,22 @@ StackFrame::initDummyFrame(JSContext *cx, JSObject &chain)
flags_ = DUMMY | HAS_PREVPC | HAS_SCOPECHAIN;
initPrev(cx);
JS_ASSERT(chain.isGlobal());
setScopeChain(chain);
scopeChain_ = &chain;
}
template <class T, class U, StackFrame::TriggerPostBarriers doPostBarrier>
void
StackFrame::stealFrameAndSlots(JSContext *cx, StackFrame *fp, T *vp,
StackFrame::copyFrameAndValues(JSContext *cx, StackFrame *fp, T *vp,
StackFrame *otherfp, U *othervp, Value *othersp)
{
JS_ASSERT((U *)vp == (U *)this - ((U *)otherfp - othervp));
JS_ASSERT((Value *)othervp == otherfp->actualArgs() - 2);
JS_ASSERT((Value *)othervp == otherfp->generatorArgsSnapshotBegin());
JS_ASSERT(othersp >= otherfp->slots());
JS_ASSERT(othersp <= otherfp->base() + otherfp->numSlots());
JS_ASSERT(othersp <= otherfp->generatorSlotsSnapshotBegin() + otherfp->script()->nslots);
JS_ASSERT((T *)fp - vp == (U *)otherfp - othervp);
/* Copy args, StackFrame, and slots. */
U *srcend = (U *)otherfp->formalArgsEnd();
U *srcend = (U *)otherfp->generatorArgsSnapshotEnd();
T *dst = vp;
for (U *src = othervp; src < srcend; src++, dst++)
*dst = *src;
@ -119,39 +119,15 @@ StackFrame::stealFrameAndSlots(JSContext *cx, StackFrame *fp, T *vp,
for (U *src = (U *)otherfp->slots(); src < srcend; src++, dst++)
*dst = *src;
/*
* Repoint Call, Arguments, Block and With objects to the new live frame.
* Call and Arguments are done directly because we have pointers to them.
* Block and With objects are done indirectly through 'liveFrame'. See
* js_LiveFrameToFloating comment in jsiter.h.
*/
if (hasCallObj()) {
CallObject &obj = callObj();
obj.setStackFrame(this);
otherfp->flags_ &= ~HAS_CALL_OBJ;
if (js_IsNamedLambda(fun())) {
DeclEnvObject &env = obj.enclosingScope().asDeclEnv();
env.setStackFrame(this);
}
}
if (hasArgsObj()) {
ArgumentsObject &argsobj = argsObj();
if (argsobj.isNormalArguments())
argsobj.setStackFrame(this);
else
JS_ASSERT(!argsobj.maybeStackFrame());
otherfp->flags_ &= ~HAS_ARGS_OBJ;
}
if (cx->compartment->debugMode())
cx->runtime->debugScopes->onGeneratorFrameChange(otherfp, this);
}
/* Note: explicit instantiation for js_NewGenerator located in jsiter.cpp. */
template void StackFrame::stealFrameAndSlots<Value, HeapValue, StackFrame::NoPostBarrier>(
template void StackFrame::copyFrameAndValues<Value, HeapValue, StackFrame::NoPostBarrier>(
JSContext *, StackFrame *, Value *,
StackFrame *, HeapValue *, Value *);
template void StackFrame::stealFrameAndSlots<HeapValue, Value, StackFrame::DoPostBarrier>(
template void StackFrame::copyFrameAndValues<HeapValue, Value, StackFrame::DoPostBarrier>(
JSContext *, StackFrame *, HeapValue *,
StackFrame *, Value *, Value *);
@ -163,7 +139,7 @@ StackFrame::writeBarrierPost()
JSObject::writeBarrierPost(scopeChain_, (void *)&scopeChain_);
if (isDummyFrame())
return;
if (hasArgsObj())
if (flags_ & HAS_ARGS_OBJ)
JSObject::writeBarrierPost(argsObj_, (void *)&argsObj_);
if (isScriptFrame()) {
if (isFunctionFrame()) {
@ -178,8 +154,29 @@ StackFrame::writeBarrierPost()
HeapValue::writeBarrierPost(rval_, &rval_);
}
/*
 * If this frame is a suspended generator frame (a generator frame that
 * does not live on the contiguous stack), return its owning JSGenerator;
 * otherwise return NULL. Recovery relies on the frame snapshot living at
 * a fixed offset inside the JSGenerator (see offsetof use below).
 */
JSGenerator *
StackFrame::maybeSuspendedGenerator(JSRuntime *rt)
{
/*
 * A suspended generator's frame is embedded inside the JSGenerator object
 * instead of on the contiguous stack like all active frames.
 */
if (!isGeneratorFrame() || rt->stackSpace.containsFast(this))
return NULL;
/*
 * Once we know we have a suspended generator frame, there is a static
 * offset from the frame's snapshot to beginning of the JSGenerator.
 */
char *vp = reinterpret_cast<char *>(generatorArgsSnapshotBegin());
char *p = vp - offsetof(JSGenerator, stackSnapshot);
JSGenerator *gen = reinterpret_cast<JSGenerator *>(p);
JS_ASSERT(gen->fp == this);
return gen;
}
jsbytecode *
StackFrame::prevpcSlow(JSInlinedSite **pinlined)
StackFrame::prevpcSlow(InlinedSite **pinlined)
{
JS_ASSERT(!(flags_ & HAS_PREVPC));
#if defined(JS_METHODJIT) && defined(JS_MONOIC)
@ -197,7 +194,7 @@ StackFrame::prevpcSlow(JSInlinedSite **pinlined)
}
jsbytecode *
StackFrame::pcQuadratic(const ContextStack &stack, StackFrame *next, JSInlinedSite **pinlined)
StackFrame::pcQuadratic(const ContextStack &stack, StackFrame *next, InlinedSite **pinlined)
{
JS_ASSERT_IF(next, next->prev() == this);
@ -219,6 +216,116 @@ StackFrame::pcQuadratic(const ContextStack &stack, StackFrame *next, JSInlinedSi
return next->prevpc(pinlined);
}
/*
 * One-time setup run when execution of a script frame begins (pc at
 * script start, asserted below). Per frame kind:
 *  - strict eval: create and push a CallObject for the eval scope;
 *  - non-strict eval / global: nothing to do;
 *  - function: push a CallObject if the function is heavyweight, run the
 *    type-nesting prologue if the script has nesting, create the 'this'
 *    object for constructor calls, and fire the function-entry probe.
 * Returns false on failure (CallObject or 'this' creation), in which case
 * the frame must not be executed.
 */
bool
StackFrame::prologue(JSContext *cx, bool newType)
{
JS_ASSERT(!isDummyFrame());
JS_ASSERT(!isGeneratorFrame());
JS_ASSERT(cx->regs().pc == script()->code);
if (isEvalFrame()) {
/* Only strict eval gets its own (var-capturing) scope. */
if (script()->strictModeCode) {
CallObject *callobj = CallObject::createForStrictEval(cx, this);
if (!callobj)
return false;
pushOnScopeChain(*callobj);
flags_ |= HAS_CALL_OBJ;
}
return true;
}
if (isGlobalFrame())
return true;
JS_ASSERT(isNonEvalFunctionFrame());
if (fun()->isHeavyweight()) {
CallObject *callobj = CallObject::createForFunction(cx, this);
if (!callobj)
return false;
pushOnScopeChain(*callobj);
flags_ |= HAS_CALL_OBJ;
}
/* HAS_NESTING pairs with the NestingEpilogue call in epilogue(). */
if (script()->nesting()) {
types::NestingPrologue(cx, this);
flags_ |= HAS_NESTING;
}
if (isConstructing()) {
/* Root the callee across the allocation of the new 'this'. */
RootedObject callee(cx, &this->callee());
JSObject *obj = js_CreateThisForFunction(cx, callee, newType);
if (!obj)
return false;
functionThis() = ObjectValue(*obj);
}
Probes::enterJSFun(cx, fun(), script());
return true;
}
/*
 * Teardown counterpart of StackFrame::prologue, run when the frame is
 * popped (never for yields; block chain must already be popped). Mostly
 * DEBUG-asserts scope-chain invariants per frame kind; for function
 * frames it also notifies the debugger, fires the function-exit probe,
 * balances the type-nesting prologue, and enforces the constructor-return
 * rule (a constructor returning a primitive yields 'this' instead).
 */
void
StackFrame::epilogue(JSContext *cx)
{
JS_ASSERT(!isDummyFrame());
JS_ASSERT(!isYielding());
JS_ASSERT(!hasBlockChain());
if (isEvalFrame()) {
if (isStrictEvalFrame()) {
JS_ASSERT_IF(hasCallObj(), scopeChain()->asCall().isForEval());
if (cx->compartment->debugMode())
cx->runtime->debugScopes->onPopStrictEvalScope(this);
} else if (isDirectEvalFrame()) {
/* Non-strict direct eval runs in its caller's scope. */
if (isDebuggerFrame())
JS_ASSERT(!scopeChain()->isScope());
else
JS_ASSERT(scopeChain() == prev()->scopeChain());
} else {
JS_ASSERT(scopeChain()->isGlobal());
}
return;
}
if (isGlobalFrame()) {
JS_ASSERT(!scopeChain()->isScope());
return;
}
JS_ASSERT(isNonEvalFunctionFrame());
if (fun()->isHeavyweight()) {
/* Any call object on the chain must belong to this very script. */
JS_ASSERT_IF(hasCallObj(),
scopeChain()->asCall().getCalleeFunction()->script() == script());
} else {
JS_ASSERT(!scopeChain()->isCall() || scopeChain()->asCall().isForEval() ||
scopeChain()->asCall().getCalleeFunction()->script() != script());
}
if (cx->compartment->debugMode())
cx->runtime->debugScopes->onPopCall(this);
Probes::exitJSFun(cx, fun(), script());
/* Balance types::NestingPrologue run in the prologue (HAS_NESTING). */
if (script()->nesting() && (flags_ & HAS_NESTING))
types::NestingEpilogue(this);
if (isConstructing() && returnValue().isPrimitive())
setReturnValue(ObjectValue(constructorThis()));
}
/*
 * Method-JIT entry point for the strict-eval part of StackFrame::prologue:
 * create the eval's CallObject and push it on the scope chain. Returns
 * false on OOM.
 */
bool
StackFrame::jitStrictEvalPrologue(JSContext *cx)
{
JS_ASSERT(isStrictEvalFrame());
CallObject *callobj = CallObject::createForStrictEval(cx, this);
if (!callobj)
return false;
pushOnScopeChain(*callobj);
flags_ |= HAS_CALL_OBJ;
return true;
}
bool
StackFrame::pushBlock(JSContext *cx, StaticBlockObject &block)
{
@ -230,7 +337,7 @@ StackFrame::pushBlock(JSContext *cx, StaticBlockObject &block)
if (!clone)
return false;
scopeChain_ = clone;
pushOnScopeChain(*clone);
}
flags_ |= HAS_BLOCKCHAIN;
@ -247,10 +354,8 @@ StackFrame::popBlock(JSContext *cx)
cx->runtime->debugScopes->onPopBlock(cx, this);
if (blockChain_->needsClone()) {
ClonedBlockObject &clone = scopeChain()->asClonedBlock();
JS_ASSERT(clone.staticBlock() == *blockChain_);
clone.put(cx->fp());
scopeChain_ = &clone.enclosingScope();
JS_ASSERT(scopeChain_->asClonedBlock().staticBlock() == *blockChain_);
popOffScopeChain();
}
blockChain_ = blockChain_->enclosingBlock();
@ -262,7 +367,8 @@ StackFrame::popWith(JSContext *cx)
if (cx->compartment->debugMode())
cx->runtime->debugScopes->onPopWith(this);
setScopeChain(scopeChain()->asWith().enclosingScope());
JS_ASSERT(scopeChain()->isWith());
popOffScopeChain();
}
void
@ -277,7 +383,7 @@ StackFrame::mark(JSTracer *trc)
gc::MarkObjectUnbarriered(trc, &scopeChain_, "scope chain");
if (isDummyFrame())
return;
if (hasArgsObj())
if (flags_ & HAS_ARGS_OBJ)
gc::MarkObjectUnbarriered(trc, &argsObj_, "arguments");
if (isFunctionFrame()) {
gc::MarkObjectUnbarriered(trc, &exec.fun, "fun");
@ -462,7 +568,7 @@ StackSpace::containingSegment(const StackFrame *target) const
}
void
StackSpace::markFrameSlots(JSTracer *trc, StackFrame *fp, Value *slotsEnd, jsbytecode *pc)
StackSpace::markFrameValues(JSTracer *trc, StackFrame *fp, Value *slotsEnd, jsbytecode *pc)
{
Value *slotsBegin = fp->slots();
@ -536,12 +642,12 @@ StackSpace::mark(JSTracer *trc)
jsbytecode *pc = seg->maybepc();
for (StackFrame *fp = seg->maybefp(); (Value *)fp > (Value *)seg; fp = fp->prev()) {
/* Mark from fp->slots() to slotsEnd. */
markFrameSlots(trc, fp, slotsEnd, pc);
markFrameValues(trc, fp, slotsEnd, pc);
fp->mark(trc);
slotsEnd = (Value *)fp;
JSInlinedSite *site;
InlinedSite *site;
pc = fp->prevpc(&site);
JS_ASSERT_IF(fp->prev(), !site);
}
@ -701,7 +807,7 @@ ContextStack::ensureOnTop(JSContext *cx, MaybeReportError report, unsigned nvars
*/
if (FrameRegs *regs = cx->maybeRegs()) {
JSFunction *fun = NULL;
if (JSInlinedSite *site = regs->inlined()) {
if (InlinedSite *site = regs->inlined()) {
mjit::JITChunk *chunk = regs->fp()->jit()->chunk(regs->pc);
fun = chunk->inlineFrames()[site->inlineIndex].fun;
} else {
@ -852,7 +958,7 @@ ContextStack::pushExecuteFrame(JSContext *cx, JSScript *script, const Value &thi
StackFrame *prev = evalInFrame ? evalInFrame : maybefp();
StackFrame *fp = reinterpret_cast<StackFrame *>(firstUnused + 2);
fp->initExecuteFrame(script, prev, seg_->maybeRegs(), thisv, scopeChain, type);
SetValueRangeToUndefined(fp->slots(), script->nfixed);
fp->initVarsToUndefined();
efg->regs_.prepareToRun(*fp, script);
/* pushRegs() below links the prev-frame; manually link the prev-call. */
@ -894,9 +1000,6 @@ ContextStack::popFrame(const FrameGuard &fg)
JS_ASSERT(space().firstUnused() == fg.regs_.sp);
JS_ASSERT(&fg.regs_ == &seg_->regs());
if (fg.regs_.fp()->isNonEvalFunctionFrame())
fg.regs_.fp()->functionEpilogue(cx_);
seg_->popRegs(fg.prevRegs_);
if (fg.pushedSeg_)
popSegment();
@ -912,11 +1015,11 @@ ContextStack::popFrame(const FrameGuard &fg)
bool
ContextStack::pushGeneratorFrame(JSContext *cx, JSGenerator *gen, GeneratorFrameGuard *gfg)
{
StackFrame *genfp = gen->floatingFrame();
HeapValue *genvp = gen->floatingStack;
unsigned vplen = (HeapValue *)genfp - genvp;
HeapValue *genvp = gen->stackSnapshot;
JS_ASSERT(genvp == HeapValueify(gen->fp->generatorArgsSnapshotBegin()));
unsigned vplen = HeapValueify(gen->fp->generatorArgsSnapshotEnd()) - genvp;
unsigned nvars = vplen + VALUES_PER_STACK_FRAME + genfp->numSlots();
unsigned nvars = vplen + VALUES_PER_STACK_FRAME + gen->fp->script()->nslots;
Value *firstUnused = ensureOnTop(cx, REPORT_ERROR, nvars, CAN_EXTEND, &gfg->pushedSeg_);
if (!firstUnused)
return false;
@ -935,15 +1038,13 @@ ContextStack::pushGeneratorFrame(JSContext *cx, JSGenerator *gen, GeneratorFrame
* We don't need to worry about generational barriers as the generator
* object has a trace hook and cannot be nursery allocated.
*/
JSObject *genobj = js_FloatingFrameToGenerator(genfp)->obj;
JS_ASSERT(genobj->getClass()->trace);
JSObject::writeBarrierPre(genobj);
JS_ASSERT(gen->obj->getClass()->trace);
JSObject::writeBarrierPre(gen->obj);
/* Copy from the generator's floating frame to the stack. */
stackfp->stealFrameAndSlots<Value, HeapValue, StackFrame::NoPostBarrier>(
cx, stackfp, stackvp, genfp, genvp, gen->regs.sp);
stackfp->copyFrameAndValues<Value, HeapValue, StackFrame::NoPostBarrier>(
cx, stackfp, stackvp, gen->fp, genvp, gen->regs.sp);
stackfp->resetGeneratorPrev(cx);
stackfp->unsetFloatingGenerator();
gfg->regs_.rebaseFromTo(gen->regs, *stackfp);
gfg->prevRegs_ = seg_->pushRegs(gfg->regs_);
@ -956,18 +1057,19 @@ void
ContextStack::popGeneratorFrame(const GeneratorFrameGuard &gfg)
{
JSGenerator *gen = gfg.gen_;
StackFrame *genfp = gen->floatingFrame();
HeapValue *genvp = gen->floatingStack;
HeapValue *genvp = gen->stackSnapshot;
JS_ASSERT(genvp == HeapValueify(gen->fp->generatorArgsSnapshotBegin()));
const FrameRegs &stackRegs = gfg.regs_;
StackFrame *stackfp = stackRegs.fp();
Value *stackvp = gfg.stackvp_;
/* Copy from the stack to the generator's floating frame. */
gen->regs.rebaseFromTo(stackRegs, *genfp);
genfp->stealFrameAndSlots<HeapValue, Value, StackFrame::DoPostBarrier>(
cx_, genfp, genvp, stackfp, stackvp, stackRegs.sp);
genfp->setFloatingGenerator();
if (stackfp->isYielding()) {
gen->regs.rebaseFromTo(stackRegs, *gen->fp);
gen->fp->copyFrameAndValues<HeapValue, Value, StackFrame::DoPostBarrier>(
cx_, gen->fp, genvp, stackfp, stackvp, stackRegs.sp);
}
/* ~FrameGuard/popFrame will finish the popping. */
JS_ASSERT(ImplicitCast<const FrameGuard>(gfg).pushed());
@ -1016,7 +1118,7 @@ StackIter::popFrame()
JS_ASSERT(seg_->contains(oldfp));
fp_ = fp_->prev();
if (seg_->contains(fp_)) {
JSInlinedSite *inline_;
InlinedSite *inline_;
pc_ = oldfp->prevpc(&inline_);
JS_ASSERT(!inline_);
@ -1029,7 +1131,7 @@ StackIter::popFrame()
*/
if (oldfp->isGeneratorFrame()) {
/* Generator's args do not overlap with the caller's expr stack. */
sp_ = (Value *)oldfp->actualArgs() - 2;
sp_ = oldfp->generatorArgsSnapshotBegin();
} else if (oldfp->isNonEvalFunctionFrame()) {
/*
* When Invoke is called from a native, there will be an enclosing
@ -1039,7 +1141,7 @@ StackIter::popFrame()
* cases, the actual arguments of the callee should be included in
* the caller's expr stack.
*/
sp_ = oldfp->actualArgsEnd();
sp_ = oldfp->actuals() + oldfp->numActualArgs();
} else if (oldfp->isFramePushedByExecute()) {
/* pushExecuteFrame pushes exactly (callee, this) before frame. */
sp_ = (Value *)oldfp - 2;
@ -1094,7 +1196,8 @@ StackIter::startOnSegment(StackSegment *seg)
static void JS_NEVER_INLINE
CrashIfInvalidSlot(StackFrame *fp, Value *vp)
{
if (vp < fp->slots() || vp >= fp->slots() + fp->script()->nslots) {
Value *slots = (Value *)(fp + 1);
if (vp < slots || vp >= slots + fp->script()->nslots) {
JS_ASSERT(false && "About to dereference invalid slot");
*(int *)0xbad = 0; // show up nicely in crash-stats
MOZ_Assert("About to dereference invalid slot", __FILE__, __LINE__);

Просмотреть файл

@ -14,14 +14,7 @@
struct JSContext;
struct JSCompartment;
#ifdef JS_METHODJIT
namespace js { namespace mjit { struct CallSite; }}
typedef js::mjit::CallSite JSInlinedSite;
#else
struct JSInlinedSite {};
#endif
typedef /* js::mjit::RejoinState */ size_t JSRejoinState;
extern void js_DumpStackFrame(JSContext *, js::StackFrame *);
namespace js {
@ -49,10 +42,18 @@ class StaticBlockObject;
#ifdef JS_METHODJIT
namespace mjit {
class CallCompiler;
class GetPropCompiler;
struct CallSite;
struct JITScript;
jsbytecode *NativeToPC(JITScript *jit, void *ncode, CallSite **pinline);
namespace ic { struct GetElementIC; }
}
typedef mjit::CallSite InlinedSite;
#else
struct InlinedSite {};
#endif
typedef size_t FrameRejoinState;
namespace detail {
struct OOMCheck;
@ -63,10 +64,9 @@ namespace detail {
/*
* VM stack layout
*
* SpiderMonkey uses a per-thread stack to store the activation records,
* SpiderMonkey uses a per-runtime stack to store the activation records,
* parameters, locals, and expression temporaries for the stack of actively
* executing scripts, functions and generators. The stack is owned by the
* StackSpace object stored in the runtime.
* executing scripts, functions and generators.
*
* The stack is subdivided into contiguous segments of memory which
* have a memory layout invariant that allows fixed offsets to be used for stack
@ -78,13 +78,13 @@ namespace detail {
* A sample memory layout of a segment looks like:
*
* regs
* .---------------------------------------------.
* | V
* | fp .--FrameRegs--. sp
* | V V
* |StackSegment| slots |StackFrame| slots |StackFrame| slots |
* | ^ |
* ? <----------' `-----------'
* .------------------------------------------------.
* | V
* | fp .--FrameRegs--. sp
* | V V
* |StackSegment| values |StackFrame| values |StackFrame| values |
* | ^ |
* ? <-----------' `------------'
* prev prev
*
* A segment starts with a fixed-size header (js::StackSegment) which logically
@ -92,14 +92,14 @@ namespace detail {
* end of the stack.
*
* Each script activation (global or function code) is given a fixed-size header
* (js::StackFrame) which is associated with the values (called "slots") before
* and after it. The frame contains bookkeeping information about the activation
* and links to the previous frame.
* (js::StackFrame) which is associated with the values before and after it.
* The frame contains bookkeeping information about the activation and links to
* the previous frame.
*
* The slots preceding a (function) StackFrame in memory are the arguments of
* the call. The slots after a StackFrame in memory are its locals followed by
* The value preceding a (function) StackFrame in memory are the arguments of
* the call. The values after a StackFrame in memory are its locals followed by
* its expression stack. There is no clean line between the arguments of a
* frame and the expression stack of the previous frame since the top slots of
* frame and the expression stack of the previous frame since the top values of
* the expression become the arguments of a call. There are also layout
* invariants concerning the arguments and StackFrame; see "Arguments" comment
* in StackFrame for more details.
@ -115,20 +115,20 @@ namespace detail {
* A call to a native (C++) function does not push a frame. Instead, an array
* of values is passed to the native. The layout of this array is abstracted by
* js::CallArgs. With respect to the StackSegment layout above, the args to a
* native call are inserted anywhere there can be slots. A sample memory layout
* native call are inserted anywhere there can be values. A sample memory layout
* looks like:
*
* regs
* .----------------------------------------.
* | V
* | fp .--FrameRegs--. sp
* | V V
* |StackSegment| native call | slots |StackFrame| slots | native call |
* | vp <--argc--> end vp <--argc--> end
* | CallArgs <------------------------------ CallArgs
* | prev ^
* `-----------------------------------------------------'
* calls
* .------------------------------------------.
* | V
* | fp .--FrameRegs--. sp
* | V V
* |StackSegment| native call | values |StackFrame| values | native call |
* | vp <--argc--> end vp <--argc--> end
* | CallArgs <------------------------------ CallArgs
* | prev ^
* `-------------------------------------------------------'
* calls
*
* Here there are two native calls on the stack. The start of each native arg
* range is recorded by a CallArgs element which is prev-linked like stack
@ -295,10 +295,14 @@ CallArgsListFromVp(unsigned argc, Value *vp, CallArgsList *prev)
/*****************************************************************************/
enum MaybeCheckAliasing { CHECK_ALIASING = true, DONT_CHECK_ALIASING = false };
/*****************************************************************************/
/* Flags specified for a frame as it is constructed. */
enum InitialFrameFlags {
INITIAL_NONE = 0,
INITIAL_CONSTRUCT = 0x80, /* == StackFrame::CONSTRUCTING, asserted below */
INITIAL_CONSTRUCT = 0x40, /* == StackFrame::CONSTRUCTING, asserted below */
INITIAL_LOWERED = 0x200000 /* == StackFrame::LOWERED_CALL_APPLY, asserted below */
};
@ -324,20 +328,22 @@ class StackFrame
EVAL = 0x8, /* frame pushed for eval() or debugger eval */
DEBUGGER = 0x10, /* frame pushed for debugger eval */
GENERATOR = 0x20, /* frame is associated with a generator */
FLOATING_GENERATOR = 0x40, /* frame is is in generator obj, not on stack */
CONSTRUCTING = 0x80, /* frame is for a constructor invocation */
CONSTRUCTING = 0x40, /* frame is for a constructor invocation */
/* Temporary frame states */
YIELDING = 0x100, /* js::Interpret dispatched JSOP_YIELD */
FINISHED_IN_INTERP = 0x200, /* set if frame finished in Interpret() */
YIELDING = 0x80, /* Interpret dispatched JSOP_YIELD */
FINISHED_IN_INTERP = 0x100, /* set if frame finished in Interpret() */
/* Function arguments */
OVERFLOW_ARGS = 0x400, /* numActualArgs > numFormalArgs */
UNDERFLOW_ARGS = 0x800, /* numActualArgs < numFormalArgs */
OVERFLOW_ARGS = 0x200, /* numActualArgs > numFormalArgs */
UNDERFLOW_ARGS = 0x400, /* numActualArgs < numFormalArgs */
/* Function prologue state */
HAS_CALL_OBJ = 0x800, /* CallObject created for heavyweight fun */
HAS_ARGS_OBJ = 0x1000, /* ArgumentsObject created for needsArgsObj script */
HAS_NESTING = 0x2000, /* NestingPrologue called for frame */
/* Lazy frame initialization */
HAS_CALL_OBJ = 0x1000, /* frame has a callobj reachable from scopeChain_ */
HAS_ARGS_OBJ = 0x2000, /* frame has an argsobj in StackFrame::args */
HAS_HOOK_DATA = 0x4000, /* frame has hookData_ set */
HAS_ANNOTATION = 0x8000, /* frame has annotation_ set */
HAS_RVAL = 0x10000, /* frame has rval_ set */
@ -360,22 +366,20 @@ class StackFrame
JSFunction *fun; /* function frame, pre GetScopeChain */
} exec;
union { /* describes the arguments of a function */
unsigned nactual; /* for non-eval frames */
unsigned nactual; /* for non-eval frames */
JSScript *evalScript; /* the script of an eval-in-function */
} u;
mutable JSObject *scopeChain_; /* current scope chain */
StackFrame *prev_; /* previous cx->regs->fp */
void *ncode_; /* return address for method JIT */
/* Lazily initialized */
Value rval_; /* return value of the frame */
StaticBlockObject *blockChain_; /* innermost let block */
ArgumentsObject *argsObj_; /* if has HAS_ARGS_OBJ */
jsbytecode *prevpc_; /* pc of previous frame*/
JSInlinedSite *prevInline_; /* inlined site in previous frame */
void *hookData_; /* closure returned by call hook */
void *annotation_; /* perhaps remove with bug 546848 */
JSRejoinState rejoin_; /* If rejoining into the interpreter
mutable JSObject *scopeChain_; /* if HAS_SCOPECHAIN, current scope chain */
StackFrame *prev_; /* if HAS_PREVPC, previous cx->regs->fp */
void *ncode_; /* for a jit frame, return address for method JIT */
Value rval_; /* if HAS_RVAL, return value of the frame */
StaticBlockObject *blockChain_; /* if HAS_BLOCKCHAIN, innermost let block */
ArgumentsObject *argsObj_; /* if HAS_ARGS_OBJ, the call's arguments object */
jsbytecode *prevpc_; /* if HAS_PREVPC, pc of previous frame*/
InlinedSite *prevInline_; /* for a jit frame, inlined site in previous frame */
void *hookData_; /* if HAS_HOOK_DATA, closure returned by call hook */
void *annotation_; /* if HAS_ANNOTATION, perhaps remove with bug 546848 */
FrameRejoinState rejoin_; /* for a jit frame rejoining the interpreter
* from JIT code, state at rejoin. */
static void staticAsserts() {
@ -384,15 +388,39 @@ class StackFrame
}
inline void initPrev(JSContext *cx);
jsbytecode *prevpcSlow(JSInlinedSite **pinlined);
jsbytecode *prevpcSlow(InlinedSite **pinlined);
void writeBarrierPost();
public:
/*
* Frame initialization
*
* After acquiring a pointer to an uninitialized stack frame on the VM
* stack from StackSpace, these members are used to initialize the stack
* frame before officially pushing the frame into the context.
* These utilities provide raw access to the values associated with a
* StackFrame (see "VM stack layout" comment). The utilities are private
* since they are not able to assert that only unaliased vars/formals are
* accessed. Normal code should prefer the StackFrame::unaliased* members
* (or FrameRegs::stackDepth for the usual "depth is at least" assertions).
*/
Value *slots() const { return (Value *)(this + 1); }
Value *base() const { return slots() + script()->nfixed; }
Value *formals() const { return (Value *)this - fun()->nargs; }
Value *actuals() const { return formals() - (flags_ & OVERFLOW_ARGS ? 2 + u.nactual : 0); }
friend class FrameRegs;
friend class ContextStack;
friend class StackSpace;
friend class StackIter;
friend class CallObject;
friend class ClonedBlockObject;
friend class ArgumentsObject;
friend void ::js_DumpStackFrame(JSContext *, StackFrame *);
friend void ::js_ReportIsNotFunction(JSContext *, const js::Value *, unsigned);
#ifdef JS_METHODJIT
friend class mjit::CallCompiler;
friend class mjit::GetPropCompiler;
friend class mjit::ic::GetElementIC;
#endif
/*
* Frame initialization, called by ContextStack operations after acquiring
* the raw memory for the frame:
*/
/* Used for Invoke, Interpret, trace-jit LeaveTree, and method-jit stubs. */
@ -406,19 +434,43 @@ class StackFrame
void initExecuteFrame(JSScript *script, StackFrame *prev, FrameRegs *regs,
const Value &thisv, JSObject &scopeChain, ExecuteType type);
/* Used when activating generators. */
enum TriggerPostBarriers {
DoPostBarrier = true,
NoPostBarrier = false
};
template <class T, class U, TriggerPostBarriers doPostBarrier>
void stealFrameAndSlots(JSContext *cx, StackFrame *fp, T *vp,
StackFrame *otherfp, U *othervp, Value *othersp);
void writeBarrierPost();
/* Perhaps one fine day we will remove dummy frames. */
void initDummyFrame(JSContext *cx, JSObject &chain);
public:
/*
* Frame prologue/epilogue
*
* Every stack frame must have 'prologue' called before executing the
* first op and 'epilogue' called after executing the last op and before
* popping the frame (whether the exit is exceptional or not).
*
* For inline JS calls/returns, it is easy to call the prologue/epilogue
* exactly once. When calling JS from C++, Invoke/Execute push the stack
* frame but do *not* call the prologue/epilogue. That means Interpret
* must call the prologue/epilogue for the entry frame. This scheme
* simplifies jit compilation.
*
* An important corner case is what happens when an error occurs (OOM,
* over-recursed) after pushing the stack frame but before 'prologue' is
* called or completes fully. To simplify usage, 'epilogue' does not assume
* 'prologue' has completed and handles all the intermediate state details.
*
* The 'newType' option indicates whether the constructed 'this' value (if
* there is one) should be given a new singleton type.
*/
bool prologue(JSContext *cx, bool newType);
void epilogue(JSContext *cx);
/* Subsets of 'prologue' called from jit code. */
inline bool jitHeavyweightFunctionPrologue(JSContext *cx);
inline void jitTypeNestingPrologue(JSContext *cx);
bool jitStrictEvalPrologue(JSContext *cx);
/* Initialize local variables of newly-pushed frame. */
void initVarsToUndefined();
/*
* Stack frame type
*
@ -507,39 +559,124 @@ class StackFrame
}
inline void resetGeneratorPrev(JSContext *cx);
inline void resetInlinePrev(StackFrame *prevfp, jsbytecode *prevpc);
inline void initInlineFrame(JSFunction *fun, StackFrame *prevfp, jsbytecode *prevpc);
/*
* (Unaliased) locals and arguments
*
* Only non-eval function frames have arguments. The arguments pushed by
* the caller are the 'actual' arguments. The declared arguments of the
* callee are the 'formal' arguments. When the caller passes less or equal
* actual arguments, the actual and formal arguments are the same array
* (but with different extents). When the caller passes too many arguments,
* the formal subset of the actual arguments is copied onto the top of the
* stack. This allows the engine to maintain a jit-time constant offset of
* arguments from the frame pointer. Since the formal subset of the actual
* arguments is potentially on the stack twice, it is important for all
* reads/writes to refer to the same canonical memory location. This is
* abstracted by the unaliased{Formal,Actual} methods.
*
* When a local/formal variable is "aliased" (accessed by nested closures,
 * dynamic scope operations, or 'arguments'), the canonical location for
* that value is the slot of an activation object (scope or arguments).
* Currently, all variables are given slots in *both* the stack frame and
* heap objects, even though, as just described, only one should ever be
* accessed. Thus, it is up to the code performing an access to access the
* correct value. These functions assert that accesses to stack values are
 * unaliased. For more about canonical value locations, see the "VM stack
 * layout" comment.
*/
inline Value &unaliasedVar(unsigned i, MaybeCheckAliasing = CHECK_ALIASING);
inline Value &unaliasedLocal(unsigned i, MaybeCheckAliasing = CHECK_ALIASING);
bool hasArgs() const { return isNonEvalFunctionFrame(); }
inline Value &unaliasedFormal(unsigned i, MaybeCheckAliasing = CHECK_ALIASING);
inline Value &unaliasedActual(unsigned i);
template <class Op> inline void forEachUnaliasedActual(Op op);
inline unsigned numFormalArgs() const;
inline unsigned numActualArgs() const;
/*
* Arguments object
*
* If a non-eval function has script->needsArgsObj, an arguments object is
* created in the prologue and stored in the local variable for the
* 'arguments' binding (script->argumentsLocal). Since this local is
* mutable, the arguments object can be overwritten and we can "lose" the
* arguments object. Thus, StackFrame keeps an explicit argsObj_ field so
* that the original arguments object is always available.
*/
ArgumentsObject &argsObj() const;
void initArgsObj(ArgumentsObject &argsobj);
inline JSObject *createRestParameter(JSContext *cx);
/*
* Frame slots
* Scope chain
*
* A frame's 'slots' are the fixed slots associated with the frame (like
* local variables) followed by an expression stack holding temporary
* values. A frame's 'base' is the base of the expression stack.
* In theory, the scope chain would contain an object for every lexical
* scope. However, only objects that are required for dynamic lookup are
* actually created.
*
* Given that a (non-dummy) StackFrame corresponds roughly to a ES5
* Execution Context (ES5 10.3), StackFrame::varObj corresponds to the
 * VariableEnvironment component of an Execution Context. Intuitively, the
* variables object is where new bindings (variables and functions) are
* stored. One might expect that this is either the Call object or
* scopeChain.globalObj for function or global code, respectively, however
* the JSAPI allows calls of Execute to specify a variables object on the
* scope chain other than the call/global object. This allows embeddings to
* run multiple scripts under the same global, each time using a new
* variables object to collect and discard the script's global variables.
*/
Value *slots() const {
return (Value *)(this + 1);
inline HandleObject scopeChain() const;
inline ScopeObject &aliasedVarScope(ScopeCoordinate sc) const;
inline GlobalObject &global() const;
inline CallObject &callObj() const;
inline JSObject &varObj();
inline void pushOnScopeChain(ScopeObject &scope);
inline void popOffScopeChain();
/*
* Block chain
*
* Entering/leaving a let (or exception) block may do 1 or 2 things: First,
 * a static block object (created at compile time and stored in the
 * script) is pushed on StackFrame::blockChain. Second, the static block
 * may be cloned to hold the dynamic values if this is needed for dynamic
* scope access. A clone is created for a static block iff
* StaticBlockObject::needsClone.
*/
bool hasBlockChain() const {
return (flags_ & HAS_BLOCKCHAIN) && blockChain_;
}
Value *base() const {
return slots() + script()->nfixed;
StaticBlockObject *maybeBlockChain() {
return (flags_ & HAS_BLOCKCHAIN) ? blockChain_ : NULL;
}
Value &varSlot(unsigned i) {
JS_ASSERT(i < script()->nfixed);
JS_ASSERT_IF(maybeFun(), i < script()->bindings.numVars());
return slots()[i];
StaticBlockObject &blockChain() const {
JS_ASSERT(hasBlockChain());
return *blockChain_;
}
Value &localSlot(unsigned i) {
/* Let variables can be above script->nfixed. */
JS_ASSERT(i < script()->nslots);
return slots()[i];
}
bool pushBlock(JSContext *cx, StaticBlockObject &block);
void popBlock(JSContext *cx);
/*
* With
*
* Entering/leaving a with (or E4X filter) block pushes/pops an object
* on the scope chain. Pushing uses pushOnScopeChain, popping should use
* popWith.
*/
void popWith(JSContext *cx);
/*
* Script
@ -556,6 +693,17 @@ class StackFrame
* the same VMFrame. Other calls force expansion of the inlined frames.
*/
JSScript *script() const {
JS_ASSERT(isScriptFrame());
return isFunctionFrame()
? isEvalFrame() ? u.evalScript : fun()->script()
: exec.script;
}
JSScript *maybeScript() const {
return isScriptFrame() ? script() : NULL;
}
/*
* Get the frame's current bytecode, assuming |this| is in |cx|. next is
* frame whose prev == this, NULL if not known or if this == cx->fp().
@ -571,15 +719,12 @@ class StackFrame
*
* Using next can avoid this, but in most cases prefer ScriptFrameIter;
* it is amortized O(1).
*
* When I get to the bottom I go back to the top of the stack
* Where I stop and I turn and I go right back
* Till I get to the bottom and I see you again...
*/
jsbytecode *pcQuadratic(const ContextStack &stack, StackFrame *next = NULL,
JSInlinedSite **pinlined = NULL);
jsbytecode *prevpc(JSInlinedSite **pinlined) {
jsbytecode *pcQuadratic(const ContextStack &stack, StackFrame *next = NULL,
InlinedSite **pinlined = NULL);
jsbytecode *prevpc(InlinedSite **pinlined) {
if (flags_ & HAS_PREVPC) {
if (pinlined)
*pinlined = prevInline_;
@ -588,45 +733,11 @@ class StackFrame
return prevpcSlow(pinlined);
}
JSInlinedSite *prevInline() {
InlinedSite *prevInline() {
JS_ASSERT(flags_ & HAS_PREVPC);
return prevInline_;
}
JSScript *script() const {
JS_ASSERT(isScriptFrame());
return isFunctionFrame()
? isEvalFrame() ? u.evalScript : fun()->script()
: exec.script;
}
JSScript *functionScript() const {
JS_ASSERT(isFunctionFrame());
return isEvalFrame() ? u.evalScript : fun()->script();
}
JSScript *globalScript() const {
JS_ASSERT(isGlobalFrame());
return exec.script;
}
JSScript *maybeScript() const {
return isScriptFrame() ? script() : NULL;
}
size_t numFixed() const {
return script()->nfixed;
}
size_t numSlots() const {
return script()->nslots;
}
size_t numGlobalVars() const {
JS_ASSERT(isGlobalFrame());
return exec.script->nfixed;
}
/*
* Function
*
@ -654,94 +765,6 @@ class StackFrame
return fp->script()->function();
}
/*
* Arguments
*
* Only non-eval function frames have arguments. A frame follows its
* arguments contiguously in memory. The arguments pushed by the caller are
* the 'actual' arguments. The declared arguments of the callee are the
* 'formal' arguments. When the caller passes less or equal actual
* arguments, the actual and formal arguments are the same array (but with
* different extents). When the caller passes too many arguments, the
* formal subset of the actual arguments is copied onto the top of the
* stack. This allows the engine to maintain a jit-time constant offset of
* arguments from the frame pointer. Since the formal subset of the actual
* arguments is potentially on the stack twice, it is important for all
* reads/writes to refer to the same canonical memory location.
*
* An arguments object (the object returned by the 'arguments' keyword) is
* lazily created, so a given function frame may or may not have one.
*/
/* True if this frame has arguments. Contrast with hasArgsObj. */
bool hasArgs() const {
return isNonEvalFunctionFrame();
}
unsigned numFormalArgs() const {
JS_ASSERT(hasArgs());
return fun()->nargs;
}
Value &formalArg(unsigned i) const {
JS_ASSERT(i < numFormalArgs());
return formalArgs()[i];
}
Value *formalArgs() const {
JS_ASSERT(hasArgs());
return (Value *)this - numFormalArgs();
}
Value *formalArgsEnd() const {
JS_ASSERT(hasArgs());
return (Value *)this;
}
Value *maybeFormalArgs() const {
return (flags_ & (FUNCTION | EVAL)) == FUNCTION
? formalArgs()
: NULL;
}
inline unsigned numActualArgs() const;
inline Value *actualArgs() const;
inline Value *actualArgsEnd() const;
inline Value &canonicalActualArg(unsigned i) const;
template <class Op>
inline bool forEachCanonicalActualArg(Op op, unsigned start = 0, unsigned count = unsigned(-1));
template <class Op> inline bool forEachFormalArg(Op op);
/* XXX: all these argsObj functions will be removed with bug 659577. */
bool hasArgsObj() const {
/*
* HAS_ARGS_OBJ is still technically not equivalent to
* script()->needsArgsObj() during functionPrologue (where GC can
* observe a frame that needsArgsObj but has not yet been given the
* args). This can be fixed by creating and rooting the args/call
* object before pushing the frame, which should be done eventually.
*/
return !!(flags_ & HAS_ARGS_OBJ);
}
ArgumentsObject &argsObj() const {
JS_ASSERT(hasArgsObj());
return *argsObj_;
}
ArgumentsObject *maybeArgsObj() const {
return hasArgsObj() ? &argsObj() : NULL;
}
void initArgsObj(ArgumentsObject &argsObj) {
JS_ASSERT(script()->needsArgsObj());
JS_ASSERT(!hasArgsObj());
argsObj_ = &argsObj;
flags_ |= HAS_ARGS_OBJ;
}
/*
* This value
*
@ -752,31 +775,25 @@ class StackFrame
* frames and directly before the frame for global frames. The *Args
* members assert !isEvalFrame(), so we implement specialized inline
* methods for accessing 'this'. When the caller has static knowledge that
* a frame is a function or global frame, 'functionThis' and 'globalThis',
* respectively, allow more efficient access.
* a frame is a function, 'functionThis' allows more efficient access.
*/
Value &functionThis() const {
JS_ASSERT(isFunctionFrame());
if (isEvalFrame())
return ((Value *)this)[-1];
return formalArgs()[-1];
return formals()[-1];
}
JSObject &constructorThis() const {
JS_ASSERT(hasArgs());
return formalArgs()[-1].toObject();
}
Value &globalThis() const {
JS_ASSERT(isGlobalFrame());
return ((Value *)this)[-1];
return formals()[-1].toObject();
}
Value &thisValue() const {
if (flags_ & (EVAL | GLOBAL))
return ((Value *)this)[-1];
return formalArgs()[-1];
return formals()[-1];
}
/*
@ -802,7 +819,7 @@ class StackFrame
JS_ASSERT(isScriptFrame());
Value &calleev = flags_ & (EVAL | GLOBAL)
? ((Value *)this)[-2]
: formalArgs()[-2];
: formals()[-2];
JS_ASSERT(calleev.isObjectOrNull());
return calleev;
}
@ -811,118 +828,13 @@ class StackFrame
JS_ASSERT(isFunctionFrame());
if (isEvalFrame())
return ((Value *)this)[-2];
return formalArgs()[-2];
return formals()[-2];
}
CallReceiver callReceiver() const {
return CallReceiverFromArgv(formalArgs());
return CallReceiverFromArgv(formals());
}
/*
* Scope chain
*
* Every frame has a scopeChain which, when traversed via the 'parent' link
* to the root, indicates the current global object. A 'call object' is a
* node on a scope chain representing a function's activation record. A
* call object is used for dynamically-scoped name lookup and lexically-
* scoped upvar access. The call object holds the values of locals and
* arguments when a function returns (and its stack frame is popped). For
* performance reasons, call objects are created lazily for 'lightweight'
* functions, i.e., functions which are not statically known to require a
* call object. Thus, a given function frame may or may not have a call
* object. When a function does have a call object, it is found by walking
* up the scope chain until the first call object. Thus, it is important,
* when setting the scope chain, to indicate whether the new scope chain
* contains a new call object and thus changes the 'hasCallObj' state.
*
* The method JIT requires that HAS_SCOPECHAIN be set for all frames which
* use NAME or related opcodes that can access the scope chain (so it does
* not have to test the bit). To ensure this, we always initialize the
* scope chain when pushing frames in the VM, and only initialize it when
* pushing frames in JIT code when the above situation applies.
*
* NB: 'fp->hasCallObj()' implies that fp->callObj() needs to be 'put' when
* the frame is popped. Since the scope chain of a non-strict eval frame
* contains the call object of the parent (function) frame, it is possible
* to have:
* !fp->hasCall() && fp->scopeChain().isCall()
*/
inline HandleObject scopeChain() const;
inline ScopeObject &aliasedVarScope(ScopeCoordinate sc) const;
inline GlobalObject &global() const;
bool hasCallObj() const {
bool ret = !!(flags_ & HAS_CALL_OBJ);
JS_ASSERT_IF(ret, !isNonStrictEvalFrame());
return ret;
}
inline CallObject &callObj() const;
inline void initScopeChain(CallObject &callobj);
inline void setScopeChain(JSObject &obj);
/*
* Variables object
*
* Given that a (non-dummy) StackFrame corresponds roughly to a ES5
* Execution Context (ES5 10.3), StackFrame::varObj corresponds to the
 * VariableEnvironment component of an Execution Context. Intuitively, the
* variables object is where new bindings (variables and functions) are
* stored. One might expect that this is either the callObj or
* scopeChain.globalObj for function or global code, respectively, however
* the JSAPI allows calls of Execute to specify a variables object on the
* scope chain other than the call/global object. This allows embeddings to
* run multiple scripts under the same global, each time using a new
* variables object to collect and discard the script's global variables.
*/
inline JSObject &varObj();
/* Block chain */
bool hasBlockChain() const {
return (flags_ & HAS_BLOCKCHAIN) && blockChain_;
}
StaticBlockObject *maybeBlockChain() {
return (flags_ & HAS_BLOCKCHAIN) ? blockChain_ : NULL;
}
StaticBlockObject &blockChain() const {
JS_ASSERT(hasBlockChain());
return *blockChain_;
}
/* Enter/exit execution of a lexical block. */
bool pushBlock(JSContext *cx, StaticBlockObject &block);
void popBlock(JSContext *cx);
/* Exits (via execution or exception) a with block. */
void popWith(JSContext *cx);
/*
* Prologue for function frames: make a call object for heavyweight
* functions, and maintain type nesting invariants.
*/
inline bool functionPrologue(JSContext *cx);
/*
* Epilogue for function frames: put any args or call object for the frame
* which may still be live, and maintain type nesting invariants. Note:
* this does mark the epilogue as having been completed, since the frame is
* about to be popped. Use updateEpilogueFlags for this.
*/
inline void functionEpilogue(JSContext *cx);
/*
* If callObj() or argsObj() have already been put, update our flags
* accordingly. This call must be followed by a later functionEpilogue.
*/
inline void updateEpilogueFlags();
inline bool maintainNestingState() const;
/*
* Frame compartment
*
@ -945,11 +857,11 @@ class StackFrame
/* JIT rejoin state */
JSRejoinState rejoin() const {
FrameRejoinState rejoin() const {
return rejoin_;
}
void setRejoin(JSRejoinState state) {
void setRejoin(FrameRejoinState state) {
rejoin_ = state;
}
@ -1024,35 +936,53 @@ class StackFrame
}
/*
* Generator-specific members
*
* A non-eval function frame may optionally be the activation of a
* generator. For the most part, generator frames act like ordinary frames.
* For exceptions, see js_FloatingFrameIfGenerator.
* A "generator" frame is a function frame associated with a generator.
* Since generators are not executed LIFO, the VM copies a single abstract
* generator frame back and forth between the LIFO VM stack (when the
* generator is active) and a snapshot stored in JSGenerator (when the
* generator is inactive). A generator frame is comprised of a StackFrame
* structure and the values that make up the arguments, locals, and
* expression stack. The layout in the JSGenerator snapshot matches the
* layout on the stack (see the "VM stack layout" comment above).
*/
bool isGeneratorFrame() const {
return !!(flags_ & GENERATOR);
bool ret = flags_ & GENERATOR;
JS_ASSERT_IF(ret, isNonEvalFunctionFrame());
return ret;
}
bool isFloatingGenerator() const {
JS_ASSERT_IF(flags_ & FLOATING_GENERATOR, isGeneratorFrame());
return !!(flags_ & FLOATING_GENERATOR);
void initGeneratorFrame() const {
JS_ASSERT(!isGeneratorFrame());
JS_ASSERT(isNonEvalFunctionFrame());
flags_ |= GENERATOR;
}
void initFloatingGenerator() {
JS_ASSERT(!(flags_ & GENERATOR));
flags_ |= (GENERATOR | FLOATING_GENERATOR);
Value *generatorArgsSnapshotBegin() const {
JS_ASSERT(isGeneratorFrame());
return actuals() - 2;
}
void unsetFloatingGenerator() {
flags_ &= ~FLOATING_GENERATOR;
Value *generatorArgsSnapshotEnd() const {
JS_ASSERT(isGeneratorFrame());
return (Value *)this;
}
void setFloatingGenerator() {
flags_ |= FLOATING_GENERATOR;
Value *generatorSlotsSnapshotBegin() const {
JS_ASSERT(isGeneratorFrame());
return (Value *)(this + 1);
}
enum TriggerPostBarriers {
DoPostBarrier = true,
NoPostBarrier = false
};
template <class T, class U, TriggerPostBarriers doPostBarrier>
void copyFrameAndValues(JSContext *cx, StackFrame *fp, T *vp,
StackFrame *otherfp, U *othervp, Value *othersp);
JSGenerator *maybeSuspendedGenerator(JSRuntime *rt);
/*
* js::Execute pushes both global and function frames (since eval() in a
* function pushes a frame with isFunctionFrame() && isEvalFrame()). Most
@ -1081,6 +1011,11 @@ class StackFrame
return !!(flags_ & CONSTRUCTING);
}
bool hasCallObj() const {
JS_ASSERT(isStrictEvalFrame() || fun()->isHeavyweight());
return flags_ & HAS_CALL_OBJ;
}
/*
* The method JIT call/apply optimization can erase Function.{call,apply}
* invocations from the stack and push the callee frame directly. The base
@ -1103,10 +1038,6 @@ class StackFrame
flags_ |= PREV_UP_TO_DATE;
}
bool hasOverflowArgs() const {
return !!(flags_ & OVERFLOW_ARGS);
}
bool isYielding() {
return !!(flags_ & YIELDING);
}
@ -1130,6 +1061,9 @@ class StackFrame
public:
/* Public, but only for JIT use: */
inline void resetInlinePrev(StackFrame *prevfp, jsbytecode *prevpc);
inline void initInlineFrame(JSFunction *fun, StackFrame *prevfp, jsbytecode *prevpc);
static size_t offsetOfFlags() {
return offsetof(StackFrame, flags_);
}
@ -1154,14 +1088,14 @@ class StackFrame
return offsetof(StackFrame, rval_);
}
static size_t offsetOfArgsObj() {
return offsetof(StackFrame, argsObj_);
}
static ptrdiff_t offsetOfNcode() {
return offsetof(StackFrame, ncode_);
}
static ptrdiff_t offsetOfArgsObj() {
return offsetof(StackFrame, argsObj_);
}
static ptrdiff_t offsetOfCallee(JSFunction *fun) {
JS_ASSERT(fun != NULL);
return -(fun->nargs + 2) * sizeof(Value);
@ -1235,11 +1169,11 @@ class FrameRegs
Value *sp;
jsbytecode *pc;
private:
JSInlinedSite *inlined_;
InlinedSite *inlined_;
StackFrame *fp_;
public:
StackFrame *fp() const { return fp_; }
JSInlinedSite *inlined() const { return inlined_; }
InlinedSite *inlined() const { return inlined_; }
/* For jit use (need constant): */
static const size_t offsetOfFp = 3 * sizeof(void *);
@ -1250,6 +1184,16 @@ class FrameRegs
}
void clearInlined() { inlined_ = NULL; }
unsigned stackDepth() const {
JS_ASSERT(sp >= fp_->base());
return sp - fp_->base();
}
Value *spForStackDepth(unsigned depth) const {
JS_ASSERT(fp_->script()->nfixed + depth <= fp_->script()->nslots);
return fp_->base() + depth;
}
/* For generator: */
void rebaseFromTo(const FrameRegs &from, StackFrame &to) {
fp_ = &to;
@ -1496,6 +1440,10 @@ class StackSpace
StackSegment &findContainingSegment(const StackFrame *target) const;
bool containsFast(StackFrame *fp) {
return (Value *)fp >= base_ && (Value *)fp <= trustedEnd_;
}
public:
StackSpace();
bool init();
@ -1548,7 +1496,7 @@ class StackSpace
/* Called during GC: mark segments, frames, and slots under firstUnused. */
void mark(JSTracer *trc);
void markFrameSlots(JSTracer *trc, StackFrame *fp, Value *slotsEnd, jsbytecode *pc);
void markFrameValues(JSTracer *trc, StackFrame *fp, Value *slotsEnd, jsbytecode *pc);
/* Called during GC: sets active flag on compartments with active frames. */
void markActiveCompartments();