Bug 881902 - Remove ContextStack and StackSpace. r=luke,njn

This commit is contained in:
Jan de Mooij 2013-06-21 08:28:06 +02:00
Родитель a64f133e8b
Коммит f5b9908a45
43 изменённых файлов: 702 добавлений и 1659 удалений

Просмотреть файл

@ -135,7 +135,7 @@ struct RuntimeSizes
size_t dtoa;
size_t temporary;
size_t regexpData;
size_t stack;
size_t interpreterStack;
size_t gcMarker;
size_t mathCache;
size_t scriptData;

Просмотреть файл

@ -405,7 +405,7 @@ static inline bool
WarnOnTooManyArgs(JSContext *cx, const CallArgs &args)
{
if (args.length() > 1) {
Rooted<JSScript*> script(cx, cx->stack.currentScript());
Rooted<JSScript*> script(cx, cx->currentScript());
if (script && !script->warnedAboutTwoArgumentEval) {
static const char TWO_ARGUMENT_WARNING[] =
"Support for eval(code, scopeObject) has been removed. "

Просмотреть файл

@ -443,8 +443,8 @@ IntlInitialize(JSContext *cx, HandleObject obj, Handle<PropertyName*> initialize
JS_ASSERT(initializerValue.isObject());
JS_ASSERT(initializerValue.toObject().is<JSFunction>());
InvokeArgsGuard args;
if (!cx->stack.pushInvokeArgs(cx, 3, &args))
InvokeArgs args(cx);
if (!args.init(3))
return false;
args.setCallee(initializerValue);
@ -509,8 +509,8 @@ GetInternals(JSContext *cx, HandleObject obj, MutableHandleObject internals)
JS_ASSERT(getInternalsValue.isObject());
JS_ASSERT(getInternalsValue.toObject().is<JSFunction>());
InvokeArgsGuard args;
if (!cx->stack.pushInvokeArgs(cx, 1, &args))
InvokeArgs args(cx);
if (!args.init(1))
return false;
args.setCallee(getInternalsValue);

Просмотреть файл

@ -530,14 +530,14 @@ obj_getPrototypeOf(JSContext *cx, unsigned argc, Value *vp)
* Implement [[Prototype]]-getting -- particularly across compartment
* boundaries -- by calling a cached __proto__ getter function.
*/
InvokeArgsGuard nested;
if (!cx->stack.pushInvokeArgs(cx, 0, &nested))
InvokeArgs args2(cx);
if (!args2.init(0))
return false;
nested.setCallee(cx->global()->protoGetter());
nested.setThis(args[0]);
if (!Invoke(cx, nested))
args2.setCallee(cx->global()->protoGetter());
args2.setThis(args[0]);
if (!Invoke(cx, args2))
return false;
args.rval().set(nested.rval());
args.rval().set(args2.rval());
return true;
}

Просмотреть файл

@ -133,7 +133,7 @@ ParallelArrayObject::constructHelper(JSContext *cx, MutableHandleFunction ctor,
if (cx->typeInferenceEnabled()) {
jsbytecode *pc;
RootedScript script(cx, cx->stack.currentScript(&pc));
RootedScript script(cx, cx->currentScript(&pc));
if (script) {
if (ctor->nonLazyScript()->shouldCloneAtCallsite) {
ctor.set(CloneFunctionAtCallsite(cx, ctor, script, pc));
@ -163,8 +163,8 @@ ParallelArrayObject::constructHelper(JSContext *cx, MutableHandleFunction ctor,
}
}
InvokeArgsGuard args;
if (!cx->stack.pushInvokeArgs(cx, args0.length(), &args))
InvokeArgs args(cx);
if (!args.init(args0.length()))
return false;
args.setCallee(ObjectValue(*ctor));

Просмотреть файл

@ -513,24 +513,6 @@ MarkValueInternal(JSTracer *trc, Value *v)
}
}
static inline void
MarkValueInternalMaybeNullPayload(JSTracer *trc, Value *v)
{
if (v->isMarkable()) {
void *thing = v->toGCThing();
if (thing) {
JS_SET_TRACING_LOCATION(trc, (void *)v);
MarkKind(trc, &thing, v->gcKind());
if (v->isString())
v->setString((JSString *)thing);
else
v->setObjectOrNull((JSObject *)thing);
return;
}
}
JS_UNSET_TRACING_LOCATION(trc);
}
void
gc::MarkValue(JSTracer *trc, EncapsulatedValue *v, const char *name)
{
@ -581,16 +563,6 @@ gc::MarkValueRootRange(JSTracer *trc, size_t len, Value *vec, const char *name)
}
}
void
gc::MarkValueRootRangeMaybeNullPayload(JSTracer *trc, size_t len, Value *vec, const char *name)
{
JS_ROOT_MARKING_ASSERT(trc);
for (size_t i = 0; i < len; ++i) {
JS_SET_TRACING_INDEX(trc, name, i);
MarkValueInternalMaybeNullPayload(trc, &vec[i]);
}
}
bool
gc::IsValueMarked(Value *v)
{

Просмотреть файл

@ -181,9 +181,6 @@ MarkValueRootRange(JSTracer *trc, Value *begin, Value *end, const char *name)
MarkValueRootRange(trc, end - begin, begin, name);
}
void
MarkValueRootRangeMaybeNullPayload(JSTracer *trc, size_t len, Value *vec, const char *name);
void
MarkTypeRoot(JSTracer *trc, types::Type *v, const char *name);

Просмотреть файл

@ -737,7 +737,7 @@ js::gc::MarkRuntime(JSTracer *trc, bool useSavedRoots)
c->debugScopes->mark(trc);
}
rt->stackSpace.mark(trc);
MarkInterpreterActivations(rt, trc);
#ifdef JS_ION
ion::MarkJitActivations(rt, trc);

Просмотреть файл

@ -411,8 +411,8 @@ HandleDynamicLinkFailure(JSContext *cx, CallArgs args, AsmJSModule &module, Hand
unsigned argc = args.length();
InvokeArgsGuard args2;
if (!cx->stack.pushInvokeArgs(cx, argc, &args2))
InvokeArgs args2(cx);
if (!args2.init(argc))
return false;
args2.setCallee(ObjectValue(*fun));

Просмотреть файл

@ -1204,7 +1204,7 @@ ion::FinishBailoutToBaseline(BaselineBailoutInfo *bailoutInfo)
// Check that we can get the current script's PC.
#ifdef DEBUG
jsbytecode *pc;
cx->stack.currentScript(&pc);
cx->currentScript(&pc);
IonSpew(IonSpew_BaselineBailouts, " Got pc=%p", pc);
#endif

Просмотреть файл

@ -963,9 +963,6 @@ JSRuntime::init(uint32_t maxbytes)
dateTimeInfo.updateTimeZoneAdjustment();
if (!stackSpace.init())
return false;
if (!scriptDataTable.init())
return false;
@ -5784,8 +5781,8 @@ JS_New(JSContext *cx, JSObject *ctorArg, unsigned argc, jsval *argv)
// is not a simple variation of JSOP_CALL. We have to determine what class
// of object to create, create it, and clamp the return value to an object,
// among other details. InvokeConstructor does the hard work.
InvokeArgsGuard args;
if (!cx->stack.pushInvokeArgs(cx, argc, &args))
InvokeArgs args(cx);
if (!args.init(argc))
return NULL;
args.setCallee(ObjectValue(*ctor));

Просмотреть файл

@ -1009,17 +1009,17 @@ array_toString(JSContext *cx, unsigned argc, Value *vp)
return true;
}
InvokeArgsGuard ag;
if (!cx->stack.pushInvokeArgs(cx, 0, &ag))
InvokeArgs args2(cx);
if (!args2.init(0))
return false;
ag.setCallee(join);
ag.setThis(ObjectValue(*obj));
args2.setCallee(join);
args2.setThis(ObjectValue(*obj));
/* Do the call. */
if (!Invoke(cx, ag))
if (!Invoke(cx, args2))
return false;
args.rval().set(ag.rval());
args.rval().set(args2.rval());
return true;
}
@ -1421,20 +1421,20 @@ SortComparatorFunction::operator()(const Value &a, const Value &b, bool *lessOrE
if (!JS_CHECK_OPERATION_LIMIT(cx))
return false;
InvokeArgsGuard &ag = fig.args();
if (!ag.pushed() && !cx->stack.pushInvokeArgs(cx, 2, &ag))
InvokeArgs &args = fig.args();
if (!args.init(2))
return false;
ag.setCallee(fval);
ag.setThis(UndefinedValue());
ag[0] = a;
ag[1] = b;
args.setCallee(fval);
args.setThis(UndefinedValue());
args[0] = a;
args[1] = b;
if (!fig.invoke(cx))
return false;
double cmp;
if (!ToNumber(cx, ag.rval(), &cmp))
if (!ToNumber(cx, args.rval(), &cmp))
return false;
/*
@ -2637,7 +2637,7 @@ array_filter(JSContext *cx, unsigned argc, Value *vp)
/* Step 9. */
JS_ASSERT(!InParallelSection());
FastInvokeGuard fig(cx, ObjectValue(*callable));
InvokeArgsGuard &ag = fig.args();
InvokeArgs &args2 = fig.args();
RootedValue kValue(cx);
while (k < len) {
if (!JS_CHECK_OPERATION_LIMIT(cx))
@ -2650,17 +2650,17 @@ array_filter(JSContext *cx, unsigned argc, Value *vp)
/* Step c.ii-iii. */
if (!kNotPresent) {
if (!ag.pushed() && !cx->stack.pushInvokeArgs(cx, 3, &ag))
if (!args2.init(3))
return false;
ag.setCallee(ObjectValue(*callable));
ag.setThis(thisv);
ag[0] = kValue;
ag[1] = NumberValue(k);
ag[2] = ObjectValue(*obj);
args2.setCallee(ObjectValue(*callable));
args2.setThis(thisv);
args2[0] = kValue;
args2[1] = NumberValue(k);
args2[2] = ObjectValue(*obj);
if (!fig.invoke(cx))
return false;
if (ToBoolean(ag.rval())) {
if (ToBoolean(args2.rval())) {
if (!SetArrayElement(cx, arr, to, kValue))
return false;
to++;

Просмотреть файл

@ -126,7 +126,7 @@ JSRuntime::sizeOfIncludingThis(JSMallocSizeOfFun mallocSizeOf, JS::RuntimeSizes
rtSizes->regexpData = bumpAlloc_ ? bumpAlloc_->sizeOfNonHeapData() : 0;
rtSizes->stack = stackSpace.sizeOf();
rtSizes->interpreterStack = interpreterStack_.sizeOfExcludingThis(mallocSizeOf);
rtSizes->gcMarker = gcMarker.sizeOfExcludingThis(mallocSizeOf);
@ -560,7 +560,7 @@ checkReportFlags(JSContext *cx, unsigned *flags)
* otherwise. We assume that if the top frame is a native, then it is
* strict if the nearest scripted frame is strict, see bug 536306.
*/
JSScript *script = cx->stack.currentScript();
JSScript *script = cx->currentScript();
if (script && script->strict)
*flags &= ~JSREPORT_WARNING;
else if (cx->hasExtraWarningsOption())
@ -1179,7 +1179,6 @@ JSContext::JSContext(JSRuntime *rt)
enterCompartmentDepth_(0),
savedFrameChains_(),
defaultCompartmentObject_(NULL),
stack(thisDuringConstruction()),
cycleDetectorSet(thisDuringConstruction()),
errorReporter(NULL),
operationCallback(NULL),
@ -1302,14 +1301,9 @@ JSContext::runningWithTrustedPrincipals() const
bool
JSContext::saveFrameChain()
{
if (!stack.saveFrameChain())
if (!savedFrameChains_.append(SavedFrameChain(compartment(), enterCompartmentDepth_)))
return false;
if (!savedFrameChains_.append(SavedFrameChain(compartment(), enterCompartmentDepth_))) {
stack.restoreFrameChain();
return false;
}
if (Activation *act = mainThread().activation())
act->saveFrameChain();
@ -1331,8 +1325,6 @@ JSContext::restoreFrameChain()
setCompartment(sfc.compartment);
enterCompartmentDepth_ = sfc.enterCompartmentCount;
stack.restoreFrameChain();
if (Activation *act = mainThread().activation())
act->restoreFrameChain();
@ -1532,7 +1524,7 @@ JSContext::findVersion() const
if (hasVersionOverride)
return versionOverride;
if (JSScript *script = stack.currentScript(NULL, js::ContextStack::ALLOW_CROSS_COMPARTMENT))
if (JSScript *script = currentScript(NULL, ALLOW_CROSS_COMPARTMENT))
return script->getVersion();
return defaultVersion;

Просмотреть файл

@ -726,9 +726,6 @@ struct JSRuntime : public JS::shadow::Runtime,
void assertValidThread() const {}
#endif
/* Keeper of the contiguous stack used by all contexts in this thread. */
js::StackSpace stackSpace;
/* Temporary arena pool used while compiling and decompiling. */
static const size_t TEMP_LIFO_ALLOC_PRIMARY_CHUNK_SIZE = 4 * 1024;
js::LifoAlloc tempLifoAlloc;
@ -750,6 +747,9 @@ struct JSRuntime : public JS::shadow::Runtime,
JSObject *selfHostingGlobal_;
/* Space for interpreter frames. */
js::InterpreterStack interpreterStack_;
JSC::ExecutableAllocator *createExecutableAllocator(JSContext *cx);
WTF::BumpPointerAllocator *createBumpPointerAllocator(JSContext *cx);
js::ion::IonRuntime *createIonRuntime(JSContext *cx);
@ -777,6 +777,9 @@ struct JSRuntime : public JS::shadow::Runtime,
bool hasIonRuntime() const {
return !!ionRuntime_;
}
js::InterpreterStack &interpreterStack() {
return interpreterStack_;
}
//-------------------------------------------------------------------------
// Self-hosting support
@ -1686,9 +1689,6 @@ struct JSContext : js::ThreadSafeContext,
inline void setDefaultCompartmentObjectIfUnset(JSObject *obj);
JSObject *maybeDefaultCompartmentObject() const { return defaultCompartmentObject_; }
/* Current execution stack. */
js::ContextStack stack;
/*
* Current global. This is only safe to use within the scope of the
* AutoCompartment from which it's called.
@ -1748,8 +1748,7 @@ struct JSContext : js::ThreadSafeContext,
* default version.
*/
void maybeMigrateVersionOverride() {
JS_ASSERT(stack.empty());
if (JS_UNLIKELY(isVersionOverridden())) {
if (JS_UNLIKELY(isVersionOverridden()) && !currentlyRunning()) {
defaultVersion = versionOverride;
clearVersionOverride();
}
@ -1820,6 +1819,19 @@ struct JSContext : js::ThreadSafeContext,
return mainThread().activation()->asInterpreter()->regs();
}
/*
* Get the topmost script and optional pc on the stack. By default, this
* function only returns a JSScript in the current compartment, returning
* NULL if the current script is in a different compartment. This behavior
* can be overridden by passing ALLOW_CROSS_COMPARTMENT.
*/
enum MaybeAllowCrossCompartment {
DONT_ALLOW_CROSS_COMPARTMENT = false,
ALLOW_CROSS_COMPARTMENT = true
};
inline JSScript *currentScript(jsbytecode **pc = NULL,
MaybeAllowCrossCompartment = DONT_ALLOW_CROSS_COMPARTMENT) const;
#ifdef MOZ_TRACE_JSCALLS
/* Function entry/exit debugging callback. */
JSFunctionCallback functionCallback;
@ -2015,6 +2027,12 @@ class MOZ_STACK_CLASS AutoKeepAtoms
~AutoKeepAtoms() { JS_UNKEEP_ATOMS(rt); }
};
// Maximum supported value of arguments.length. This bounds the maximum
// number of arguments that can be supplied to Function.prototype.apply.
// This value also bounds the number of elements parsed in an array
// initialiser.
static const unsigned ARGS_LENGTH_MAX = 500 * 1000;
} /* namespace js */
class JSAutoResolveFlags

Просмотреть файл

@ -16,6 +16,7 @@
#include "builtin/Object.h" // For js::obj_construct
#include "frontend/ParseMaps.h"
#include "ion/IonFrames.h" // For GetPcScript
#include "vm/Interpreter.h"
#include "vm/Probes.h"
#include "vm/RegExpObject.h"
@ -123,25 +124,6 @@ NewObjectCache::newObjectFromHit(JSContext *cx, EntryIndex entry_, js::gc::Initi
return NULL;
}
struct PreserveRegsGuard
{
PreserveRegsGuard(JSContext *cx, FrameRegs &regs)
: prevContextRegs(cx->stack.maybeRegs()), cx(cx), regs_(regs) {
cx->stack.repointRegs(&regs_);
}
~PreserveRegsGuard() {
JS_ASSERT(cx->stack.maybeRegs() == &regs_);
*prevContextRegs = regs_;
cx->stack.repointRegs(prevContextRegs);
}
FrameRegs *prevContextRegs;
private:
JSContext *cx;
FrameRegs &regs_;
};
#ifdef JS_CRASH_DIAGNOSTICS
class CompartmentChecker
{
@ -575,6 +557,48 @@ JSContext::setCompartment(JSCompartment *comp)
allocator_ = zone_ ? &zone_->allocator : NULL;
}
inline JSScript *
JSContext::currentScript(jsbytecode **ppc,
MaybeAllowCrossCompartment allowCrossCompartment) const
{
if (ppc)
*ppc = NULL;
js::Activation *act = mainThread().activation();
while (act && (act->cx() != this || !act->isActive()))
act = act->prev();
if (!act)
return NULL;
JS_ASSERT(act->cx() == this);
#ifdef JS_ION
if (act->isJit()) {
JSScript *script = NULL;
js::ion::GetPcScript(const_cast<JSContext *>(this), &script, ppc);
if (!allowCrossCompartment && script->compartment() != compartment())
return NULL;
return script;
}
#endif
JS_ASSERT(act->isInterpreter());
js::StackFrame *fp = act->asInterpreter()->current();
JS_ASSERT(!fp->runningInJit());
JSScript *script = fp->script();
if (!allowCrossCompartment && script->compartment() != compartment())
return NULL;
if (ppc) {
*ppc = act->asInterpreter()->regs().pc;
JS_ASSERT(*ppc >= script->code && *ppc < script->code + script->length);
}
return script;
}
template <typename T>
inline bool
js::ThreadSafeContext::isInsideCurrentZone(T thing) const

Просмотреть файл

@ -2590,16 +2590,16 @@ date_toJSON(JSContext *cx, unsigned argc, Value *vp)
}
/* Step 6. */
InvokeArgsGuard ag;
if (!cx->stack.pushInvokeArgs(cx, 0, &ag))
InvokeArgs args2(cx);
if (!args2.init(0))
return false;
ag.setCallee(toISO);
ag.setThis(ObjectValue(*obj));
args2.setCallee(toISO);
args2.setThis(ObjectValue(*obj));
if (!Invoke(cx, ag))
if (!Invoke(cx, args2))
return false;
args.rval().set(ag.rval());
args.rval().set(args2.rval());
return true;
}

Просмотреть файл

@ -1238,8 +1238,6 @@ JSObject *
JSAbstractFramePtr::scopeChain(JSContext *cx)
{
AbstractFramePtr frame = Valueify(*this);
JS_ASSERT_IF(frame.isStackFrame(),
cx->stack.space().containsSlow(frame.asStackFrame()));
RootedObject scopeChain(cx, frame.scopeChain());
AutoCompartment ac(cx, scopeChain);
return GetDebugScopeForFrame(cx, frame);
@ -1249,9 +1247,6 @@ JSObject *
JSAbstractFramePtr::callObject(JSContext *cx)
{
AbstractFramePtr frame = Valueify(*this);
JS_ASSERT_IF(frame.isStackFrame(),
cx->stack.space().containsSlow(frame.asStackFrame()));
if (!frame.isFunctionFrame())
return NULL;

Просмотреть файл

@ -800,7 +800,7 @@ js::SetActivityCallback(JSRuntime *rt, ActivityCallback cb, void *arg)
JS_FRIEND_API(bool)
js::IsContextRunningJS(JSContext *cx)
{
return !cx->stack.empty();
return cx->currentlyRunning();
}
JS_FRIEND_API(JS::GCSliceCallback)

Просмотреть файл

@ -817,8 +817,8 @@ js_fun_call(JSContext *cx, unsigned argc, Value *vp)
}
/* Allocate stack space for fval, obj, and the args. */
InvokeArgsGuard args;
if (!cx->stack.pushInvokeArgs(cx, argc, &args))
InvokeArgs args(cx);
if (!args.init(argc))
return JS_FALSE;
/* Push fval, thisv, and the args. */
@ -833,13 +833,13 @@ js_fun_call(JSContext *cx, unsigned argc, Value *vp)
#ifdef JS_ION
static bool
PushBaselineFunApplyArguments(JSContext *cx, ion::IonFrameIterator &frame, InvokeArgsGuard &args,
PushBaselineFunApplyArguments(JSContext *cx, ion::IonFrameIterator &frame, InvokeArgs &args,
Value *vp)
{
unsigned length = frame.numActualArgs();
JS_ASSERT(length <= StackSpace::ARGS_LENGTH_MAX);
JS_ASSERT(length <= ARGS_LENGTH_MAX);
if (!cx->stack.pushInvokeArgs(cx, length, &args))
if (!args.init(length))
return false;
/* Push fval, obj, and aobj's elements as args. */
@ -867,7 +867,7 @@ js_fun_apply(JSContext *cx, unsigned argc, Value *vp)
if (argc < 2 || vp[3].isNullOrUndefined())
return js_fun_call(cx, (argc > 0) ? 1 : 0, vp);
InvokeArgsGuard args;
InvokeArgs args(cx);
/*
* GuardFunApplyArgumentsOptimization already called IsOptimizedArguments,
@ -895,9 +895,9 @@ js_fun_apply(JSContext *cx, unsigned argc, Value *vp)
ion::InlineFrameIterator iter(cx, &frame);
unsigned length = iter.numActualArgs();
JS_ASSERT(length <= StackSpace::ARGS_LENGTH_MAX);
JS_ASSERT(length <= ARGS_LENGTH_MAX);
if (!cx->stack.pushInvokeArgs(cx, length, &args))
if (!args.init(length))
return false;
/* Push fval, obj, and aobj's elements as args. */
@ -932,9 +932,9 @@ js_fun_apply(JSContext *cx, unsigned argc, Value *vp)
{
StackFrame *fp = cx->interpreterFrame();
unsigned length = fp->numActualArgs();
JS_ASSERT(length <= StackSpace::ARGS_LENGTH_MAX);
JS_ASSERT(length <= ARGS_LENGTH_MAX);
if (!cx->stack.pushInvokeArgs(cx, length, &args))
if (!args.init(length))
return false;
/* Push fval, obj, and aobj's elements as args. */
@ -961,12 +961,12 @@ js_fun_apply(JSContext *cx, unsigned argc, Value *vp)
return false;
/* Step 6. */
if (length > StackSpace::ARGS_LENGTH_MAX) {
if (length > ARGS_LENGTH_MAX) {
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_TOO_MANY_FUN_APPLY_ARGS);
return false;
}
if (!cx->stack.pushInvokeArgs(cx, length, &args))
if (!args.init(length))
return false;
/* Push fval, obj, and aobj's elements as args. */
@ -1118,7 +1118,7 @@ js::CallOrConstructBoundFunction(JSContext *cx, unsigned argc, Value *vp)
/* 15.3.4.5.1 step 1, 15.3.4.5.2 step 3. */
unsigned argslen = fun->getBoundFunctionArgumentCount();
if (argc + argslen > StackSpace::ARGS_LENGTH_MAX) {
if (argc + argslen > ARGS_LENGTH_MAX) {
js_ReportAllocationOverflow(cx);
return false;
}
@ -1129,8 +1129,8 @@ js::CallOrConstructBoundFunction(JSContext *cx, unsigned argc, Value *vp)
/* 15.3.4.5.1 step 2. */
const Value &boundThis = fun->getBoundFunctionThis();
InvokeArgsGuard args;
if (!cx->stack.pushInvokeArgs(cx, argc + argslen, &args))
InvokeArgs args(cx);
if (!args.init(argc + argslen))
return false;
/* 15.3.4.5.1, 15.3.4.5.2 step 4. */

Просмотреть файл

@ -2558,6 +2558,7 @@ PurgeRuntime(JSRuntime *rt)
comp->purge();
rt->freeLifoAlloc.transferUnusedFrom(&rt->tempLifoAlloc);
rt->interpreterStack().purge(rt);
rt->gsnCache.purge();
rt->newObjectCache.purge();
@ -4132,8 +4133,6 @@ AutoGCSlice::AutoGCSlice(JSRuntime *rt)
* is set at the beginning of the mark phase. During incremental GC, we also
* set it at the start of every phase.
*/
rt->stackSpace.markActiveCompartments();
for (ActivationIterator iter(rt); !iter.done(); ++iter)
iter.activation()->compartment()->zone()->active = true;

Просмотреть файл

@ -5426,7 +5426,7 @@ types::MarkIteratorUnknownSlow(JSContext *cx)
/* Check whether we are actually at an ITER opcode. */
jsbytecode *pc;
RootedScript script(cx, cx->stack.currentScript(&pc));
RootedScript script(cx, cx->currentScript(&pc));
if (!script || !pc)
return;

Просмотреть файл

@ -521,7 +521,7 @@ GetTypeCallerInitObject(JSContext *cx, JSProtoKey key)
{
if (cx->typeInferenceEnabled()) {
jsbytecode *pc;
RootedScript script(cx, cx->stack.currentScript(&pc));
RootedScript script(cx, cx->currentScript(&pc));
if (script)
return TypeScript::InitObject(cx, script, pc, key);
}
@ -961,7 +961,7 @@ TypeScript::MonitorUnknown(JSContext *cx, JSScript *script, jsbytecode *pc)
/* static */ inline void
TypeScript::GetPcScript(JSContext *cx, JSScript **script, jsbytecode **pc)
{
*script = cx->stack.currentScript(pc);
*script = cx->currentScript(pc);
}
/* static */ inline void

Просмотреть файл

@ -1392,15 +1392,15 @@ GeneratorState::GeneratorState(JSContext *cx, JSGenerator *gen, JSGeneratorState
GeneratorState::~GeneratorState()
{
gen_->fp->setSuspended();
if (entered_)
cx_->leaveGenerator(gen_);
}
StackFrame *
GeneratorState::pushInterpreterFrame(JSContext *cx)
GeneratorState::pushInterpreterFrame(JSContext *cx, FrameGuard *)
{
gfg_.construct();
/*
* Write barrier is needed since the generator stack can be updated,
* and it's not barriered in any other way. We need to do it before
@ -1414,21 +1414,17 @@ GeneratorState::pushInterpreterFrame(JSContext *cx)
*/
GeneratorWriteBarrierPre(cx, gen_);
if (!cx->stack.pushGeneratorFrame(cx, gen_, gfg_.addr())) {
SetGeneratorClosed(cx, gen_);
return NULL;
}
/*
* Don't change the state until after the frame is successfully pushed
* or else we might fail to scan some generator values.
*/
gen_->state = futureState_;
gen_->regs = cx->stack.regs();
gen_->fp->clearSuspended();
cx->enterGenerator(gen_); /* OOM check above. */
entered_ = true;
return gfg_.ref().fp();
return gen_->fp;
}
static void
@ -1504,13 +1500,14 @@ js_NewGenerator(JSContext *cx, const FrameRegs &stackRegs)
JS_ASSERT(nbytes % sizeof(Value) == 0);
JS_STATIC_ASSERT(sizeof(StackFrame) % sizeof(HeapValue) == 0);
JSGenerator *gen = (JSGenerator *) cx->malloc_(nbytes);
JSGenerator *gen = (JSGenerator *) cx->calloc_(nbytes);
if (!gen)
return NULL;
SetValueRangeToUndefined((Value *)gen, nbytes / sizeof(Value));
/* Cut up floatingStack space. */
HeapValue *genvp = gen->stackSnapshot;
SetValueRangeToUndefined((Value *)genvp, vplen);
StackFrame *genfp = reinterpret_cast<StackFrame *>(genvp + vplen);
/* Initialize JSGenerator. */
@ -1523,7 +1520,7 @@ js_NewGenerator(JSContext *cx, const FrameRegs &stackRegs)
gen->regs.rebaseFromTo(stackRegs, *genfp);
genfp->copyFrameAndValues<StackFrame::DoPostBarrier>(cx, (Value *)genvp, stackfp,
stackvp, stackRegs.sp);
genfp->setSuspended();
obj->setPrivate(gen);
return obj;
}

Просмотреть файл

@ -1433,7 +1433,7 @@ bool
js::NewObjectScriptedCall(JSContext *cx, MutableHandleObject pobj)
{
jsbytecode *pc;
RootedScript script(cx, cx->stack.currentScript(&pc));
RootedScript script(cx, cx->currentScript(&pc));
gc::AllocKind allocKind = NewObjectGCKind(&ObjectClass);
NewObjectKind newKind = script
? UseNewTypeForInitializer(cx, script, pc, &ObjectClass)
@ -1641,7 +1641,7 @@ js_InferFlags(JSContext *cx, unsigned defaultFlags)
* handle the case of cross-compartment property access.
*/
jsbytecode *pc;
JSScript *script = cx->stack.currentScript(&pc, ContextStack::ALLOW_CROSS_COMPARTMENT);
JSScript *script = cx->currentScript(&pc, JSContext::ALLOW_CROSS_COMPARTMENT);
if (!script)
return defaultFlags;
@ -3795,7 +3795,7 @@ NativeGetInline(JSContext *cx,
{
jsbytecode *pc;
JSScript *script = cx->stack.currentScript(&pc);
JSScript *script = cx->currentScript(&pc);
if (script && script->hasAnalysis()) {
analyze::Bytecode *code = script->analysis()->maybeCode(pc);
if (code)
@ -3930,7 +3930,7 @@ GetPropertyHelperInline(JSContext *cx,
*/
if (vp.isUndefined()) {
jsbytecode *pc = NULL;
RootedScript script(cx, cx->stack.currentScript(&pc));
RootedScript script(cx, cx->currentScript(&pc));
if (!pc)
return true;
JSOp op = (JSOp) *pc;
@ -4157,7 +4157,7 @@ static bool
MaybeReportUndeclaredVarAssignment(JSContext *cx, JSString *propname)
{
{
JSScript *script = cx->stack.currentScript(NULL, ContextStack::ALLOW_CROSS_COMPARTMENT);
JSScript *script = cx->currentScript(NULL, JSContext::ALLOW_CROSS_COMPARTMENT);
if (!script)
return true;
@ -4181,7 +4181,7 @@ js::ReportIfUndeclaredVarAssignment(JSContext *cx, HandleString propname)
{
{
jsbytecode *pc;
JSScript *script = cx->stack.currentScript(&pc, ContextStack::ALLOW_CROSS_COMPARTMENT);
JSScript *script = cx->currentScript(&pc, JSContext::ALLOW_CROSS_COMPARTMENT);
if (!script)
return true;

Просмотреть файл

@ -228,8 +228,8 @@ PreprocessValue(JSContext *cx, HandleObject holder, KeyType key, MutableHandleVa
if (!keyStr)
return false;
InvokeArgsGuard args;
if (!cx->stack.pushInvokeArgs(cx, 1, &args))
InvokeArgs args(cx);
if (!args.init(1))
return false;
args.setCallee(toJSON);
@ -250,8 +250,8 @@ PreprocessValue(JSContext *cx, HandleObject holder, KeyType key, MutableHandleVa
return false;
}
InvokeArgsGuard args;
if (!cx->stack.pushInvokeArgs(cx, 2, &args))
InvokeArgs args(cx);
if (!args.init(2))
return false;
args.setCallee(ObjectValue(*scx->replacer));
@ -771,8 +771,8 @@ Walk(JSContext *cx, HandleObject holder, HandleId name, HandleValue reviver, Mut
if (!key)
return false;
InvokeArgsGuard args;
if (!cx->stack.pushInvokeArgs(cx, 2, &args))
InvokeArgs args(cx);
if (!args.init(2))
return false;
args.setCallee(reviver);

Просмотреть файл

@ -91,16 +91,10 @@ enum RegExpFlag
AllFlags = 0x0f
};
class ExecuteArgsGuard;
class InvokeFrameGuard;
class InvokeArgsGuard;
class StringBuffer;
class FrameRegs;
class StackFrame;
class StackSegment;
class StackSpace;
class ContextStack;
class ScriptFrameIter;
class Proxy;

Просмотреть файл

@ -2131,8 +2131,8 @@ FindReplaceLength(JSContext *cx, RegExpStatics *res, ReplaceData &rdata, size_t
unsigned p = res->getMatches().parenCount();
unsigned argc = 1 + p + 2;
InvokeArgsGuard &args = rdata.fig.args();
if (!args.pushed() && !cx->stack.pushInvokeArgs(cx, argc, &args))
InvokeArgs &args = rdata.fig.args();
if (!args.init(argc))
return false;
args.setCallee(ObjectValue(*lambda));
@ -2639,7 +2639,7 @@ str_replace_flat_lambda(JSContext *cx, CallArgs outerArgs, ReplaceData &rdata, c
/* lambda(matchStr, matchStart, textstr) */
static const uint32_t lambdaArgc = 3;
if (!cx->stack.pushInvokeArgs(cx, lambdaArgc, &rdata.fig.args()))
if (!rdata.fig.args().init(lambdaArgc))
return false;
CallArgs &args = rdata.fig.args();
@ -3535,7 +3535,7 @@ js::str_fromCharCode(JSContext *cx, unsigned argc, Value *vp)
{
CallArgs args = CallArgsFromVp(argc, vp);
JS_ASSERT(args.length() <= StackSpace::ARGS_LENGTH_MAX);
JS_ASSERT(args.length() <= ARGS_LENGTH_MAX);
if (args.length() == 1) {
uint16_t code;
if (!ToUint16(cx, args[0], &code))

Просмотреть файл

@ -1730,7 +1730,7 @@ class TypedArrayTemplate
return NewBuiltinClassInstance(cx, fastClass(), SingletonObject);
jsbytecode *pc;
RootedScript script(cx, cx->stack.currentScript(&pc));
RootedScript script(cx, cx->currentScript(&pc));
NewObjectKind newKind = script
? UseNewTypeForInitializer(cx, script, pc, fastClass())
: GenericObject;
@ -2163,19 +2163,19 @@ class TypedArrayTemplate
if (!FindProto(cx, fastClass(), &proto))
return NULL;
InvokeArgsGuard ag;
if (!cx->stack.pushInvokeArgs(cx, 3, &ag))
InvokeArgs args(cx);
if (!args.init(3))
return NULL;
ag.setCallee(cx->compartment()->maybeGlobal()->createArrayFromBuffer<NativeType>());
ag.setThis(ObjectValue(*bufobj));
ag[0] = NumberValue(byteOffset);
ag[1] = Int32Value(lengthInt);
ag[2] = ObjectValue(*proto);
args.setCallee(cx->compartment()->maybeGlobal()->createArrayFromBuffer<NativeType>());
args.setThis(ObjectValue(*bufobj));
args[0] = NumberValue(byteOffset);
args[1] = Int32Value(lengthInt);
args[2] = ObjectValue(*proto);
if (!Invoke(cx, ag))
if (!Invoke(cx, args))
return NULL;
return &ag.rval().toObject();
return &args.rval().toObject();
}
}
@ -2770,16 +2770,16 @@ DataViewObject::class_constructor(JSContext *cx, unsigned argc, Value *vp)
if (!proto)
return false;
InvokeArgsGuard ag;
if (!cx->stack.pushInvokeArgs(cx, args.length() + 1, &ag))
InvokeArgs args2(cx);
if (!args2.init(args.length() + 1))
return false;
ag.setCallee(global->createDataViewForThis());
ag.setThis(ObjectValue(*bufobj));
PodCopy(ag.array(), args.array(), args.length());
ag[argc] = ObjectValue(*proto);
if (!Invoke(cx, ag))
args2.setCallee(global->createDataViewForThis());
args2.setThis(ObjectValue(*bufobj));
PodCopy(args2.array(), args.array(), args.length());
args2[argc] = ObjectValue(*proto);
if (!Invoke(cx, args2))
return false;
args.rval().set(ag.rval());
args.rval().set(args2.rval());
return true;
}

Просмотреть файл

@ -212,7 +212,7 @@ DataViewNewObjectKind(JSContext *cx, uint32_t byteLength, JSObject *proto)
if (!proto && byteLength >= TypedArray::SINGLETON_TYPE_BYTE_LENGTH)
return SingletonObject;
jsbytecode *pc;
JSScript *script = cx->stack.currentScript(&pc);
JSScript *script = cx->currentScript(&pc);
if (!script)
return GenericObject;
return types::UseNewTypeForInitializer(cx, script, pc, &DataViewObject::class_);
@ -243,7 +243,7 @@ DataViewObject::create(JSContext *cx, uint32_t byteOffset, uint32_t byteLength,
JS_ASSERT(obj->hasSingletonType());
} else {
jsbytecode *pc;
RootedScript script(cx, cx->stack.currentScript(&pc));
RootedScript script(cx, cx->currentScript(&pc));
if (script) {
if (!types::SetInitializerObjectType(cx, script, pc, obj, newKind))
return NULL;

Просмотреть файл

@ -204,15 +204,6 @@ UnsignedPtrDiff(const void *bigger, const void *smaller)
return size_t(bigger) - size_t(smaller);
}
/*
* Ordinarily, a function taking a JSContext* 'cx' parameter reports errors on
* the context. In some cases, functions optionally report and indicate this by
* taking a nullable 'maybecx' parameter. In some cases, though, a function
* always needs a 'cx', but optionally reports. This option is presented by the
* MaybeReportError.
*/
enum MaybeReportError { REPORT_ERROR = true, DONT_REPORT_ERROR = false };
/*****************************************************************************/
/* A bit array is an array of bits represented by an array of words (size_t). */

Просмотреть файл

@ -479,8 +479,8 @@ CrossCompartmentWrapper::nativeCall(JSContext *cx, IsAcceptableThis test, Native
RootedObject wrapped(cx, wrappedObject(wrapper));
{
AutoCompartment call(cx, wrapped);
InvokeArgsGuard dstArgs;
if (!cx->stack.pushInvokeArgs(cx, srcArgs.length(), &dstArgs))
InvokeArgs dstArgs(cx);
if (!dstArgs.init(srcArgs.length()))
return false;
Value *src = srcArgs.base();
@ -513,7 +513,6 @@ CrossCompartmentWrapper::nativeCall(JSContext *cx, IsAcceptableThis test, Native
return false;
srcArgs.rval().set(dstArgs.rval());
dstArgs.pop();
}
return cx->compartment()->wrap(cx, srcArgs.rval());
}

Просмотреть файл

@ -3520,7 +3520,7 @@ EnableStackWalkingAssertion(JSContext *cx, unsigned argc, jsval *vp)
static JSBool
GetMaxArgs(JSContext *cx, unsigned arg, jsval *vp)
{
JS_SET_RVAL(cx, vp, INT_TO_JSVAL(StackSpace::ARGS_LENGTH_MAX));
JS_SET_RVAL(cx, vp, INT_TO_JSVAL(ARGS_LENGTH_MAX));
return true;
}

Просмотреть файл

@ -17,7 +17,7 @@ inline uint32_t
ArgumentsObject::initialLength() const
{
uint32_t argc = uint32_t(getFixedSlot(INITIAL_LENGTH_SLOT).toInt32()) >> PACKED_BITS_COUNT;
JS_ASSERT(argc <= StackSpace::ARGS_LENGTH_MAX);
JS_ASSERT(argc <= ARGS_LENGTH_MAX);
return argc;
}

Просмотреть файл

@ -4884,13 +4884,13 @@ ApplyOrCall(JSContext *cx, unsigned argc, Value *vp, ApplyOrCallMode mode)
RootedObject argsobj(cx, &args[1].toObject());
if (!GetLengthProperty(cx, argsobj, &callArgc))
return false;
callArgc = unsigned(Min(callArgc, StackSpace::ARGS_LENGTH_MAX));
callArgc = unsigned(Min(callArgc, ARGS_LENGTH_MAX));
if (!argv.growBy(callArgc) || !GetElements(cx, argsobj, callArgc, argv.begin()))
return false;
callArgv = argv.begin();
}
} else {
callArgc = argc > 0 ? unsigned(Min(argc - 1, StackSpace::ARGS_LENGTH_MAX)) : 0;
callArgc = argc > 0 ? unsigned(Min(argc - 1, ARGS_LENGTH_MAX)) : 0;
callArgv = args.array() + 1;
}

Просмотреть файл

@ -146,8 +146,8 @@ ExecuteSequentially(JSContext *cx, HandleValue funVal, bool *complete)
FastInvokeGuard fig(cx, funVal);
bool allComplete = true;
for (uint32_t i = 0; i < numSlices; i++) {
InvokeArgsGuard &args = fig.args();
if (!args.pushed() && !cx->stack.pushInvokeArgs(cx, 3, &args))
InvokeArgs &args = fig.args();
if (!args.init(3))
return false;
args.setCallee(funVal);
args.setThis(UndefinedValue());
@ -1947,7 +1947,7 @@ class ParallelSpewer
if (cx) {
jsbytecode *pc;
JSScript *script = cx->stack.currentScript(&pc);
JSScript *script = cx->currentScript(&pc);
if (script && pc) {
NonBuiltinScriptFrameIter iter(cx);
if (iter.done()) {

Просмотреть файл

@ -268,7 +268,7 @@ FetchNameNoGC(JSObject *pobj, Shape *shape, MutableHandleValue vp)
inline bool
GetIntrinsicOperation(JSContext *cx, jsbytecode *pc, MutableHandleValue vp)
{
RootedPropertyName name(cx, cx->stack.currentScript()->getName(pc));
RootedPropertyName name(cx, cx->currentScript()->getName(pc));
return cx->global()->getIntrinsicValue(cx, name, vp);
}
@ -279,45 +279,6 @@ SetIntrinsicOperation(JSContext *cx, JSScript *script, jsbytecode *pc, HandleVal
return cx->global()->setIntrinsicValue(cx, name, val);
}
inline bool
NameOperation(JSContext *cx, jsbytecode *pc, MutableHandleValue vp)
{
JSObject *obj = cx->stack.currentScriptedScopeChain();
PropertyName *name = cx->stack.currentScript()->getName(pc);
/*
* Skip along the scope chain to the enclosing global object. This is
* used for GNAME opcodes where the bytecode emitter has determined a
* name access must be on the global. It also insulates us from bugs
* in the emitter: type inference will assume that GNAME opcodes are
* accessing the global object, and the inferred behavior should match
* the actual behavior even if the id could be found on the scope chain
* before the global object.
*/
if (IsGlobalOp(JSOp(*pc)))
obj = &obj->global();
Shape *shape = NULL;
JSObject *scope = NULL, *pobj = NULL;
if (LookupNameNoGC(cx, name, obj, &scope, &pobj, &shape)) {
if (FetchNameNoGC(pobj, shape, vp))
return true;
}
RootedObject objRoot(cx, obj), scopeRoot(cx), pobjRoot(cx);
RootedPropertyName nameRoot(cx, name);
RootedShape shapeRoot(cx);
if (!LookupName(cx, nameRoot, objRoot, &scopeRoot, &pobjRoot, &shapeRoot))
return false;
/* Kludge to allow (typeof foo == "undefined") tests. */
JSOp op2 = JSOp(pc[JSOP_NAME_LENGTH]);
if (op2 == JSOP_TYPEOF)
return FetchName<true>(cx, scopeRoot, pobjRoot, nameRoot, shapeRoot, vp);
return FetchName<false>(cx, scopeRoot, pobjRoot, nameRoot, shapeRoot, vp);
}
inline bool
SetNameOperation(JSContext *cx, JSScript *script, jsbytecode *pc, HandleObject scope,
HandleValue val)
@ -955,7 +916,7 @@ ReportIfNotFunction(JSContext *cx, const Value &v, MaybeConstruct construct = NO
*/
class FastInvokeGuard
{
InvokeArgsGuard args_;
InvokeArgs args_;
RootedFunction fun_;
RootedScript script_;
#ifdef JS_ION
@ -967,7 +928,8 @@ class FastInvokeGuard
public:
FastInvokeGuard(JSContext *cx, const Value &fval)
: fun_(cx)
: args_(cx)
, fun_(cx)
, script_(cx)
#ifdef JS_ION
, useIon_(ion::IsEnabled(cx))
@ -985,7 +947,7 @@ class FastInvokeGuard
}
}
InvokeArgsGuard &args() {
InvokeArgs &args() {
return args_;
}

Просмотреть файл

@ -229,8 +229,8 @@ js::OnUnknownMethod(JSContext *cx, HandleObject obj, Value idval_, MutableHandle
static JSBool
NoSuchMethod(JSContext *cx, unsigned argc, Value *vp)
{
InvokeArgsGuard args;
if (!cx->stack.pushInvokeArgs(cx, 2, &args))
InvokeArgs args(cx);
if (!args.init(2))
return JS_FALSE;
JS_ASSERT(vp[0].isObject());
@ -298,6 +298,45 @@ GetPropertyOperation(JSContext *cx, StackFrame *fp, HandleScript script, jsbytec
return true;
}
static inline bool
NameOperation(JSContext *cx, StackFrame *fp, jsbytecode *pc, MutableHandleValue vp)
{
JSObject *obj = fp->scopeChain();
PropertyName *name = fp->script()->getName(pc);
/*
* Skip along the scope chain to the enclosing global object. This is
* used for GNAME opcodes where the bytecode emitter has determined a
* name access must be on the global. It also insulates us from bugs
* in the emitter: type inference will assume that GNAME opcodes are
* accessing the global object, and the inferred behavior should match
* the actual behavior even if the id could be found on the scope chain
* before the global object.
*/
if (IsGlobalOp(JSOp(*pc)))
obj = &obj->global();
Shape *shape = NULL;
JSObject *scope = NULL, *pobj = NULL;
if (LookupNameNoGC(cx, name, obj, &scope, &pobj, &shape)) {
if (FetchNameNoGC(pobj, shape, vp))
return true;
}
RootedObject objRoot(cx, obj), scopeRoot(cx), pobjRoot(cx);
RootedPropertyName nameRoot(cx, name);
RootedShape shapeRoot(cx);
if (!LookupName(cx, nameRoot, objRoot, &scopeRoot, &pobjRoot, &shapeRoot))
return false;
/* Kludge to allow (typeof foo == "undefined") tests. */
JSOp op2 = JSOp(pc[JSOP_NAME_LENGTH]);
if (op2 == JSOP_TYPEOF)
return FetchName<true>(cx, scopeRoot, pobjRoot, nameRoot, shapeRoot, vp);
return FetchName<false>(cx, scopeRoot, pobjRoot, nameRoot, shapeRoot, vp);
}
inline bool
SetPropertyOperation(JSContext *cx, HandleScript script, jsbytecode *pc, HandleValue lval,
HandleValue rval)
@ -350,28 +389,16 @@ static JS_NEVER_INLINE bool
Interpret(JSContext *cx, RunState &state);
StackFrame *
InvokeState::pushInterpreterFrame(JSContext *cx)
InvokeState::pushInterpreterFrame(JSContext *cx, FrameGuard *fg)
{
ifg_.construct();
if (!cx->stack.pushInvokeFrame(cx, args_, initial_, ifg_.addr()))
return NULL;
return ifg_.ref().fp();
return cx->runtime()->interpreterStack().pushInvokeFrame(cx, args_, initial_, fg);
}
StackFrame *
ExecuteState::pushInterpreterFrame(JSContext *cx)
ExecuteState::pushInterpreterFrame(JSContext *cx, FrameGuard *fg)
{
efg_.construct();
if (!cx->stack.pushExecuteFrame(cx, script_, thisv_, scopeChain_, type_, evalInFrame_,
efg_.addr()))
{
return NULL;
}
return efg_.ref().fp();
return cx->runtime()->interpreterStack().pushExecuteFrame(cx, script_, thisv_, scopeChain_,
type_, evalInFrame_, fg);
}
bool
@ -420,7 +447,7 @@ js::RunScript(JSContext *cx, RunState &state)
bool
js::Invoke(JSContext *cx, CallArgs args, MaybeConstruct construct)
{
JS_ASSERT(args.length() <= StackSpace::ARGS_LENGTH_MAX);
JS_ASSERT(args.length() <= ARGS_LENGTH_MAX);
JS_ASSERT(!cx->compartment()->activeAnalysis);
/* We should never enter a new script while cx->iterValue is live. */
@ -480,8 +507,8 @@ bool
js::Invoke(JSContext *cx, const Value &thisv, const Value &fval, unsigned argc, Value *argv,
Value *rval)
{
InvokeArgsGuard args;
if (!cx->stack.pushInvokeArgs(cx, argc, &args))
InvokeArgs args(cx);
if (!args.init(argc))
return false;
args.setCallee(fval);
@ -547,8 +574,8 @@ js::InvokeConstructor(JSContext *cx, CallArgs args)
bool
js::InvokeConstructor(JSContext *cx, const Value &fval, unsigned argc, Value *argv, Value *rval)
{
InvokeArgsGuard args;
if (!cx->stack.pushInvokeArgs(cx, argc, &args))
InvokeArgs args(cx);
if (!args.init(argc))
return false;
args.setCallee(fval);
@ -824,8 +851,8 @@ EnterWith(JSContext *cx, AbstractFramePtr frame, HandleValue val, uint32_t stack
void
js::UnwindScope(JSContext *cx, AbstractFramePtr frame, uint32_t stackDepth)
{
JS_ASSERT_IF(frame.isStackFrame(), cx->stack.fp() == frame.asStackFrame());
JS_ASSERT_IF(frame.isStackFrame(), stackDepth <= cx->stack.regs().stackDepth());
JS_ASSERT_IF(frame.isStackFrame(), frame.asStackFrame() == cx->interpreterFrame());
JS_ASSERT_IF(frame.isStackFrame(), stackDepth <= cx->interpreterRegs().stackDepth());
for (ScopeIter si(frame, cx); !si.done(); ++si) {
switch (si.type()) {
@ -1018,6 +1045,26 @@ js::IteratorNext(JSContext *cx, HandleObject iterobj, MutableHandleValue rval)
return js_IteratorNext(cx, iterobj, rval);
}
FrameGuard::FrameGuard(RunState &state, FrameRegs &regs)
: state_(state),
regs_(regs),
stack_(NULL),
fp_(NULL)
{ }
FrameGuard::~FrameGuard()
{
if (state_.isGenerator()) {
JSGenerator *gen = state_.asGenerator()->gen();
gen->fp->unsetPushedSPSFrame();
gen->regs = regs_;
return;
}
if (fp_)
stack_->releaseFrame(fp_);
}
static JS_NEVER_INLINE bool
Interpret(JSContext *cx, RunState &state)
{
@ -1094,16 +1141,21 @@ Interpret(JSContext *cx, RunState &state)
interrupts.enable(); \
JS_END_MACRO
StackFrame *entryFrame = state.pushInterpreterFrame(cx);
FrameRegs regs;
FrameGuard fg(state, regs);
StackFrame *entryFrame = state.pushInterpreterFrame(cx, &fg);
if (!entryFrame)
return false;
JS_ASSERT_IF(!state.isGenerator(), cx->stack.regs().pc == state.script()->code);
JS_ASSERT_IF(entryFrame->isEvalFrame(), state.script()->isActiveEval);
if (!state.isGenerator()) {
regs.prepareToRun(*entryFrame, state.script());
JS_ASSERT(regs.pc == state.script()->code);
} else {
regs = state.asGenerator()->gen()->regs;
}
/* Repoint cx->regs to a local variable for faster access. */
FrameRegs regs = cx->stack.regs();
PreserveRegsGuard interpGuard(cx, regs);
JS_ASSERT_IF(entryFrame->isEvalFrame(), state.script()->isActiveEval);
InterpreterActivation activation(cx, entryFrame, regs);
@ -1453,8 +1505,7 @@ BEGIN_CASE(JSOP_STOP)
jit_return_pop_frame:
#endif
activation.popFrame(regs.fp());
cx->stack.popInlineFrame(regs);
activation.popInlineFrame(regs.fp());
SET_SCRIPT(regs.fp()->script());
#if defined(JS_ION)
@ -1983,7 +2034,7 @@ BEGIN_CASE(JSOP_DELNAME)
name = script->getName(regs.pc);
RootedObject &scopeObj = rootObject0;
scopeObj = cx->stack.currentScriptedScopeChain();
scopeObj = regs.fp()->scopeChain();
PUSH_BOOLEAN(true);
MutableHandleValue res = MutableHandleValue::fromMarkedLocation(&regs.sp[-1]);
@ -2292,11 +2343,9 @@ BEGIN_CASE(JSOP_FUNCALL)
TypeMonitorCall(cx, args, construct);
funScript = fun->nonLazyScript();
if (!cx->stack.pushInlineFrame(cx, regs, args, fun, funScript, initial))
if (!activation.pushInlineFrame(args, funScript, initial))
goto error;
activation.pushFrame(regs.fp());
if (newType)
regs.fp()->setUseNewType();
@ -2337,7 +2386,7 @@ BEGIN_CASE(JSOP_IMPLICITTHIS)
name = script->getName(regs.pc);
RootedObject &scopeObj = rootObject0;
scopeObj = cx->stack.currentScriptedScopeChain();
scopeObj = regs.fp()->scopeChain();
RootedObject &scope = rootObject1;
if (!LookupNameWithGlobalDefault(cx, name, scopeObj, &scope))
@ -2357,7 +2406,7 @@ BEGIN_CASE(JSOP_CALLNAME)
{
RootedValue &rval = rootValue0;
if (!NameOperation(cx, regs.pc, &rval))
if (!NameOperation(cx, regs.fp(), regs.pc, &rval))
goto error;
PUSH_COPY(rval);
@ -3060,7 +3109,6 @@ END_CASE(JSOP_ARRAYPUSH)
} /* for (;;) */
error:
JS_ASSERT(&cx->stack.regs() == &regs);
JS_ASSERT(uint32_t(regs.pc - script->code) < script->length);
if (cx->isExceptionPending()) {

Просмотреть файл

@ -206,7 +206,7 @@ class RunState
JSScript *script() const { return script_; }
virtual StackFrame *pushInterpreterFrame(JSContext *cx) = 0;
virtual StackFrame *pushInterpreterFrame(JSContext *cx, FrameGuard *fg) = 0;
virtual void setReturnValue(Value v) = 0;
private:
@ -220,7 +220,6 @@ class RunState
// Eval or global script.
class ExecuteState : public RunState
{
mozilla::Maybe<ExecuteFrameGuard> efg_;
ExecuteType type_;
RootedValue thisv_;
@ -244,7 +243,7 @@ class ExecuteState : public RunState
JSObject *scopeChain() const { return scopeChain_; }
ExecuteType type() const { return type_; }
virtual StackFrame *pushInterpreterFrame(JSContext *cx);
virtual StackFrame *pushInterpreterFrame(JSContext *cx, FrameGuard *fg);
virtual void setReturnValue(Value v) {
if (result_)
@ -255,7 +254,6 @@ class ExecuteState : public RunState
// Data to invoke a function.
class InvokeState : public RunState
{
mozilla::Maybe<InvokeFrameGuard> ifg_;
CallArgs &args_;
InitialFrameFlags initial_;
bool useNewType_;
@ -274,7 +272,7 @@ class InvokeState : public RunState
bool constructing() const { return InitialFrameFlagsAreConstructing(initial_); }
CallArgs &args() const { return args_; }
virtual StackFrame *pushInterpreterFrame(JSContext *cx);
virtual StackFrame *pushInterpreterFrame(JSContext *cx, FrameGuard *fg);
virtual void setReturnValue(Value v) {
args_.rval().set(v);
@ -284,7 +282,6 @@ class InvokeState : public RunState
// Generator script.
class GeneratorState : public RunState
{
mozilla::Maybe<GeneratorFrameGuard> gfg_;
JSContext *cx_;
JSGenerator *gen_;
JSGeneratorState futureState_;
@ -294,8 +291,10 @@ class GeneratorState : public RunState
GeneratorState(JSContext *cx, JSGenerator *gen, JSGeneratorState futureState);
~GeneratorState();
virtual StackFrame *pushInterpreterFrame(JSContext *cx);
virtual StackFrame *pushInterpreterFrame(JSContext *cx, FrameGuard *fg);
virtual void setReturnValue(Value) { }
JSGenerator *gen() const { return gen_; }
};
extern bool

Просмотреть файл

@ -685,8 +685,8 @@ js::GetProperty(JSContext *cx, Handle<ObjectImpl*> obj, Handle<ObjectImpl*> rece
return true;
}
InvokeArgsGuard args;
if (!cx->stack.pushInvokeArgs(cx, 0, &args))
InvokeArgs args(cx);
if (!args.init(0))
return false;
args.setCallee(get);
@ -751,8 +751,8 @@ js::GetElement(JSContext *cx, Handle<ObjectImpl*> obj, Handle<ObjectImpl*> recei
return true;
}
InvokeArgsGuard args;
if (!cx->stack.pushInvokeArgs(cx, 0, &args))
InvokeArgs args(cx);
if (!args.init(0))
return false;
/* Push getter, receiver, and no args. */
@ -986,8 +986,8 @@ js::SetElement(JSContext *cx, Handle<ObjectImpl*> obj, Handle<ObjectImpl*> recei
return true;
}
InvokeArgsGuard args;
if (!cx->stack.pushInvokeArgs(cx, 1, &args))
InvokeArgs args(cx);
if (!args.init(1))
return false;
/* Push set, receiver, and v as the sole argument. */

Просмотреть файл

@ -71,41 +71,21 @@ StackFrame::compartment() const
}
inline void
StackFrame::initPrev(JSContext *cx)
{
JS_ASSERT(flags_ & HAS_PREVPC);
if (FrameRegs *regs = cx->stack.maybeRegs()) {
prev_ = regs->fp();
prevpc_ = regs->pc;
JS_ASSERT(uint32_t(prevpc_ - prev_->script()->code) < prev_->script()->length);
} else {
prev_ = NULL;
#ifdef DEBUG
prevpc_ = (jsbytecode *)0xbadc;
#endif
}
}
inline void
StackFrame::resetGeneratorPrev(JSContext *cx)
{
flags_ |= HAS_PREVPC;
initPrev(cx);
}
inline void
StackFrame::initCallFrame(JSContext *cx, JSFunction &callee,
JSScript *script, uint32_t nactual, StackFrame::Flags flagsArg)
StackFrame::initCallFrame(JSContext *cx, StackFrame *prev, jsbytecode *prevpc, Value *prevsp, JSFunction &callee,
JSScript *script, Value *argv, uint32_t nactual, StackFrame::Flags flagsArg)
{
JS_ASSERT((flagsArg & ~CONSTRUCTING) == 0);
JS_ASSERT(callee.nonLazyScript() == script);
/* Initialize stack frame members. */
flags_ = FUNCTION | HAS_PREVPC | HAS_SCOPECHAIN | HAS_BLOCKCHAIN | flagsArg;
flags_ = FUNCTION | HAS_SCOPECHAIN | HAS_BLOCKCHAIN | flagsArg;
argv_ = argv;
exec.fun = &callee;
u.nactual = nactual;
scopeChain_ = callee.environment();
initPrev(cx);
prev_ = prev;
prevpc_ = prevpc;
prevsp_ = prevsp;
blockChain_= NULL;
JS_ASSERT(!hasBlockChain());
JS_ASSERT(!hasHookData());
@ -234,144 +214,100 @@ StackFrame::callObj() const
/*****************************************************************************/
STATIC_POSTCONDITION(!return || ubound(from) >= nvals)
JS_ALWAYS_INLINE bool
StackSpace::ensureSpace(JSContext *cx, MaybeReportError report, Value *from, ptrdiff_t nvals) const
inline void
InterpreterStack::purge(JSRuntime *rt)
{
assertInvariants();
JS_ASSERT(from >= firstUnused());
#ifdef XP_WIN
JS_ASSERT(from <= commitEnd_);
#endif
if (JS_UNLIKELY(conservativeEnd_ - from < nvals))
return ensureSpaceSlow(cx, report, from, nvals);
return true;
rt->freeLifoAlloc.transferUnusedFrom(&allocator_);
}
/*****************************************************************************/
JS_ALWAYS_INLINE StackFrame *
ContextStack::getCallFrame(JSContext *cx, MaybeReportError report, const CallArgs &args,
JSFunction *fun, HandleScript script, StackFrame::Flags *flags) const
uint8_t *
InterpreterStack::allocateFrame(JSContext *cx, size_t size)
{
JS_ASSERT(fun->nonLazyScript() == script);
unsigned nformal = fun->nargs;
Value *firstUnused = args.end();
JS_ASSERT(firstUnused == space().firstUnused());
unsigned nvals = VALUES_PER_STACK_FRAME + script->nslots;
if (args.length() >= nformal) {
if (!space().ensureSpace(cx, report, firstUnused, nvals))
return NULL;
return reinterpret_cast<StackFrame *>(firstUnused);
if (JS_UNLIKELY(frameCount_ >= MAX_FRAMES)) {
js_ReportOverRecursed(cx);
return NULL;
}
/* Pad any missing arguments with |undefined|. */
JS_ASSERT(args.length() < nformal);
unsigned nmissing = nformal - args.length();
if (!space().ensureSpace(cx, report, firstUnused, nmissing + nvals))
uint8_t *buffer = reinterpret_cast<uint8_t *>(allocator_.alloc(size));
if (!buffer)
return NULL;
SetValueRangeToUndefined(firstUnused, nmissing);
return reinterpret_cast<StackFrame *>(firstUnused + nmissing);
frameCount_++;
return buffer;
}
JS_ALWAYS_INLINE StackFrame *
InterpreterStack::getCallFrame(JSContext *cx, const CallArgs &args, HandleScript script,
StackFrame::Flags *flags, Value **pargv)
{
JSFunction *fun = &args.callee().as<JSFunction>();
JS_ASSERT(fun->nonLazyScript() == script);
unsigned nformal = fun->nargs;
unsigned nvals = script->nslots;
if (args.length() >= nformal) {
*pargv = args.array();
uint8_t *buffer = allocateFrame(cx, sizeof(StackFrame) + nvals * sizeof(Value));
return reinterpret_cast<StackFrame *>(buffer);
}
// Pad any missing arguments with |undefined|.
JS_ASSERT(args.length() < nformal);
nvals += nformal + 2; // Include callee, |this|.
uint8_t *buffer = allocateFrame(cx, sizeof(StackFrame) + nvals * sizeof(Value));
if (!buffer)
return NULL;
Value *argv = reinterpret_cast<Value *>(buffer);
unsigned nmissing = nformal - args.length();
mozilla::PodCopy(argv, args.base(), 2 + args.length());
SetValueRangeToUndefined(argv + 2 + args.length(), nmissing);
*pargv = argv + 2;
return reinterpret_cast<StackFrame *>(argv + 2 + nformal);
}
JS_ALWAYS_INLINE bool
ContextStack::pushInlineFrame(JSContext *cx, FrameRegs &regs, const CallArgs &args,
HandleFunction callee, HandleScript script,
InitialFrameFlags initial, MaybeReportError report)
InterpreterStack::pushInlineFrame(JSContext *cx, FrameRegs &regs, const CallArgs &args,
HandleScript script, InitialFrameFlags initial)
{
JS_ASSERT(onTop());
JSFunction *callee = &args.callee().as<JSFunction>();
JS_ASSERT(regs.sp == args.end());
/* Cannot assert callee == args.callee() since this is called from LeaveTree. */
JS_ASSERT(callee->nonLazyScript() == script);
StackFrame *prev = regs.fp();
jsbytecode *prevpc = regs.pc;
Value *prevsp = regs.sp;
JS_ASSERT(prev);
LifoAlloc::Mark mark = allocator_.mark();
StackFrame::Flags flags = ToFrameFlags(initial);
StackFrame *fp = getCallFrame(cx, report, args, callee, script, &flags);
Value *argv;
StackFrame *fp = getCallFrame(cx, args, script, &flags, &argv);
if (!fp)
return false;
/* Initialize frame, locals, regs. */
fp->initCallFrame(cx, *callee, script, args.length(), flags);
fp->mark_ = mark;
/* Initialize frame, locals, regs. */
fp->initCallFrame(cx, prev, prevpc, prevsp, *callee, script, argv, args.length(), flags);
/*
* N.B. regs may differ from the active registers, if the parent is about
* to repoint the active registers to regs. See UncachedInlineCall.
*/
regs.prepareToRun(*fp, script);
return true;
}
JS_ALWAYS_INLINE bool
ContextStack::pushInlineFrame(JSContext *cx, FrameRegs &regs, const CallArgs &args,
HandleFunction callee, HandleScript script,
InitialFrameFlags initial, Value **stackLimit)
{
if (!pushInlineFrame(cx, regs, args, callee, script, initial))
return false;
*stackLimit = space().conservativeEnd_;
return true;
}
JS_ALWAYS_INLINE void
ContextStack::popInlineFrame(FrameRegs &regs)
InterpreterStack::popInlineFrame(FrameRegs &regs)
{
JS_ASSERT(onTop());
JS_ASSERT(&regs == &seg_->regs());
StackFrame *fp = regs.fp();
Value *newsp = fp->argv() - 1;
JS_ASSERT(newsp >= fp->prev()->base());
newsp[-1] = fp->returnValue();
regs.popFrame(newsp);
}
inline JSScript *
ContextStack::currentScript(jsbytecode **ppc,
MaybeAllowCrossCompartment allowCrossCompartment) const
{
if (ppc)
*ppc = NULL;
Activation *act = cx_->mainThread().activation();
while (act && (act->cx() != cx_ || !act->isActive()))
act = act->prev();
if (!act)
return NULL;
JS_ASSERT(act->cx() == cx_);
#ifdef JS_ION
if (act->isJit()) {
JSScript *script = NULL;
ion::GetPcScript(cx_, &script, ppc);
if (!allowCrossCompartment && script->compartment() != cx_->compartment())
return NULL;
return script;
}
#endif
JS_ASSERT(act->isInterpreter());
StackFrame *fp = act->asInterpreter()->current();
JS_ASSERT(!fp->runningInJit());
JSScript *script = fp->script();
if (!allowCrossCompartment && script->compartment() != cx_->compartment())
return NULL;
if (ppc)
*ppc = fp->pcQuadratic(*this);
return script;
}
inline HandleObject
ContextStack::currentScriptedScopeChain() const
{
return fp()->scopeChain();
regs.popInlineFrame();
regs.sp[-1] = fp->returnValue();
releaseFrame(fp);
JS_ASSERT(regs.fp());
}
template <class Op>
@ -892,6 +828,7 @@ Activation::~Activation()
{
JS_ASSERT(cx_->mainThread().activation_ == this);
cx_->mainThread().activation_ = prev_;
cx_->maybeMigrateVersionOverride();
}
InterpreterActivation::InterpreterActivation(JSContext *cx, StackFrame *entry, FrameRegs &regs)
@ -899,11 +836,43 @@ InterpreterActivation::InterpreterActivation(JSContext *cx, StackFrame *entry, F
entry_(entry),
current_(entry),
regs_(regs)
#ifdef DEBUG
, oldFrameCount_(cx_->runtime()->interpreterStack().frameCount_)
#endif
{}
// Define destructor explicitly to silence GCC used-but-never-defined warning.
InterpreterActivation::~InterpreterActivation()
{}
{
// Pop all inline frames.
while (current_ != entry_)
popInlineFrame(current_);
JS_ASSERT(oldFrameCount_ == cx_->runtime()->interpreterStack().frameCount_);
JS_ASSERT_IF(oldFrameCount_ == 0, cx_->runtime()->interpreterStack().allocator_.used() == 0);
}
inline bool
InterpreterActivation::pushInlineFrame(const CallArgs &args, HandleScript script,
InitialFrameFlags initial)
{
if (!cx_->runtime()->interpreterStack().pushInlineFrame(cx_, regs_, args, script, initial))
return false;
JS_ASSERT(regs_.fp()->script()->compartment() == compartment_);
current_ = regs_.fp();
return true;
}
inline void
InterpreterActivation::popInlineFrame(StackFrame *frame)
{
JS_ASSERT(current_ == frame);
JS_ASSERT(current_ != entry_);
current_ = frame->prev();
JS_ASSERT(current_);
cx_->runtime()->interpreterStack().popInlineFrame(regs_);
}
} /* namespace js */

Просмотреть файл

@ -21,24 +21,6 @@
#include "vm/Stack-inl.h"
#include "vm/Probes-inl.h"
/* Includes to get to low-level memory-mapping functionality. */
#ifdef XP_WIN
# include "jswin.h"
#elif defined(XP_OS2)
# define INCL_DOSMEMMGR
# include <os2.h>
#else
# include <unistd.h>
# include <sys/mman.h>
# if !defined(MAP_ANONYMOUS)
# if defined(MAP_ANON)
# define MAP_ANONYMOUS MAP_ANON
# else
# define MAP_ANONYMOUS 0
# endif
# endif
#endif
using namespace js;
using mozilla::PodCopy;
@ -46,27 +28,44 @@ using mozilla::PodCopy;
/*****************************************************************************/
void
StackFrame::initExecuteFrame(JSScript *script, StackFrame *prevLink, AbstractFramePtr prev,
FrameRegs *regs, const Value &thisv, JSObject &scopeChain,
ExecuteType type)
StackFrame::initExecuteFrame(JSContext *cx, JSScript *script, AbstractFramePtr evalInFramePrev,
const Value &thisv, JSObject &scopeChain, ExecuteType type)
{
/*
* See encoding of ExecuteType. When GLOBAL isn't set, we are executing a
* script in the context of another frame and the frame type is determined
* by the context.
*/
flags_ = type | HAS_SCOPECHAIN | HAS_BLOCKCHAIN | HAS_PREVPC;
if (!(flags_ & GLOBAL)) {
JS_ASSERT(prev.isFunctionFrame() || prev.isGlobalFrame());
flags_ |= prev.isFunctionFrame() ? FUNCTION : GLOBAL;
flags_ = type | HAS_SCOPECHAIN | HAS_BLOCKCHAIN;
JSObject *callee = NULL;
if (!(flags_ & (GLOBAL))) {
if (evalInFramePrev) {
JS_ASSERT(evalInFramePrev.isFunctionFrame() || evalInFramePrev.isGlobalFrame());
if (evalInFramePrev.isFunctionFrame()) {
callee = evalInFramePrev.callee();
flags_ |= FUNCTION;
} else {
flags_ |= GLOBAL;
}
} else {
ScriptFrameIter iter(cx);
JS_ASSERT(iter.isFunctionFrame() || iter.isGlobalFrame());
if (iter.isFunctionFrame()) {
callee = iter.callee();
flags_ |= FUNCTION;
} else {
flags_ |= GLOBAL;
}
}
}
Value *dstvp = (Value *)this - 2;
dstvp[1] = thisv;
if (isFunctionFrame()) {
dstvp[0] = prev.calleev();
exec.fun = prev.fun();
dstvp[0] = ObjectValue(*callee);
exec.fun = &callee->as<JSFunction>();
u.evalScript = script;
} else {
JS_ASSERT(isGlobalFrame());
@ -78,13 +77,13 @@ StackFrame::initExecuteFrame(JSScript *script, StackFrame *prevLink, AbstractFra
}
scopeChain_ = &scopeChain;
prev_ = prevLink;
prevpc_ = regs ? regs->pc : (jsbytecode *)0xbad;
prev_ = NULL;
prevpc_ = NULL;
prevsp_ = NULL;
blockChain_ = NULL;
/* Set evalInFramePrev_ if this is an eval-in-frame. */
JS_ASSERT_IF(isDebuggerFrame(), isEvalFrame());
evalInFramePrev_ = isDebuggerFrame() ? prev : (StackFrame *)NULL;
JS_ASSERT_IF(evalInFramePrev, isDebuggerFrame());
evalInFramePrev_ = evalInFramePrev;
#ifdef DEBUG
Debug_SetValueRangeToCrashOnTouch(&rval_, 1);
@ -97,11 +96,9 @@ void
StackFrame::copyFrameAndValues(JSContext *cx, Value *vp, StackFrame *otherfp,
const Value *othervp, Value *othersp)
{
JS_ASSERT(vp == (Value *)this - ((Value *)otherfp - othervp));
JS_ASSERT(othervp == otherfp->generatorArgsSnapshotBegin());
JS_ASSERT(othersp >= otherfp->slots());
JS_ASSERT(othersp <= otherfp->generatorSlotsSnapshotBegin() + otherfp->script()->nslots);
JS_ASSERT((Value *)this - vp == (Value *)otherfp - othervp);
/* Copy args, StackFrame, and slots. */
const Value *srcend = otherfp->generatorArgsSnapshotEnd();
@ -113,6 +110,7 @@ StackFrame::copyFrameAndValues(JSContext *cx, Value *vp, StackFrame *otherfp,
}
*this = *otherfp;
argv_ = vp + 2;
unsetPushedSPSFrame();
if (doPostBarrier)
writeBarrierPost();
@ -161,9 +159,9 @@ StackFrame::maybeSuspendedGenerator(JSRuntime *rt)
{
/*
* A suspended generator's frame is embedded inside the JSGenerator object
* instead of on the contiguous stack like all active frames.
* and is not currently running.
*/
if (!isGeneratorFrame() || rt->stackSpace.containsFast(this))
if (!isGeneratorFrame() || !isSuspended())
return NULL;
/*
@ -177,31 +175,6 @@ StackFrame::maybeSuspendedGenerator(JSRuntime *rt)
return gen;
}
jsbytecode *
StackFrame::pcQuadratic(const ContextStack &stack, size_t maxDepth)
{
StackSegment &seg = stack.space().containingSegment(this);
FrameRegs &regs = seg.regs();
/*
* This isn't just an optimization; seg->computeNextFrame(fp) is only
* defined if fp != seg->regs->fp.
*/
if (regs.fp() == this)
return regs.pc;
/*
* To compute fp's pc, we need the next frame (where next->prev == fp).
* This requires a linear search which we allow the caller to limit (in
* cases where we do not have a hard requirement to find the correct pc).
*/
if (StackFrame *next = seg.computeNextFrame(this, maxDepth))
return next->prevpc();
/* If we hit the limit, just return the beginning of the script. */
return regs.fp()->script()->code;
}
bool
StackFrame::copyRawFrameSlots(AutoValueVector *vec)
{
@ -454,622 +427,78 @@ StackFrame::mark(JSTracer *trc)
gc::MarkValueUnbarriered(trc, &returnValue(), "rval");
}
void
StackFrame::markValues(JSTracer *trc, Value *sp)
{
JS_ASSERT(sp >= slots());
gc::MarkValueRootRange(trc, sp - slots(), slots(), "vm_stack");
if (hasArgs())
gc::MarkValueRootRange(trc, js::Max(numActualArgs(), numFormalArgs()), argv_, "fp argv");
}
static void
MarkInterpreterActivation(JSTracer *trc, InterpreterActivation *act)
{
for (InterpreterFrameIterator frames(act); !frames.done(); ++frames) {
StackFrame *fp = frames.frame();
fp->markValues(trc, frames.sp());
fp->mark(trc);
}
}
void
js::MarkInterpreterActivations(JSRuntime *rt, JSTracer *trc)
{
for (ActivationIterator iter(rt); !iter.done(); ++iter) {
Activation *act = iter.activation();
if (act->isInterpreter())
MarkInterpreterActivation(trc, act->asInterpreter());
}
}
/*****************************************************************************/
bool
StackSegment::contains(const StackFrame *fp) const
{
/* NB: this depends on the continuity of segments in memory. */
return (Value *)fp >= slotsBegin() && (Value *)fp <= (Value *)maybefp();
}
bool
StackSegment::contains(const FrameRegs *regs) const
{
return regs && contains(regs->fp());
}
StackFrame *
StackSegment::computeNextFrame(const StackFrame *f, size_t maxDepth) const
InterpreterStack::pushInvokeFrame(JSContext *cx, const CallArgs &args, InitialFrameFlags initial,
FrameGuard *fg)
{
JS_ASSERT(contains(f) && f != fp());
LifoAlloc::Mark mark = allocator_.mark();
StackFrame *next = fp();
for (size_t i = 0; i <= maxDepth; ++i) {
if (next->prev() == f)
return next;
next = next->prev();
}
return NULL;
}
Value *
StackSegment::end() const
{
/* NB: this depends on the continuity of segments in memory. */
JS_ASSERT_IF(regs_, contains(regs_));
Value *p = regs_ ? regs_->sp : slotsBegin();
if (invokeArgsEnd_ > p)
p = invokeArgsEnd_;
JS_ASSERT(p >= slotsBegin());
return p;
}
FrameRegs *
StackSegment::pushRegs(FrameRegs &regs)
{
JS_ASSERT_IF(contains(regs_), regs.fp()->prev() == regs_->fp());
FrameRegs *prev = regs_;
regs_ = &regs;
return prev;
}
void
StackSegment::popRegs(FrameRegs *regs)
{
JS_ASSERT_IF(regs && contains(regs->fp()), regs->fp() == regs_->fp()->prev());
regs_ = regs;
}
/*****************************************************************************/
StackSpace::StackSpace()
: seg_(NULL),
base_(NULL),
conservativeEnd_(NULL),
#ifdef XP_WIN
commitEnd_(NULL),
#endif
defaultEnd_(NULL),
trustedEnd_(NULL)
{
assertInvariants();
}
bool
StackSpace::init()
{
void *p;
#ifdef XP_WIN
p = VirtualAlloc(NULL, CAPACITY_BYTES, MEM_RESERVE, PAGE_READWRITE);
if (!p)
return false;
void *check = VirtualAlloc(p, COMMIT_BYTES, MEM_COMMIT, PAGE_READWRITE);
if (p != check)
return false;
base_ = reinterpret_cast<Value *>(p);
conservativeEnd_ = commitEnd_ = base_ + COMMIT_VALS;
trustedEnd_ = base_ + CAPACITY_VALS;
defaultEnd_ = trustedEnd_ - BUFFER_VALS;
Debug_SetValueRangeToCrashOnTouch(base_, commitEnd_);
#elif defined(XP_OS2)
if (DosAllocMem(&p, CAPACITY_BYTES, PAG_COMMIT | PAG_READ | PAG_WRITE | OBJ_ANY) &&
DosAllocMem(&p, CAPACITY_BYTES, PAG_COMMIT | PAG_READ | PAG_WRITE))
return false;
base_ = reinterpret_cast<Value *>(p);
trustedEnd_ = base_ + CAPACITY_VALS;
conservativeEnd_ = defaultEnd_ = trustedEnd_ - BUFFER_VALS;
Debug_SetValueRangeToCrashOnTouch(base_, trustedEnd_);
#else
JS_ASSERT(CAPACITY_BYTES % getpagesize() == 0);
p = mmap(NULL, CAPACITY_BYTES, PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
if (p == MAP_FAILED)
return false;
base_ = reinterpret_cast<Value *>(p);
trustedEnd_ = base_ + CAPACITY_VALS;
conservativeEnd_ = defaultEnd_ = trustedEnd_ - BUFFER_VALS;
Debug_SetValueRangeToCrashOnTouch(base_, trustedEnd_);
#endif
assertInvariants();
return true;
}
StackSpace::~StackSpace()
{
assertInvariants();
JS_ASSERT(!seg_);
if (!base_)
return;
#ifdef XP_WIN
VirtualFree(base_, (commitEnd_ - base_) * sizeof(Value), MEM_DECOMMIT);
VirtualFree(base_, 0, MEM_RELEASE);
#elif defined(XP_OS2)
DosFreeMem(base_);
#else
#ifdef SOLARIS
munmap((caddr_t)base_, CAPACITY_BYTES);
#else
munmap(base_, CAPACITY_BYTES);
#endif
#endif
}
StackSegment &
StackSpace::containingSegment(const StackFrame *target) const
{
for (StackSegment *s = seg_; s; s = s->prevInMemory()) {
if (s->contains(target))
return *s;
}
JS_NOT_REACHED("frame not in stack space");
return *(StackSegment *)NULL;
}
void
StackSpace::markFrame(JSTracer *trc, StackFrame *fp, Value *slotsEnd)
{
/*
* JM may leave values with object/string type but a null payload on the
* stack. This can happen if the script was initially compiled by Ion,
* which replaced dead values with undefined, and later ran under JM which
* assumed values were of the original type.
*/
Value *slotsBegin = fp->slots();
gc::MarkValueRootRangeMaybeNullPayload(trc, slotsEnd - slotsBegin, slotsBegin, "vm_stack");
}
void
StackSpace::mark(JSTracer *trc)
{
/* NB: this depends on the continuity of segments in memory. */
Value *nextSegEnd = firstUnused();
for (StackSegment *seg = seg_; seg; seg = seg->prevInMemory()) {
/*
* A segment describes a linear region of memory that contains a stack
* of native and interpreted calls. For marking purposes, though, we
* only need to distinguish between frames and values and mark
* accordingly. Since native calls only push values on the stack, we
* can effectively lump them together and just iterate over interpreted
* calls. Thus, marking can view the stack as the regex:
* (segment slots (frame slots)*)*
* which gets marked in reverse order.
*/
Value *slotsEnd = nextSegEnd;
for (StackFrame *fp = seg->maybefp(); (Value *)fp > (Value *)seg; fp = fp->prev()) {
/* Mark from fp->slots() to slotsEnd. */
markFrame(trc, fp, slotsEnd);
fp->mark(trc);
slotsEnd = (Value *)fp;
}
gc::MarkValueRootRange(trc, seg->slotsBegin(), slotsEnd, "vm_stack");
nextSegEnd = (Value *)seg;
}
}
void
StackSpace::markActiveCompartments()
{
for (StackSegment *seg = seg_; seg; seg = seg->prevInMemory()) {
for (StackFrame *fp = seg->maybefp(); (Value *)fp > (Value *)seg; fp = fp->prev())
MarkCompartmentActive(fp);
}
}
/*
 * Slow path of ensureSpace: check whether [from, from + nvals) fits below the
 * caller's limit (trusted principals get the larger trustedEnd_ limit), and on
 * Windows commit additional stack pages on demand. Returns false (optionally
 * reporting over-recursion) when the request cannot be satisfied.
 */
JS_FRIEND_API(bool)
StackSpace::ensureSpaceSlow(JSContext *cx, MaybeReportError report, Value *from, ptrdiff_t nvals) const
{
    assertInvariants();

    JSCompartment *dest = cx->compartment();
    bool trusted = dest->principals == cx->runtime()->trustedPrincipals();
    Value *end = trusted ? trustedEnd_ : defaultEnd_;

    /*
     * conservativeEnd_ must stay below defaultEnd_: if conservativeEnd_ were
     * to be bumped past defaultEnd_, untrusted JS would be able to consume the
     * buffer space at the end of the stack reserved for trusted JS.
     */
    if (end - from < nvals) {
        if (report)
            js_ReportOverRecursed(cx);
        return false;
    }

#ifdef XP_WIN
    /* Lazily commit reserved pages in COMMIT_VALS-sized chunks. */
    if (commitEnd_ - from < nvals) {
        Value *newCommit = commitEnd_;
        Value *request = from + nvals;

        /* Use a dumb loop; will probably execute once. */
        JS_ASSERT((trustedEnd_ - newCommit) % COMMIT_VALS == 0);
        do {
            newCommit += COMMIT_VALS;
            JS_ASSERT((trustedEnd_ - newCommit) >= 0);
        } while (newCommit < request);

        /* The cast is safe because CAPACITY_BYTES is small. */
        int32_t size = static_cast<int32_t>(newCommit - commitEnd_) * sizeof(Value);

        if (!VirtualAlloc(commitEnd_, size, MEM_COMMIT, PAGE_READWRITE)) {
            if (report)
                js_ReportOverRecursed(cx);
            return false;
        }

        /* Poison the newly committed range in debug builds. */
        Debug_SetValueRangeToCrashOnTouch(commitEnd_, newCommit);

        commitEnd_ = newCommit;
        conservativeEnd_ = Min(commitEnd_, defaultEnd_);
        assertInvariants();
    }
#endif

    return true;
}
/*
 * Report the stack's memory footprint for about:memory.
 *
 * On *nix: resident size measured via mincore(2) (pages swapped to disk are
 * not counted). On Windows: committed size. Elsewhere (and on any failure
 * path): virtual size, an upper bound on resident size.
 *
 * Fix: the result of js_malloc was previously used unchecked; on OOM the
 * mincore call and the vec[i] loop would dereference NULL. We now fall back
 * to the virtual size, exactly as the existing mincore-failure path does.
 */
size_t
StackSpace::sizeOf()
{
#if defined(XP_UNIX)
    /*
     * Measure how many of our pages are resident in RAM using mincore, and
     * return that as our size. This is slow, but hopefully nobody expects
     * this method to be fast.
     *
     * Note that using mincore means that we don't count pages of the stack
     * which are swapped out to disk. We really should, but what we have here
     * is better than counting the whole stack!
     */
    const int pageSize = getpagesize();
    size_t numBytes = (trustedEnd_ - base_) * sizeof(Value);
    size_t numPages = (numBytes + pageSize - 1) / pageSize;

    // On Linux, mincore's third argument has type unsigned char*.
#ifdef __linux__
    typedef unsigned char MincoreArgType;
#else
    typedef char MincoreArgType;
#endif

    MincoreArgType *vec = (MincoreArgType *) js_malloc(numPages);
    if (!vec) {
        /* OOM: fall back to the virtual size (as the mincore-failure path does). */
        return (trustedEnd_ - base_) * sizeof(Value);
    }
    int result = mincore(base_, numBytes, vec);
    if (result) {
        js_free(vec);
        /*
         * If mincore fails us, return the vsize (like we do below if we're not
         * on Windows or Unix).
         */
        return (trustedEnd_ - base_) * sizeof(Value);
    }

    size_t residentBytes = 0;
    for (size_t i = 0; i < numPages; i++) {
        /* vec[i] has its least-significant bit set iff page i is in RAM. */
        if (vec[i] & 0x1)
            residentBytes += pageSize;
    }
    js_free(vec);
    return residentBytes;

#elif defined(XP_WIN)
    return (commitEnd_ - base_) * sizeof(Value);
#else
    /*
     * Return the stack's virtual size, which is at least an upper bound on its
     * resident size.
     */
    return (trustedEnd_ - base_) * sizeof(Value);
#endif
}
#ifdef DEBUG
/*
 * Debug-only: linear scan of every frame on the stack to check whether fp is
 * a live interpreter frame. JIT frames are skipped; debug mode currently
 * disables Ion compilation in the debuggee's compartment anyway.
 */
bool
StackSpace::containsSlow(StackFrame *fp)
{
    if (!seg_)
        return false;

    for (AllFramesIter iter(seg_->cx()); !iter.done(); ++iter) {
        if (!iter.isJit() && iter.interpFrame() == fp)
            return true;
    }
    return false;
}
#endif
/*****************************************************************************/
/*
 * A ContextStack starts out empty (no segment); it shares the runtime-wide
 * StackSpace with every other context's stack.
 */
ContextStack::ContextStack(JSContext *cx)
  : seg_(NULL),
    space_(&cx->runtime()->stackSpace),
    cx_(cx)
{}
/* All segments must have been popped before the context stack is destroyed. */
ContextStack::~ContextStack()
{
    JS_ASSERT(!seg_);
}
/*
 * True when this context's newest segment is also the newest segment of the
 * shared StackSpace, i.e. this stack may be extended in place.
 */
bool
ContextStack::onTop() const
{
    if (!seg_)
        return false;
    return seg_ == space().seg_;
}
/*
* This helper function brings the ContextStack to the top of the thread stack
* (so that it can be extended to push a frame and/or arguments) by potentially
* pushing a StackSegment. The 'pushedSeg' outparam indicates whether such a
* segment was pushed (and hence whether the caller needs to call popSegment).
*
* Additionally, to minimize calls to ensureSpace, ensureOnTop ensures that
* there is space for nvars slots on top of the stack.
*/
/*
 * Bring this ContextStack to the top of the shared stack, pushing a fresh
 * StackSegment if needed, and reserve space for nvars values. Returns the
 * first usable Value slot, or NULL on failure.
 *
 * NOTE(review): *pushedSeg is written (set to true) only on the segment-push
 * path; callers presumably pre-initialize it to false — confirm at call sites.
 */
Value *
ContextStack::ensureOnTop(JSContext *cx, MaybeReportError report, unsigned nvars,
                          MaybeExtend extend, bool *pushedSeg)
{
    Value *firstUnused = space().firstUnused();
    /*
     * NOTE(review): this initial assignment appears dead — 'regs' is
     * unconditionally reassigned below before any use. Possibly residue of
     * code elided by the diff; confirm against the full revision.
     */
    FrameRegs *regs = cx->stack.maybeRegs();

    /* Fast path: already on top and allowed to extend the current segment. */
    if (onTop() && extend) {
        if (!space().ensureSpace(cx, report, firstUnused, nvars))
            return NULL;
        return firstUnused;
    }

    /* Slow path: reserve room for a new segment header plus the request. */
    if (!space().ensureSpace(cx, report, firstUnused, VALUES_PER_STACK_SEGMENT + nvars))
        return NULL;

    /* Carry over the current regs only when logically extending this stack. */
    regs = (seg_ && extend) ? seg_->maybeRegs() : NULL;

    seg_ = new(firstUnused) StackSegment(cx, seg_, space().seg_, regs);
    space().seg_ = seg_;
    *pushedSeg = true;
    return seg_->slotsBegin();
}
void
ContextStack::popSegment()
{
space().seg_ = seg_->prevInMemory();
seg_ = seg_->prevInContext();
if (!seg_)
cx_->maybeMigrateVersionOverride();
}
/*
 * Reserve and GC-safely initialize stack space for an Invoke call: 2 slots
 * (callee, this) plus argc argument slots. On success the guard 'iag' views
 * the reserved range as CallArgs and will pop it on destruction.
 */
bool
ContextStack::pushInvokeArgs(JSContext *cx, unsigned argc, InvokeArgsGuard *iag,
                             MaybeReportError report)
{
    JS_ASSERT(argc <= StackSpace::ARGS_LENGTH_MAX);

    unsigned nvars = 2 + argc;
    Value *firstUnused = ensureOnTop(cx, report, nvars, CAN_EXTEND, &iag->pushedSeg_);
    if (!firstUnused)
        return false;

    /* Initialize the slots so a GC before the call sees valid Values. */
    MakeRangeGCSafe(firstUnused, nvars);

    ImplicitCast<CallArgs>(*iag) = CallArgsFromVp(argc, firstUnused);

    /* Record the args end so assertions can validate the stack top. */
    seg_->pushInvokeArgsEnd(iag->end(), &iag->prevInvokeArgsEnd_);

    JS_ASSERT(space().firstUnused() == iag->end());
    iag->setPushed(*this);
    return true;
}
/*
 * Undo pushInvokeArgs: restore the previous invoke-args end, pop the segment
 * if one was pushed for this call, and poison the vacated range (debug only).
 */
void
ContextStack::popInvokeArgs(const InvokeArgsGuard &iag)
{
    JS_ASSERT(iag.pushed());
    JS_ASSERT(onTop());
    JS_ASSERT(space().firstUnused() == seg_->invokeArgsEnd());

    Value *oldend = seg_->end();

    seg_->popInvokeArgsEnd(iag.prevInvokeArgsEnd_);

    if (iag.pushedSeg_)
        popSegment();

    Debug_SetValueRangeToCrashOnTouch(space().firstUnused(), oldend);
}
/*
 * NOTE(review): this region appears to interleave two revisions of
 * pushInvokeFrame (diff residue): 'fun', 'fp', initCallFrame, and setPushed
 * each occur twice with incompatible signatures, and 'mark' is undeclared.
 * It cannot compile as written — reconcile against the upstream revision.
 */
StackFrame *
ContextStack::pushInvokeFrame(JSContext *cx, MaybeReportError report,
                              const CallArgs &args, JSFunction *funArg,
                              InitialFrameFlags initial, FrameGuard *fg)
{
    JS_ASSERT(onTop());
    JS_ASSERT(space().firstUnused() == args.end());
    RootedFunction fun(cx, funArg);
    RootedFunction fun(cx, &args.callee().as<JSFunction>());
    RootedScript script(cx, fun->nonLazyScript());
    StackFrame::Flags flags = ToFrameFlags(initial);
    StackFrame *fp = getCallFrame(cx, report, args, fun, script, &flags);
    Value *argv;
    StackFrame *fp = getCallFrame(cx, args, script, &flags, &argv);
    if (!fp)
        return NULL;
    fp->initCallFrame(cx, *fun, script, args.length(), flags);
    fg->regs_.prepareToRun(*fp, script);
    fg->prevRegs_ = seg_->pushRegs(fg->regs_);
    JS_ASSERT(space().firstUnused() == fg->regs_.sp);
    fg->setPushed(*this);
    fp->mark_ = mark;
    fp->initCallFrame(cx, NULL, NULL, NULL, *fun, script, argv, args.length(), flags);
    fg->setPushed(*this, fp);
    return fp;
}
/*
 * NOTE(review): diff residue — the old ContextStack::pushInvokeFrame wrapper
 * and ContextStack::pushExecuteFrame are interleaved here with the new
 * InterpreterStack::pushExecuteFrame (two signatures, two 'nvars', two 'fp'
 * declarations, mismatched NULL/false returns). Not compilable as written;
 * reconcile against the upstream revision before relying on this text.
 */
bool
ContextStack::pushInvokeFrame(JSContext *cx, const CallArgs &args,
                              InitialFrameFlags initial, InvokeFrameGuard *ifg)
StackFrame *
InterpreterStack::pushExecuteFrame(JSContext *cx, HandleScript script, const Value &thisv,
                                   HandleObject scopeChain, ExecuteType type,
                                   AbstractFramePtr evalInFrame, FrameGuard *fg)
{
    JSObject &callee = args.callee();
    JSFunction *fun = &callee.as<JSFunction>();
    if (!pushInvokeFrame(cx, REPORT_ERROR, args, fun, initial, ifg))
        return false;
    return true;
}
    LifoAlloc::Mark mark = allocator_.mark();
bool
ContextStack::pushExecuteFrame(JSContext *cx, HandleScript script, const Value &thisv,
                               HandleObject scopeChain, ExecuteType type,
                               AbstractFramePtr evalInFrame, ExecuteFrameGuard *efg)
{
    /*
     * Even though global code and indirect eval do not execute in the context
     * of the current frame, prev-link these to the current frame so that the
     * callstack looks right to the debugger (via CAN_EXTEND). This is safe
     * since the scope chain is what determines name lookup and access, not
     * prev-links.
     *
     * Eval-in-frame is the exception since it prev-links to an arbitrary frame
     * (possibly in the middle of some previous segment). Thus pass CANT_EXTEND
     * (to start a new segment) and link the frame and call chain manually
     * below. If |evalInFrame| is a baseline JIT frame, prev-link to its entry
     * frame.
     */
    MaybeExtend extend;
    StackFrame *prevLink;
    AbstractFramePtr prev = NullFramePtr();
    if (evalInFrame) {
        JS_ASSERT_IF(evalInFrame.isStackFrame(), !evalInFrame.asStackFrame()->runningInJit());
        prevLink = NULL;
        prev = evalInFrame;
        extend = CANT_EXTEND;
    } else {
        prevLink = maybefp();
        extend = CAN_EXTEND;
        ScriptFrameIter iter(cx);
        if (!iter.done())
            prev = iter.isIon() ? maybefp() : iter.abstractFramePtr();
    }
    unsigned nvars = 2 /* callee, this */ + script->nslots;
    uint8_t *buffer = allocateFrame(cx, sizeof(StackFrame) + nvars * sizeof(Value));
    if (!buffer)
        return NULL;
    unsigned nvars = 2 /* callee, this */ + VALUES_PER_STACK_FRAME + script->nslots;
    Value *firstUnused = ensureOnTop(cx, REPORT_ERROR, nvars, extend, &efg->pushedSeg_);
    if (!firstUnused)
        return false;
    StackFrame *fp = reinterpret_cast<StackFrame *>(firstUnused + 2);
    fp->initExecuteFrame(script, prevLink, prev, seg_->maybeRegs(), thisv, *scopeChain, type);
    StackFrame *fp = reinterpret_cast<StackFrame *>(buffer + 2 * sizeof(Value));
    fp->mark_ = mark;
    fp->initExecuteFrame(cx, script, evalInFrame, thisv, *scopeChain, type);
    fp->initVarsToUndefined();
    efg->regs_.prepareToRun(*fp, script);
    efg->prevRegs_ = seg_->pushRegs(efg->regs_);
    JS_ASSERT(space().firstUnused() == efg->regs_.sp);
    efg->setPushed(*this);
    return true;
}
/*
 * Pop the frame pushed under guard 'fg': restore the previous FrameRegs,
 * pop the segment if the push created one, and poison the vacated range.
 */
void
ContextStack::popFrame(const FrameGuard &fg)
{
    JS_ASSERT(fg.pushed());
    JS_ASSERT(onTop());
    JS_ASSERT(space().firstUnused() == fg.regs_.sp);
    JS_ASSERT(&fg.regs_ == &seg_->regs());

    Value *oldend = seg_->end();

    seg_->popRegs(fg.prevRegs_);
    if (fg.pushedSeg_)
        popSegment();

    Debug_SetValueRangeToCrashOnTouch(space().firstUnused(), oldend);
}
/*
 * Resume a generator: copy its heap-floating frame (args snapshot, frame
 * header, slots) back onto the VM stack and make it the current frame.
 * Layout on the stack: [vplen arg values][StackFrame][nslots values].
 */
bool
ContextStack::pushGeneratorFrame(JSContext *cx, JSGenerator *gen, GeneratorFrameGuard *gfg)
{
    HeapValue *genvp = gen->stackSnapshot;
    JS_ASSERT(genvp == HeapValueify(gen->fp->generatorArgsSnapshotBegin()));
    unsigned vplen = HeapValueify(gen->fp->generatorArgsSnapshotEnd()) - genvp;

    /* Need room for the args snapshot, the frame header, and its slots. */
    unsigned nvars = vplen + VALUES_PER_STACK_FRAME + gen->fp->script()->nslots;
    Value *firstUnused = ensureOnTop(cx, REPORT_ERROR, nvars, CAN_EXTEND, &gfg->pushedSeg_);
    if (!firstUnused)
        return false;

    StackFrame *stackfp = reinterpret_cast<StackFrame *>(firstUnused + vplen);
    Value *stackvp = (Value *)stackfp - vplen;

    /* Save this for popGeneratorFrame. */
    gfg->gen_ = gen;
    gfg->stackvp_ = stackvp;

    /*
     * Trigger incremental barrier on the floating frame's generator object.
     * This is normally traced through only by associated arguments/call
     * objects, but only when the generator is not actually on the stack.
     * We don't need to worry about generational barriers as the generator
     * object has a trace hook and cannot be nursery allocated.
     */
    JS_ASSERT(gen->obj->getClass()->trace);
    JSObject::writeBarrierPre(gen->obj);

    /* Copy from the generator's floating frame to the stack. */
    stackfp->copyFrameAndValues<StackFrame::NoPostBarrier>(cx, stackvp, gen->fp,
                                                           Valueify(genvp), gen->regs.sp);
    stackfp->resetGeneratorPrev(cx);
    gfg->regs_.rebaseFromTo(gen->regs, *stackfp);

    gfg->prevRegs_ = seg_->pushRegs(gfg->regs_);
    JS_ASSERT(space().firstUnused() == gfg->regs_.sp);
    gfg->setPushed(*this);
    return true;
}
/*
 * Suspend a generator: if the frame is yielding, copy it (and its values)
 * from the VM stack back into the generator's floating heap frame. The actual
 * stack pop is finished by ~FrameGuard/popFrame.
 */
void
ContextStack::popGeneratorFrame(const GeneratorFrameGuard &gfg)
{
    JSGenerator *gen = gfg.gen_;
    HeapValue *genvp = gen->stackSnapshot;

    const FrameRegs &stackRegs = gfg.regs_;
    StackFrame *stackfp = stackRegs.fp();
    Value *stackvp = gfg.stackvp_;

    /* Copy from the stack to the generator's floating frame. */
    if (stackfp->isYielding()) {
        /*
         * Assert that the frame is not markable so that we don't need an
         * incremental write barrier when updating the generator's saved slots.
         */
        JS_ASSERT(!GeneratorHasMarkableFrame(gen));

        gen->regs.rebaseFromTo(stackRegs, *gen->fp);
        gen->fp->copyFrameAndValues<StackFrame::DoPostBarrier>(cx_, (Value *)genvp, stackfp,
                                                               stackvp, stackRegs.sp);
    }

    /* ~FrameGuard/popFrame will finish the popping. */
    JS_ASSERT(ImplicitCast<const FrameGuard>(gfg).pushed());
}
/*
 * JS_SaveFrameChain support: push a fresh, empty segment (CANT_EXTEND forces
 * a new one) so that subsequent pushes do not link to the current frames.
 */
bool
ContextStack::saveFrameChain()
{
    bool pushedSeg;
    if (!ensureOnTop(cx_, REPORT_ERROR, 0, CANT_EXTEND, &pushedSeg))
        return false;

    JS_ASSERT(pushedSeg);
    JS_ASSERT(!hasfp());
    JS_ASSERT(onTop());
    JS_ASSERT(seg_->isEmpty());
    return true;
}
/*
 * JS_RestoreFrameChain support: pop the empty segment saveFrameChain pushed.
 *
 * NOTE(review): the trailing 'fg->setPushed(*this, fp); return fp;' lines
 * reference names not in scope here — they look like interleaved diff residue
 * from another function and would not compile; confirm against upstream.
 */
void
ContextStack::restoreFrameChain()
{
    JS_ASSERT(!hasfp());
    JS_ASSERT(onTop());
    JS_ASSERT(seg_->isEmpty());

    popSegment();
    fg->setPushed(*this, fp);
    return fp;
}
/*****************************************************************************/
@ -1278,9 +707,8 @@ ScriptFrameIter::operator++()
case DONE:
JS_NOT_REACHED("Unexpected state");
case SCRIPTED:
if (interpFrame()->isDebuggerFrame()) {
if (interpFrame()->isDebuggerFrame() && interpFrame()->evalInFramePrev()) {
AbstractFramePtr eifPrev = interpFrame()->evalInFramePrev();
JS_ASSERT(eifPrev);
// Eval-in-frame can cross contexts and works across saved frame
// chains.
@ -1493,9 +921,20 @@ ScriptFrameIter::updatePcQuadratic()
switch (data_.state_) {
case DONE:
break;
case SCRIPTED:
data_.pc_ = interpFrame()->pcQuadratic(data_.cx_);
case SCRIPTED: {
StackFrame *frame = interpFrame();
InterpreterActivation *activation = data_.activations_.activation()->asInterpreter();
// Look for the current frame.
data_.interpFrames_ = InterpreterFrameIterator(activation);
while (data_.interpFrames_.frame() != frame)
++data_.interpFrames_;
// Update the pc.
JS_ASSERT(data_.interpFrames_.frame() == frame);
data_.pc_ = data_.interpFrames_.pc();
return;
}
case JIT:
#ifdef JS_ION
if (data_.ionFrames_.isBaselineJS()) {
@ -1775,8 +1214,8 @@ ScriptFrameIter::numFrameSlots() const
}
case SCRIPTED:
JS_ASSERT(data_.cx_);
JS_ASSERT(data_.cx_->stack.regs().spForStackDepth(0) == interpFrame()->base());
return data_.cx_->stack.regs().sp - interpFrame()->base();
JS_ASSERT(data_.cx_->interpreterRegs().spForStackDepth(0) == interpFrame()->base());
return data_.cx_->interpreterRegs().sp - interpFrame()->base();
}
JS_NOT_REACHED("Unexpected state");
return 0;
@ -1880,8 +1319,15 @@ InterpreterFrameIterator &
InterpreterFrameIterator::operator++()
{
JS_ASSERT(!done());
pc_ = fp_->prevpc();
fp_ = (fp_ != activation_->entry_) ? fp_->prev() : NULL;
if (fp_ != activation_->entry_) {
pc_ = fp_->prevpc();
sp_ = fp_->prevsp();
fp_ = fp_->prev();
} else {
pc_ = NULL;
sp_ = NULL;
fp_ = NULL;
}
return *this;
}

Просмотреть файл

@ -19,17 +19,12 @@ namespace js {
class StackFrame;
class FrameRegs;
class StackSegment;
class StackSpace;
class ContextStack;
class InvokeArgsGuard;
class InvokeFrameGuard;
class FrameGuard;
class ExecuteFrameGuard;
class GeneratorFrameGuard;
class CallIter;
class ScriptFrameIter;
class AllFramesIter;
@ -39,77 +34,35 @@ class StaticBlockObject;
struct ScopeCoordinate;
/*****************************************************************************/
// VM stack layout
//
// A JSRuntime's stack consists of a linked list of activations. Every activation
// contains a number of scripted frames that are either running in the interpreter
// (InterpreterActivation) or JIT code (JitActivation). The frames inside a single
// activation are contiguous: whenever C++ calls back into JS, a new activation is
// pushed.
//
// Every activation is tied to a single JSContext and JSCompartment. This means we
// can reconstruct a given context's stack by skipping activations belonging to other
// contexts. This happens whenever an embedding enters the JS engine on cx1 and
// then, from a native called by the JS engine, reenters the VM on cx2.
/*
* VM stack layout
*
* SpiderMonkey uses a per-runtime stack to store the activation records,
* parameters, locals, and expression temporaries for the stack of actively
* executing scripts, functions and generators.
*
* The stack is subdivided into contiguous segments of memory which
* have a memory layout invariant that allows fixed offsets to be used for stack
* access (by jit code) as well as fast call/return. This memory layout is
* encapsulated by a set of types that describe different regions of memory.
* This encapsulation has holes: to avoid calling into C++ from generated code,
* JIT compilers generate code that simulates analogous operations in C++.
*
* A sample memory layout of a segment looks like:
*
* regs
* .------------------------------------------------.
* | V
* | fp .--FrameRegs--. sp
* | V V
* |StackSegment| values |StackFrame| values |StackFrame| values |
* | ^ |
* ? <-----------' `------------'
* prev prev
*
* A segment starts with a fixed-size header (js::StackSegment) which logically
* describes the segment, links it to the rest of the stack, and points to the
* end of the stack.
*
* Each script activation (global or function code) is given a fixed-size header
* (js::StackFrame) which is associated with the values before and after it.
* The frame contains bookkeeping information about the activation and links to
* the previous frame.
*
* The value preceding a (function) StackFrame in memory are the arguments of
* the call. The values after a StackFrame in memory are its locals followed by
* its expression stack. There is no clean line between the arguments of a
* frame and the expression stack of the previous frame since the top values of
* the expression become the arguments of a call. There are also layout
* invariants concerning the arguments and StackFrame; see "Arguments" comment
* in StackFrame for more details.
*
* The top of a segment's current frame's expression stack is pointed to by the
* segment's "current regs", which contains the stack pointer 'sp'. In the
* interpreter, sp is adjusted as individual values are pushed and popped from
* the stack and the FrameRegs struct (pointed by the StackSegment) is a local
* var of js::Interpret. Ideally, we'd like to remove all dependence on FrameRegs
* outside the interpreter.
*
* An additional feature (perhaps not for much longer: bug 650361) is that
* multiple independent "contexts" can interleave (LIFO) on a single contiguous
* stack. "Independent" here means that each context has its own callstack.
* Note, though, that eval-in-frame allows one context's callstack to join
* another context's callstack. Thus, in general, the structure of calls in a
* StackSpace is a forest.
*
* More concretely, an embedding may enter the JS engine on cx1 and then, from
* a native called by the JS engine, reenter the VM on cx2. Changing from cx1
* to cx2 causes a new segment to be started for cx2's stack on top of cx1's
* current segment. These two segments are linked from the perspective of
* StackSpace, since they are adjacent on the thread's stack, but not from the
* perspective of cx1 and cx2. Each independent stack is encapsulated and
* managed by the js::ContextStack object stored in JSContext. ContextStack
* is the primary interface to the rest of the engine for pushing and popping
* the stack.
*/
/*****************************************************************************/
// Interpreter frames (StackFrame)
//
// Each interpreter script activation (global or function code) is given a
// fixed-size header (js::StackFrame). The frame contains bookkeeping information
// about the activation and links to the previous frame.
//
// The values after a StackFrame in memory are its locals followed by its
// expression stack. StackFrame::argv_ points to the frame's arguments. Missing
// formal arguments are padded with |undefined|, so the number of arguments is
// always >= the number of formals.
//
// The top of an activation's current frame's expression stack is pointed to by the
// activation's "current regs", which contains the stack pointer 'sp'. In the
// interpreter, sp is adjusted as individual values are pushed and popped from
// the stack and the FrameRegs struct (pointed to by the InterpreterActivation)
// is a local var of js::Interpret.
enum MaybeCheckAliasing { CHECK_ALIASING = true, DONT_CHECK_ALIASING = false };
@ -281,18 +234,26 @@ class StackFrame
GENERATOR = 0x10, /* frame is associated with a generator */
CONSTRUCTING = 0x20, /* frame is for a constructor invocation */
/* Temporary frame states */
/*
* Generator frame state
*
* YIELDING and SUSPENDED are similar, but there are differences. After
* a generator yields, SendToGenerator immediately clears the YIELDING
* flag, but the frame will still have the SUSPENDED flag. Also, when the
* generator returns but before it's GC'ed, YIELDING is not set but
* SUSPENDED is.
*/
YIELDING = 0x40, /* Interpret dispatched JSOP_YIELD */
SUSPENDED = 0x80, /* Generator is not running. */
/* Function prologue state */
HAS_CALL_OBJ = 0x80, /* CallObject created for heavyweight fun */
HAS_ARGS_OBJ = 0x100, /* ArgumentsObject created for needsArgsObj script */
HAS_CALL_OBJ = 0x100, /* CallObject created for heavyweight fun */
HAS_ARGS_OBJ = 0x200, /* ArgumentsObject created for needsArgsObj script */
/* Lazy frame initialization */
HAS_HOOK_DATA = 0x200, /* frame has hookData_ set */
HAS_RVAL = 0x400, /* frame has rval_ set */
HAS_SCOPECHAIN = 0x800, /* frame has scopeChain_ set */
HAS_PREVPC = 0x1000, /* frame has prevpc_ and prevInline_ set */
HAS_HOOK_DATA = 0x400, /* frame has hookData_ set */
HAS_RVAL = 0x800, /* frame has rval_ set */
HAS_SCOPECHAIN = 0x1000, /* frame has scopeChain_ set */
HAS_BLOCKCHAIN = 0x2000, /* frame has blockChain_ set */
/* Debugger state */
@ -322,20 +283,28 @@ class StackFrame
JSScript *evalScript; /* the script of an eval-in-function */
} u;
mutable JSObject *scopeChain_; /* if HAS_SCOPECHAIN, current scope chain */
StackFrame *prev_; /* if HAS_PREVPC, previous cx->regs->fp */
Value rval_; /* if HAS_RVAL, return value of the frame */
StaticBlockObject *blockChain_; /* if HAS_BLOCKCHAIN, innermost let block */
ArgumentsObject *argsObj_; /* if HAS_ARGS_OBJ, the call's arguments object */
jsbytecode *prevpc_; /* if HAS_PREVPC, pc of previous frame*/
/*
* Previous frame and its pc and sp. Always NULL for InterpreterActivation's
* entry frame, always non-NULL for inline frames.
*/
StackFrame *prev_;
jsbytecode *prevpc_;
Value *prevsp_;
void *hookData_; /* if HAS_HOOK_DATA, closure returned by call hook */
AbstractFramePtr evalInFramePrev_; /* for an eval/debugger frame, the prev frame */
Value *argv_; /* If hasArgs(), points to frame's arguments. */
LifoAlloc::Mark mark_; /* Used to release memory for this frame. */
static void staticAsserts() {
JS_STATIC_ASSERT(offsetof(StackFrame, rval_) % sizeof(Value) == 0);
JS_STATIC_ASSERT(sizeof(StackFrame) % sizeof(Value) == 0);
}
inline void initPrev(JSContext *cx);
void writeBarrierPost();
/*
@ -348,30 +317,28 @@ class StackFrame
public:
Value *slots() const { return (Value *)(this + 1); }
Value *base() const { return slots() + script()->nfixed; }
Value *argv() const { return (Value *)this - Max(numActualArgs(), numFormalArgs()); }
Value *argv() const { return argv_; }
private:
friend class FrameRegs;
friend class ContextStack;
friend class StackSpace;
friend class InterpreterStack;
friend class ScriptFrameIter;
friend class CallObject;
friend class ClonedBlockObject;
friend class ArgumentsObject;
/*
* Frame initialization, called by ContextStack operations after acquiring
* Frame initialization, called by InterpreterStack operations after acquiring
* the raw memory for the frame:
*/
/* Used for Invoke, Interpret, trace-jit LeaveTree, and method-jit stubs. */
void initCallFrame(JSContext *cx, JSFunction &callee,
JSScript *script, uint32_t nactual, StackFrame::Flags flags);
/* Used for Invoke and Interpret. */
void initCallFrame(JSContext *cx, StackFrame *prev, jsbytecode *prevpc, Value *prevsp, JSFunction &callee,
JSScript *script, Value *argv, uint32_t nactual, StackFrame::Flags flags);
/* Used for eval. */
void initExecuteFrame(JSScript *script, StackFrame *prevLink, AbstractFramePtr prev,
FrameRegs *regs, const Value &thisv, JSObject &scopeChain,
ExecuteType type);
/* Used for global and eval frames. */
void initExecuteFrame(JSContext *cx, JSScript *script, AbstractFramePtr prev,
const Value &thisv, JSObject &scopeChain, ExecuteType type);
public:
/*
@ -481,8 +448,6 @@ class StackFrame
return evalInFramePrev_;
}
inline void resetGeneratorPrev(JSContext *cx);
/*
* (Unaliased) locals and arguments
*
@ -623,29 +588,18 @@ class StackFrame
: exec.script;
}
/*
* Get the frame's current bytecode, assuming 'this' is in 'stack'. Beware,
* as the name implies, pcQuadratic can lead to quadratic behavior in loops
* such as:
*
* for ( ...; fp; fp = fp->prev())
* ... fp->pcQuadratic(cx->stack);
*
* This can be avoided in three ways:
* - use ScriptFrameIter, it has O(1) iteration
* - if you know the next frame (i.e., next s.t. next->prev == fp
* - pcQuadratic will only iterate maxDepth frames (before giving up and
* returning fp->script->code), making it O(1), but incorrect.
*/
jsbytecode *pcQuadratic(const ContextStack &stack, size_t maxDepth = SIZE_MAX);
/* Return the previous frame's pc. Unlike pcQuadratic, this is O(1). */
/* Return the previous frame's pc. */
jsbytecode *prevpc() {
JS_ASSERT(flags_ & HAS_PREVPC);
JS_ASSERT(prev_);
return prevpc_;
}
/* Return the previous frame's sp. */
Value *prevsp() {
JS_ASSERT(prev_);
return prevsp_;
}
/*
* Function
*
@ -830,7 +784,7 @@ class StackFrame
Value *generatorArgsSnapshotEnd() const {
JS_ASSERT(isGeneratorFrame());
return (Value *)this;
return argv() + js::Max(numActualArgs(), numFormalArgs());
}
Value *generatorSlotsSnapshotBegin() const {
@ -934,6 +888,21 @@ class StackFrame
flags_ &= ~YIELDING;
}
bool isSuspended() const {
JS_ASSERT(isGeneratorFrame());
return flags_ & SUSPENDED;
}
void setSuspended() {
JS_ASSERT(isGeneratorFrame());
flags_ |= SUSPENDED;
}
void clearSuspended() {
JS_ASSERT(isGeneratorFrame());
flags_ &= ~SUSPENDED;
}
public:
static size_t offsetOfFlags() {
return offsetof(StackFrame, flags_);
@ -968,6 +937,7 @@ class StackFrame
public:
void mark(JSTracer *trc);
void markValues(JSTracer *trc, Value *sp);
// Entered Baseline/Ion from the interpreter.
bool runningInJit() const {
@ -1026,7 +996,7 @@ class FrameRegs
return fp_->base() + depth;
}
/* For generator: */
/* For generators. */
void rebaseFromTo(const FrameRegs &from, StackFrame &to) {
fp_ = &to;
sp = to.slots() + (from.sp - from.fp_->slots());
@ -1034,15 +1004,12 @@ class FrameRegs
JS_ASSERT(fp_);
}
/* For ContextStack: */
void popFrame(Value *newsp) {
void popInlineFrame() {
pc = fp_->prevpc();
sp = newsp;
sp = fp_->prevsp() - fp_->numActualArgs() - 1;
fp_ = fp_->prev();
JS_ASSERT(fp_);
}
/* For stubs::CompileFunction, ContextStack: */
void prepareToRun(StackFrame &fp, JSScript *script) {
pc = script->code;
sp = fp.slots() + script->nfixed;
@ -1059,415 +1026,104 @@ class FrameRegs
/*****************************************************************************/
class StackSegment
class InterpreterStack
{
JSContext *cx_;
friend class FrameGuard;
friend class InterpreterActivation;
/* Previous segment within same context stack. */
StackSegment *const prevInContext_;
const static size_t DEFAULT_CHUNK_SIZE = 4 * 1024;
LifoAlloc allocator_;
/* Previous segment sequentially in memory. */
StackSegment *const prevInMemory_;
// Number of interpreter frames on the stack, for over-recursion checks.
static const size_t MAX_FRAMES = 50 * 1000;
size_t frameCount_;
/* Execution registers for most recent script in this segment (or null). */
FrameRegs *regs_;
/* End of CallArgs pushed by pushInvokeArgs. */
Value *invokeArgsEnd_;
#if JS_BITS_PER_WORD == 32
/*
* Ensure StackSegment is Value-aligned. Protected to silence Clang warning
* about unused private fields.
*/
protected:
uint32_t padding_;
#endif
public:
StackSegment(JSContext *cx,
StackSegment *prevInContext,
StackSegment *prevInMemory,
FrameRegs *regs)
: cx_(cx),
prevInContext_(prevInContext),
prevInMemory_(prevInMemory),
regs_(regs),
invokeArgsEnd_(NULL)
{}
/* A segment is followed in memory by the arguments of the first call. */
Value *slotsBegin() const {
return (Value *)(this + 1);
}
/* Accessors. */
FrameRegs &regs() const {
JS_ASSERT(regs_);
return *regs_;
}
FrameRegs *maybeRegs() const {
return regs_;
}
StackFrame *fp() const {
return regs_->fp();
}
StackFrame *maybefp() const {
return regs_ ? regs_->fp() : NULL;
}
jsbytecode *maybepc() const {
return regs_ ? regs_->pc : NULL;
}
JSContext *cx() const {
return cx_;
}
StackSegment *prevInContext() const {
return prevInContext_;
}
StackSegment *prevInMemory() const {
return prevInMemory_;
}
void repointRegs(FrameRegs *regs) {
regs_ = regs;
}
bool isEmpty() const {
return !regs_;
}
bool contains(const StackFrame *fp) const;
bool contains(const FrameRegs *regs) const;
StackFrame *computeNextFrame(const StackFrame *fp, size_t maxDepth) const;
Value *end() const;
FrameRegs *pushRegs(FrameRegs &regs);
void popRegs(FrameRegs *regs);
Value *invokeArgsEnd() const {
return invokeArgsEnd_;
}
void pushInvokeArgsEnd(Value *end, Value **prev) {
*prev = invokeArgsEnd_;
invokeArgsEnd_ = end;
}
void popInvokeArgsEnd(Value *prev) {
invokeArgsEnd_ = prev;
}
};
static const size_t VALUES_PER_STACK_SEGMENT = sizeof(StackSegment) / sizeof(Value);
JS_STATIC_ASSERT(sizeof(StackSegment) % sizeof(Value) == 0);
/*****************************************************************************/
class StackSpace
{
StackSegment *seg_;
Value *base_;
mutable Value *conservativeEnd_;
#ifdef XP_WIN
mutable Value *commitEnd_;
#endif
Value *defaultEnd_;
Value *trustedEnd_;
void assertInvariants() const {
JS_ASSERT(base_ <= conservativeEnd_);
#ifdef XP_WIN
JS_ASSERT(conservativeEnd_ <= commitEnd_);
JS_ASSERT(commitEnd_ <= trustedEnd_);
#endif
JS_ASSERT(conservativeEnd_ <= defaultEnd_);
JS_ASSERT(defaultEnd_ <= trustedEnd_);
}
/* The total number of values/bytes reserved for the stack. */
static const size_t CAPACITY_VALS = 512 * 1024;
static const size_t CAPACITY_BYTES = CAPACITY_VALS * sizeof(Value);
/* How much of the stack is initially committed. */
static const size_t COMMIT_VALS = 16 * 1024;
static const size_t COMMIT_BYTES = COMMIT_VALS * sizeof(Value);
/* How much space is reserved at the top of the stack for trusted JS. */
static const size_t BUFFER_VALS = 16 * 1024;
static const size_t BUFFER_BYTES = BUFFER_VALS * sizeof(Value);
static void staticAsserts() {
JS_STATIC_ASSERT(CAPACITY_VALS % COMMIT_VALS == 0);
}
friend class AllFramesIter;
friend class ContextStack;
friend class StackFrame;
inline bool ensureSpace(JSContext *cx, MaybeReportError report,
Value *from, ptrdiff_t nvals) const;
JS_FRIEND_API(bool) ensureSpaceSlow(JSContext *cx, MaybeReportError report,
Value *from, ptrdiff_t nvals) const;
StackSegment &findContainingSegment(const StackFrame *target) const;
bool containsFast(StackFrame *fp) {
return (Value *)fp >= base_ && (Value *)fp <= trustedEnd_;
}
void markFrame(JSTracer *trc, StackFrame *fp, Value *slotsEnd);
public:
StackSpace();
bool init();
~StackSpace();
/*
* Maximum supported value of arguments.length. This bounds the maximum
* number of arguments that can be supplied to Function.prototype.apply.
* This value also bounds the number of elements parsed in an array
* initialiser.
*
* Since arguments are copied onto the stack, the stack size is the
* limiting factor for this constant. Use the max stack size (available to
* untrusted code) with an extra buffer so that, after such an apply, the
* callee can do a little work without OOMing.
*/
static const unsigned ARGS_LENGTH_MAX = CAPACITY_VALS - (2 * BUFFER_VALS);
/* See stack layout comment in Stack.h. */
inline Value *firstUnused() const { return seg_ ? seg_->end() : base_; }
StackSegment &containingSegment(const StackFrame *target) const;
/* Called during GC: mark segments, frames, and slots under firstUnused. */
void mark(JSTracer *trc);
/* Called during GC: sets active flag on compartments with active frames. */
void markActiveCompartments();
/*
* On Windows, report the committed size; on *nix, we report the resident
* size (which means that if part of the stack is swapped to disk, we say
* it's shrunk).
*/
JS_FRIEND_API(size_t) sizeOf();
#ifdef DEBUG
/* Only used in assertion of debuggers API. */
bool containsSlow(StackFrame *fp);
#endif
};
/*****************************************************************************/
class ContextStack
{
StackSegment *seg_;
StackSpace *const space_;
JSContext *cx_;
/*
* Return whether this ContextStack is at the top of the contiguous stack.
* This is a precondition for extending the current segment by pushing
* stack frames or overrides etc.
*
* NB: Just because a stack is onTop() doesn't mean there is necessarily
* a frame pushed on the stack. For this, use hasfp().
*/
bool onTop() const;
#ifdef DEBUG
void assertSpaceInSync() const;
#else
void assertSpaceInSync() const {}
#endif
/* Implementation details of push* public interface. */
StackSegment *pushSegment(JSContext *cx);
enum MaybeExtend { CAN_EXTEND = true, CANT_EXTEND = false };
Value *ensureOnTop(JSContext *cx, MaybeReportError report, unsigned nvars,
MaybeExtend extend, bool *pushedSeg);
inline uint8_t *allocateFrame(JSContext *cx, size_t size);
inline StackFrame *
getCallFrame(JSContext *cx, MaybeReportError report, const CallArgs &args,
JSFunction *fun, HandleScript script, StackFrame::Flags *pflags) const;
getCallFrame(JSContext *cx, const CallArgs &args, HandleScript script,
StackFrame::Flags *pflags, Value **pargv);
/* Make pop* functions private since only called by guard classes. */
void popSegment();
friend class InvokeArgsGuard;
void popInvokeArgs(const InvokeArgsGuard &iag);
friend class FrameGuard;
void popFrame(const FrameGuard &fg);
friend class GeneratorFrameGuard;
void popGeneratorFrame(const GeneratorFrameGuard &gfg);
friend class ScriptFrameIter;
void releaseFrame(StackFrame *fp) {
frameCount_--;
allocator_.release(fp->mark_);
}
public:
ContextStack(JSContext *cx);
~ContextStack();
InterpreterStack()
: allocator_(DEFAULT_CHUNK_SIZE),
frameCount_(0)
{ }
/*** Stack accessors ***/
~InterpreterStack() {
JS_ASSERT(frameCount_ == 0);
}
/*
* A context's stack is "empty" if there are no scripts or natives
* executing. Note that JS_SaveFrameChain does not factor into this definition.
*/
bool empty() const { return !seg_; }
// For execution of eval or global code.
StackFrame *pushExecuteFrame(JSContext *cx, HandleScript script, const Value &thisv,
HandleObject scopeChain, ExecuteType type,
AbstractFramePtr evalInFrame, FrameGuard *fg);
/*
* Return whether there has been at least one frame pushed since the most
* recent call to JS_SaveFrameChain. Note that natives do not have frames
* hence this query has little semantic meaning past "you can call fp()".
*/
inline bool hasfp() const { return seg_ && seg_->maybeRegs(); }
// Called to invoke a function.
StackFrame *pushInvokeFrame(JSContext *cx, const CallArgs &args, InitialFrameFlags initial,
FrameGuard *fg);
/*
* Return the most recent script activation's registers with the same
* caveat as hasfp regarding JS_SaveFrameChain.
*/
inline FrameRegs *maybeRegs() const { return seg_ ? seg_->maybeRegs() : NULL; }
inline StackFrame *maybefp() const { return seg_ ? seg_->maybefp() : NULL; }
/* Faster alternatives to maybe* functions. */
inline FrameRegs &regs() const { JS_ASSERT(hasfp()); return seg_->regs(); }
inline StackFrame *fp() const { JS_ASSERT(hasfp()); return seg_->fp(); }
/* The StackSpace currently hosting this ContextStack. */
StackSpace &space() const { return *space_; }
/*** Stack manipulation ***/
/*
* pushInvokeArgs allocates |argc + 2| rooted values that will be passed as
* the arguments to Invoke. A single allocation can be used for multiple
* Invoke calls. The InvokeArgsGuard passed to Invoke must come from
* an immediately-enclosing (stack-wise) call to pushInvokeArgs.
*/
bool pushInvokeArgs(JSContext *cx, unsigned argc, InvokeArgsGuard *ag,
MaybeReportError report = REPORT_ERROR);
StackFrame *pushInvokeFrame(JSContext *cx, MaybeReportError report,
const CallArgs &args, JSFunction *fun,
InitialFrameFlags initial, FrameGuard *fg);
/* Called by Invoke for a scripted function call. */
bool pushInvokeFrame(JSContext *cx, const CallArgs &args,
InitialFrameFlags initial, InvokeFrameGuard *ifg);
/* Called by Execute for execution of eval or global code. */
bool pushExecuteFrame(JSContext *cx, HandleScript script, const Value &thisv,
HandleObject scopeChain, ExecuteType type,
AbstractFramePtr evalInFrame, ExecuteFrameGuard *efg);
/*
* Called by SendToGenerator to resume a yielded generator. In addition to
* pushing a frame onto the VM stack, this function copies over the
* floating frame stored in 'gen'. When 'gfg' is destroyed, the destructor
* will copy the frame back to the floating frame.
*/
bool pushGeneratorFrame(JSContext *cx, JSGenerator *gen, GeneratorFrameGuard *gfg);
/*
* An "inline frame" may only be pushed from within the top, active
* segment. This is the case for calls made inside mjit code and Interpret.
* The 'stackLimit' overload updates 'stackLimit' if it changes.
*/
// The interpreter can push light-weight, "inline" frames without entering a
// new InterpreterActivation or recursively calling Interpret.
bool pushInlineFrame(JSContext *cx, FrameRegs &regs, const CallArgs &args,
HandleFunction callee, HandleScript script,
InitialFrameFlags initial,
MaybeReportError report = REPORT_ERROR);
bool pushInlineFrame(JSContext *cx, FrameRegs &regs, const CallArgs &args,
HandleFunction callee, HandleScript script,
InitialFrameFlags initial, Value **stackLimit);
HandleScript script, InitialFrameFlags initial);
void popInlineFrame(FrameRegs &regs);
/*
* Get the topmost script and optional pc on the stack. By default, this
* function only returns a JSScript in the current compartment, returning
* NULL if the current script is in a different compartment. This behavior
* can be overridden by passing ALLOW_CROSS_COMPARTMENT.
*/
enum MaybeAllowCrossCompartment {
DONT_ALLOW_CROSS_COMPARTMENT = false,
ALLOW_CROSS_COMPARTMENT = true
};
inline JSScript *currentScript(jsbytecode **pc = NULL,
MaybeAllowCrossCompartment = DONT_ALLOW_CROSS_COMPARTMENT) const;
inline void purge(JSRuntime *rt);
/* Get the scope chain for the topmost scripted call on the stack. */
inline HandleObject currentScriptedScopeChain() const;
bool saveFrameChain();
void restoreFrameChain();
/*
* As an optimization, the interpreter/mjit can operate on a local
* FrameRegs instance repoint the ContextStack to this local instance.
*/
inline void repointRegs(FrameRegs *regs) { JS_ASSERT(hasfp()); seg_->repointRegs(regs); }
size_t sizeOfExcludingThis(JSMallocSizeOfFun mallocSizeOf) const {
return allocator_.sizeOfExcludingThis(mallocSizeOf);
}
};
void MarkInterpreterActivations(JSRuntime *rt, JSTracer *trc);
/*****************************************************************************/
class InvokeArgsGuard : public JS::CallArgs
class InvokeArgs : public JS::CallArgs
{
friend class ContextStack;
ContextStack *stack_;
Value *prevInvokeArgsEnd_;
bool pushedSeg_;
void setPushed(ContextStack &stack) { JS_ASSERT(!pushed()); stack_ = &stack; }
AutoValueVector v_;
public:
InvokeArgsGuard() : CallArgs(), stack_(NULL), prevInvokeArgsEnd_(NULL), pushedSeg_(false) {}
~InvokeArgsGuard() { if (pushed()) stack_->popInvokeArgs(*this); }
bool pushed() const { return !!stack_; }
void pop() { stack_->popInvokeArgs(*this); stack_ = NULL; }
InvokeArgs(JSContext *cx) : v_(cx) {}
bool init(unsigned argc) {
if (!v_.resize(2 + argc))
return false;
ImplicitCast<CallArgs>(*this) = CallArgsFromVp(argc, v_.begin());
return true;
}
};
class RunState;
class FrameGuard
{
protected:
friend class ContextStack;
ContextStack *stack_;
bool pushedSeg_;
FrameRegs regs_;
FrameRegs *prevRegs_;
void setPushed(ContextStack &stack) { stack_ = &stack; }
friend class InterpreterStack;
RunState &state_;
FrameRegs &regs_;
InterpreterStack *stack_;
StackFrame *fp_;
void setPushed(InterpreterStack &stack, StackFrame *fp) {
stack_ = &stack;
fp_ = fp;
}
public:
FrameGuard() : stack_(NULL), pushedSeg_(false) {}
~FrameGuard() { if (pushed()) stack_->popFrame(*this); }
bool pushed() const { return !!stack_; }
void pop() { stack_->popFrame(*this); stack_ = NULL; }
FrameGuard(RunState &state, FrameRegs &regs);
~FrameGuard();
StackFrame *fp() const { return regs_.fp(); }
};
class InvokeFrameGuard : public FrameGuard
{};
class ExecuteFrameGuard : public FrameGuard
{};
class DummyFrameGuard : public FrameGuard
{};
class GeneratorFrameGuard : public FrameGuard
{
friend class ContextStack;
JSGenerator *gen_;
Value *stackvp_;
public:
~GeneratorFrameGuard() { if (pushed()) stack_->popGeneratorFrame(*this); }
StackFrame *fp() const {
JS_ASSERT(fp_);
return fp_;
}
};
template <>
@ -1491,15 +1147,6 @@ namespace ion {
class JitActivation;
};
// A JSRuntime's stack consists of a linked list of activations. Every activation
// contains a number of scripted frames that are either running in the interpreter
// (InterpreterActivation) or JIT code (JitActivation). The frames inside a single
// activation are contiguous: whenever C++ calls back into JS, a new activation is
// pushed.
//
// Every activation is tied to a single JSContext and JSCompartment. This means we
// can construct a given context's stack by skipping activations belonging to other
// contexts.
class Activation
{
protected:
@ -1579,21 +1226,18 @@ class InterpreterActivation : public Activation
StackFrame *current_; // The most recent frame.
FrameRegs &regs_;
#ifdef DEBUG
size_t oldFrameCount_;
#endif
public:
inline InterpreterActivation(JSContext *cx, StackFrame *entry, FrameRegs &regs);
inline ~InterpreterActivation();
void pushFrame(StackFrame *frame) {
JS_ASSERT(frame->script()->compartment() == compartment_);
current_ = frame;
}
void popFrame(StackFrame *frame) {
JS_ASSERT(current_ == frame);
JS_ASSERT(current_ != entry_);
inline bool pushInlineFrame(const CallArgs &args, HandleScript script,
InitialFrameFlags initial);
inline void popInlineFrame(StackFrame *frame);
current_ = frame->prev();
JS_ASSERT(current_);
}
StackFrame *current() const {
JS_ASSERT(current_);
return current_;
@ -1688,16 +1332,19 @@ class InterpreterFrameIterator
InterpreterActivation *activation_;
StackFrame *fp_;
jsbytecode *pc_;
Value *sp_;
public:
explicit InterpreterFrameIterator(InterpreterActivation *activation)
: activation_(activation),
fp_(NULL),
pc_(NULL)
pc_(NULL),
sp_(NULL)
{
if (activation) {
fp_ = activation->current();
pc_ = activation->regs_.pc;
sp_ = activation->regs_.sp;
}
}
@ -1709,6 +1356,10 @@ class InterpreterFrameIterator
JS_ASSERT(!done());
return pc_;
}
Value *sp() const {
JS_ASSERT(!done());
return sp_;
}
InterpreterFrameIterator &operator++();
@ -1769,7 +1420,6 @@ class ScriptFrameIter
Data(const Data &other);
};
friend class ContextStack;
friend class ::JSBrokenFrameIterator;
private:
Data data_;

Просмотреть файл

@ -2029,15 +2029,9 @@ ReportJSRuntimeExplicitTreeStats(const JS::RuntimeStats &rtStats,
nsIMemoryReporter::KIND_NONHEAP, rtStats.runtime.regexpData,
"Memory used by the regexp JIT to hold data.");
RREPORT_BYTES(rtPath + NS_LITERAL_CSTRING("runtime/stack"),
nsIMemoryReporter::KIND_NONHEAP, rtStats.runtime.stack,
"Memory used for the JS call stack. This is the committed "
"portion of the stack on Windows; on *nix, it is the resident "
"portion of the stack. Therefore, on *nix, if part of the "
"stack is swapped out to disk, we do not count it here.\n\n"
"Note that debug builds usually have stack poisoning enabled, "
"which causes the whole stack to be committed (and likely "
"resident).");
RREPORT_BYTES(rtPath + NS_LITERAL_CSTRING("runtime/interpreter-stack"),
nsIMemoryReporter::KIND_HEAP, rtStats.runtime.interpreterStack,
"Memory used for JS interpreter frames.");
RREPORT_BYTES(rtPath + NS_LITERAL_CSTRING("runtime/gc-marker"),
nsIMemoryReporter::KIND_HEAP, rtStats.runtime.gcMarker,