Bug 540706 - use contiguous buffer for stack frames and slots (r=waldo)

This commit is contained in:
Luke Wagner 2010-03-03 17:52:26 -08:00
Parent 6190a5ea18
Commit b4cfe93147
29 changed files: 2235 additions and 1612 deletions

View file

@ -88,6 +88,7 @@
#include "jstypedarray.h"
#include "jsatominlines.h"
#include "jscntxtinlines.h"
#include "jsscopeinlines.h"
#include "jsobjinlines.h"
@ -2290,8 +2291,6 @@ JS_GC(JSContext *cx)
LeaveTrace(cx);
/* Don't nuke active arenas if executing or compiling. */
if (cx->stackPool.current == &cx->stackPool.first)
JS_FinishArenaPool(&cx->stackPool);
if (cx->tempPool.current == &cx->tempPool.first)
JS_FinishArenaPool(&cx->tempPool);
js_GC(cx, GC_NORMAL);
@ -4892,18 +4891,18 @@ JS_New(JSContext *cx, JSObject *ctor, uintN argc, jsval *argv)
// is not a simple variation of JSOP_CALL. We have to determine what class
// of object to create, create it, and clamp the return value to an object,
// among other details. js_InvokeConstructor does the hard work.
void *mark;
jsval *vp = js_AllocStack(cx, 2 + argc, &mark);
if (!vp)
InvokeArgsGuard args;
if (!cx->stack().pushInvokeArgs(cx, argc, args))
return NULL;
jsval *vp = args.getvp();
vp[0] = OBJECT_TO_JSVAL(ctor);
vp[1] = JSVAL_NULL;
memcpy(vp + 2, argv, argc * sizeof(jsval));
JSBool ok = js_InvokeConstructor(cx, argc, JS_TRUE, vp);
JSBool ok = js_InvokeConstructor(cx, args, JS_TRUE);
JSObject *obj = ok ? JSVAL_TO_OBJECT(vp[0]) : NULL;
js_FreeStack(cx, mark);
LAST_FRAME_CHECKS(cx, ok);
return obj;
}

View file

@ -112,6 +112,7 @@
#include "jsatominlines.h"
#include "jsobjinlines.h"
#include "jscntxtinlines.h"
using namespace js;
@ -1822,11 +1823,16 @@ js_MergeSort(void *src, size_t nel, size_t elsize,
return JS_TRUE;
}
typedef struct CompareArgs {
JSContext *context;
jsval fval;
jsval *elemroot; /* stack needed for js_Invoke */
} CompareArgs;
struct CompareArgs
{
JSContext *context;
jsval fval;
InvokeArgsGuard args;
CompareArgs(JSContext *cx, jsval fval)
: context(cx), fval(fval)
{}
};
static JS_REQUIRES_STACK JSBool
sort_compare(void *arg, const void *a, const void *b, int *result)
@ -1834,9 +1840,8 @@ sort_compare(void *arg, const void *a, const void *b, int *result)
jsval av = *(const jsval *)a, bv = *(const jsval *)b;
CompareArgs *ca = (CompareArgs *) arg;
JSContext *cx = ca->context;
jsval *invokevp, *sp;
/**
/*
* array_sort deals with holes and undefs on its own and they should not
* come here.
*/
@ -1846,14 +1851,14 @@ sort_compare(void *arg, const void *a, const void *b, int *result)
if (!JS_CHECK_OPERATION_LIMIT(cx))
return JS_FALSE;
invokevp = ca->elemroot;
sp = invokevp;
jsval *invokevp = ca->args.getvp();
jsval *sp = invokevp;
*sp++ = ca->fval;
*sp++ = JSVAL_NULL;
*sp++ = av;
*sp++ = bv;
if (!js_Invoke(cx, 2, invokevp, 0))
if (!js_Invoke(cx, ca->args, 0))
return JS_FALSE;
jsdouble cmp;
@ -2101,22 +2106,17 @@ array_sort(JSContext *cx, uintN argc, jsval *vp)
} while (++i != newlen);
}
} else {
void *mark;
LeaveTrace(cx);
CompareArgs ca;
ca.context = cx;
ca.fval = fval;
ca.elemroot = js_AllocStack(cx, 2 + 2, &mark);
if (!ca.elemroot)
CompareArgs ca(cx, fval);
if (!cx->stack().pushInvokeArgs(cx, 2, ca.args))
return false;
bool ok = !!js_MergeSort(vec, size_t(newlen), sizeof(jsval),
comparator_stack_cast(sort_compare),
&ca, mergesort_tmp);
js_FreeStack(cx, mark);
if (!ok)
if (!js_MergeSort(vec, size_t(newlen), sizeof(jsval),
comparator_stack_cast(sort_compare),
&ca, mergesort_tmp)) {
return false;
}
}
/*
@ -2811,15 +2811,8 @@ typedef enum ArrayExtraMode {
static JSBool
array_extra(JSContext *cx, ArrayExtraMode mode, uintN argc, jsval *vp)
{
JSObject *obj;
jsuint length, newlen;
jsval *argv, *elemroot, *invokevp, *sp;
JSBool ok, cond, hole;
JSObject *callable, *thisp, *newarr;
jsint start, end, step, i;
void *mark;
obj = JS_THIS_OBJECT(cx, vp);
JSObject *obj = JS_THIS_OBJECT(cx, vp);
jsuint length;
if (!obj || !js_GetLengthProperty(cx, obj, &length))
return JS_FALSE;
@ -2831,8 +2824,8 @@ array_extra(JSContext *cx, ArrayExtraMode mode, uintN argc, jsval *vp)
js_ReportMissingArg(cx, vp, 0);
return JS_FALSE;
}
argv = vp + 2;
callable = js_ValueToCallableObject(cx, &argv[0], JSV2F_SEARCH_STACK);
jsval *argv = vp + 2;
JSObject *callable = js_ValueToCallableObject(cx, &argv[0], JSV2F_SEARCH_STACK);
if (!callable)
return JS_FALSE;
@ -2840,11 +2833,13 @@ array_extra(JSContext *cx, ArrayExtraMode mode, uintN argc, jsval *vp)
* Set our initial return condition, used for zero-length array cases
* (and pre-size our map return to match our known length, for all cases).
*/
jsuint newlen;
JSObject *newarr;
#ifdef __GNUC__ /* quell GCC overwarning */
newlen = 0;
newarr = NULL;
#endif
start = 0, end = length, step = 1;
jsint start = 0, end = length, step = 1;
switch (mode) {
case REDUCE_RIGHT:
@ -2859,6 +2854,7 @@ array_extra(JSContext *cx, ArrayExtraMode mode, uintN argc, jsval *vp)
if (argc >= 2) {
*vp = argv[1];
} else {
JSBool hole;
do {
if (!GetArrayElement(cx, obj, start, &hole, vp))
return JS_FALSE;
@ -2894,6 +2890,7 @@ array_extra(JSContext *cx, ArrayExtraMode mode, uintN argc, jsval *vp)
if (length == 0)
return JS_TRUE;
JSObject *thisp;
if (argc > 1 && !REDUCE_MODE(mode)) {
if (!js_ValueToObject(cx, argv[1], &thisp))
return JS_FALSE;
@ -2908,17 +2905,21 @@ array_extra(JSContext *cx, ArrayExtraMode mode, uintN argc, jsval *vp)
*/
LeaveTrace(cx);
argc = 3 + REDUCE_MODE(mode);
elemroot = js_AllocStack(cx, 1 + 2 + argc, &mark);
if (!elemroot)
InvokeArgsGuard args;
if (!cx->stack().pushInvokeArgs(cx, argc, args))
return JS_FALSE;
MUST_FLOW_THROUGH("out");
ok = JS_TRUE;
invokevp = elemroot + 1;
JSBool ok = JS_TRUE;
JSBool cond;
jsval *invokevp = args.getvp();
for (i = start; i != end; i += step) {
AutoValueRooter tvr(cx);
for (jsint i = start; i != end; i += step) {
JSBool hole;
ok = JS_CHECK_OPERATION_LIMIT(cx) &&
GetArrayElement(cx, obj, i, &hole, elemroot);
GetArrayElement(cx, obj, i, &hole, tvr.addr());
if (!ok)
goto out;
if (hole)
@ -2926,21 +2927,21 @@ array_extra(JSContext *cx, ArrayExtraMode mode, uintN argc, jsval *vp)
/*
* Push callable and 'this', then args. We must do this for every
* iteration around the loop since js_Invoke uses spbase[0] for return
* value storage, while some native functions use spbase[1] for local
* iteration around the loop since js_Invoke uses invokevp[0] for return
* value storage, while some native functions use invokevp[1] for local
* rooting.
*/
sp = invokevp;
jsval *sp = invokevp;
*sp++ = OBJECT_TO_JSVAL(callable);
*sp++ = OBJECT_TO_JSVAL(thisp);
if (REDUCE_MODE(mode))
*sp++ = *vp;
*sp++ = *elemroot;
*sp++ = tvr.value();
*sp++ = INT_TO_JSVAL(i);
*sp++ = OBJECT_TO_JSVAL(obj);
/* Do the call. */
ok = js_Invoke(cx, argc, invokevp, 0);
ok = js_Invoke(cx, args, 0);
if (!ok)
break;
@ -2966,8 +2967,8 @@ array_extra(JSContext *cx, ArrayExtraMode mode, uintN argc, jsval *vp)
case FILTER:
if (!cond)
break;
/* The filter passed *elemroot, so push it onto our result. */
ok = SetArrayElement(cx, newarr, newlen++, *elemroot);
/* The element passed the filter, so push it onto our result. */
ok = SetArrayElement(cx, newarr, newlen++, tvr.value());
if (!ok)
goto out;
break;
@ -2987,7 +2988,6 @@ array_extra(JSContext *cx, ArrayExtraMode mode, uintN argc, jsval *vp)
}
out:
js_FreeStack(cx, mark);
if (ok && mode == FILTER)
ok = js_SetLengthProperty(cx, newarr, newlen);
return ok;

View file

@ -63,6 +63,7 @@
#include "jsatominlines.h"
#include "jsobjinlines.h"
#include "jsscopeinlines.h"
#include "jscntxtinlines.h"
using namespace avmplus;
using namespace nanojit;
@ -360,36 +361,35 @@ JS_REQUIRES_STACK JSBool FASTCALL
js_PopInterpFrame(JSContext* cx, TracerState* state)
{
JS_ASSERT(cx->fp && cx->fp->down);
JSInlineFrame* ifp = (JSInlineFrame*)cx->fp;
JSStackFrame* const fp = cx->fp;
/*
* Mirror frame popping code from inline_return in js_Interpret. There are
* some things we just don't want to handle. In those cases, the trace will
* MISMATCH_EXIT.
*/
if (ifp->hookData)
if (fp->hookData)
return JS_FALSE;
if (cx->version != ifp->callerVersion)
if (cx->version != fp->callerVersion)
return JS_FALSE;
if (cx->fp->flags & JSFRAME_CONSTRUCTING)
if (fp->flags & JSFRAME_CONSTRUCTING)
return JS_FALSE;
if (cx->fp->imacpc)
if (fp->imacpc)
return JS_FALSE;
if (cx->fp->blockChain)
if (fp->blockChain)
return JS_FALSE;
cx->fp->putActivationObjects(cx);
fp->putActivationObjects(cx);
/* Update display table. */
if (cx->fp->script->staticLevel < JS_DISPLAY_SIZE)
cx->display[cx->fp->script->staticLevel] = cx->fp->displaySave;
if (fp->script->staticLevel < JS_DISPLAY_SIZE)
cx->display[fp->script->staticLevel] = fp->displaySave;
/* Pop the frame and its memory. */
cx->fp = cx->fp->down;
JS_ASSERT(cx->fp->regs == &ifp->callerRegs);
cx->fp->regs = ifp->frame.regs;
JS_ARENA_RELEASE(&cx->stackPool, ifp->mark);
JSStackFrame *down = fp->down;
cx->stack().popInlineFrame(cx, fp, down);
JS_ASSERT(cx->fp == down && cx->fp->regs == &fp->callerRegs);
down->regs = fp->regs;
/* Update the inline call count. */
*state->inlineCallCountp = *state->inlineCallCountp - 1;

View file

@ -58,6 +58,7 @@
#include "jsexn.h"
#include "jsfun.h"
#include "jsgc.h"
#include "jsiter.h"
#include "jslock.h"
#include "jsmath.h"
#include "jsnum.h"
@ -71,6 +72,22 @@
#include "jsstr.h"
#include "jstracer.h"
#include "jscntxtinlines.h"
#ifdef XP_WIN
# include <windows.h>
#else
# include <unistd.h>
# include <sys/mman.h>
# if !defined(MAP_ANONYMOUS)
# if defined(MAP_ANON)
# define MAP_ANONYMOUS MAP_ANON
# else
# define MAP_ANONYMOUS 0
# endif
# endif
#endif
using namespace js;
static const size_t ARENA_HEADER_SIZE_HACK = 40;
@ -83,9 +100,10 @@ static void
MarkLocalRoots(JSTracer *trc, JSLocalRootStack *lrs);
#ifdef DEBUG
bool
CallStack::contains(JSStackFrame *fp)
JS_REQUIRES_STACK bool
CallStack::contains(const JSStackFrame *fp) const
{
JS_ASSERT(inContext());
JSStackFrame *start;
JSStackFrame *stop;
if (isSuspended()) {
@ -103,6 +121,298 @@ CallStack::contains(JSStackFrame *fp)
}
#endif
/*
 * Reserve the thread's single contiguous stack buffer.
 *
 * Windows: reserve CAPACITY_BYTES of address space but commit only the
 * first COMMIT_BYTES up front; bumpCommit() grows the committed region
 * on demand. POSIX: map the full capacity as anonymous read/write pages
 * (the OS commits lazily on first touch, so no explicit commit window).
 * Returns false if reservation/commit fails.
 */
bool
StackSpace::init()
{
void *p;
#ifdef XP_WIN
p = VirtualAlloc(NULL, CAPACITY_BYTES, MEM_RESERVE, PAGE_READWRITE);
if (!p)
return false;
/* Commit the initial window in place; VirtualAlloc returns the base on success. */
void *check = VirtualAlloc(p, COMMIT_BYTES, MEM_COMMIT, PAGE_READWRITE);
if (p != check)
return false;
base = reinterpret_cast<jsval *>(p);
commitEnd = base + COMMIT_VALS;
end = base + CAPACITY_VALS;
#else
JS_ASSERT(CAPACITY_BYTES % getpagesize() == 0);
p = mmap(NULL, CAPACITY_BYTES, PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
if (p == MAP_FAILED)
return false;
base = reinterpret_cast<jsval *>(p);
end = base + CAPACITY_VALS;
#endif
return true;
}
/*
 * Release the buffer reserved by init(). Windows requires a decommit of
 * the committed range followed by a release of the whole reservation;
 * POSIX unmaps the full capacity in one call.
 */
void
StackSpace::finish()
{
#ifdef XP_WIN
VirtualFree(base, (commitEnd - base) * sizeof(jsval), MEM_DECOMMIT);
VirtualFree(base, 0, MEM_RELEASE);
#else
munmap(base, CAPACITY_BYTES);
#endif
}
#ifdef XP_WIN
/*
 * Windows-only: grow the committed region so that [from, from + nvals)
 * is backed by committed pages. The request is rounded up to the next
 * COMMIT_VALS boundary. Returns false if VirtualAlloc cannot commit.
 * NOTE(review): this method is const yet assigns commitEnd — presumably
 * commitEnd is declared mutable in the class; confirm against jscntxt.h.
 */
JS_FRIEND_API(bool)
StackSpace::bumpCommit(jsval *from, ptrdiff_t nvals) const
{
JS_ASSERT(end - from >= nvals);
jsval *newCommit = commitEnd;
jsval *request = from + nvals;
/* Use a dumb loop; will probably execute once. */
JS_ASSERT((end - newCommit) % COMMIT_VALS == 0);
do {
newCommit += COMMIT_VALS;
JS_ASSERT((end - newCommit) >= 0);
} while (newCommit < request);
/* The cast is safe because CAPACITY_BYTES is small. */
int32 size = static_cast<int32>(newCommit - commitEnd) * sizeof(jsval);
if (!VirtualAlloc(commitEnd, size, MEM_COMMIT, PAGE_READWRITE))
return false;
commitEnd = newCommit;
return true;
}
#endif
/*
 * GC-mark every live jsval in the thread's stack buffer by walking the
 * callstack list newest-to-oldest. The local 'end' (which shadows the
 * member 'end') always holds the exclusive upper bound of live values
 * for the callstack being visited: firstUnused() for the newest, then
 * each callstack's previousCallStackEnd() for the ones below it.
 */
JS_REQUIRES_STACK void
StackSpace::mark(JSTracer *trc)
{
/*
 * The correctness/completeness of marking depends on the continuity
 * invariants described by the CallStack and StackSpace definitions.
 */
jsval *end = firstUnused();
for (CallStack *cs = currentCallStack; cs; cs = cs->getPreviousInThread()) {
if (cs->inContext()) {
/* This may be the only pointer to the initialVarObj. */
if (JSObject *varobj = cs->getInitialVarObj())
JS_CALL_OBJECT_TRACER(trc, varobj, "varobj");
/* Mark slots/args trailing off of the last stack frame. */
JSStackFrame *fp = cs->getCurrentFrame();
TraceValues(trc, fp->slots(), end, "stack");
/* Mark stack frames and slots/args between stack frames. */
JSStackFrame *initialFrame = cs->getInitialFrame();
for (JSStackFrame *f = fp; f != initialFrame; f = f->down) {
js_TraceStackFrame(trc, f);
TraceValues(trc, f->down->slots(), f->argEnd(), "stack");
}
/* Mark initialFrame stack frame and leading args. */
js_TraceStackFrame(trc, initialFrame);
TraceValues(trc, cs->getInitialArgBegin(), initialFrame->argEnd(), "stack");
} else {
/* Mark slots/args trailing off callstack. */
JS_ASSERT(end == cs->getInitialArgEnd());
TraceValues(trc, cs->getInitialArgBegin(), cs->getInitialArgEnd(), "stack");
}
end = cs->previousCallStackEnd();
}
}
/*
 * Push a fresh, not-yet-in-context CallStack header followed by room for
 * vp[0]/vp[1] (callee/this) plus argc argument slots. The slots are
 * zeroed immediately so the GC can safely scan them before the caller
 * fills them in. On success the guard 'ag' records cx/cs/argc/vp so the
 * space can be popped when the guard goes out of scope.
 */
JS_REQUIRES_STACK bool
StackSpace::pushInvokeArgs(JSContext *cx, uintN argc, InvokeArgsGuard &ag)
{
jsval *start = firstUnused();
uintN vplen = 2 + argc;
ptrdiff_t nvals = VALUES_PER_CALL_STACK + vplen;
if (!ensureSpace(cx, start, nvals))
return false;
jsval *vp = start + VALUES_PER_CALL_STACK;
jsval *vpend = vp + vplen;
memset(vp, 0, vplen * sizeof(jsval)); /* Init so GC-safe on exit. */
/* Placement-new the callstack header at the start of the allocation. */
CallStack *cs = new(start) CallStack;
cs->setInitialArgEnd(vpend);
cs->setPreviousInThread(currentCallStack);
currentCallStack = cs;
ag.cx = cx;
ag.cs = cs;
ag.argc = argc;
ag.vp = vp;
return true;
}
/* Friend-API entry point: forwards to the context's StackSpace. */
JS_REQUIRES_STACK JS_FRIEND_API(bool)
StackSpace::pushInvokeArgsFriendAPI(JSContext *cx, uintN argc,
InvokeArgsGuard &ag)
{
return cx->stack().pushInvokeArgs(cx, argc, ag);
}
/* Null cx marks the guard as empty, so the destructor is a no-op. */
InvokeFrameGuard::InvokeFrameGuard()
: cx(NULL), cs(NULL), fp(NULL)
{}
/*
* To maintain the 1 to 0..1 relationship between callstacks and js_Interpret
* activations, a callstack is pushed if one was not pushed for the arguments
* (viz., if the ternary InvokeArgsGuard constructor was used instead of the
* nullary constructor + pushInvokeArgs).
*/
/*
 * Reserve (but do not yet activate) space for a stack frame on top of
 * already-pushed invoke arguments. Two cases:
 *  - ag.cs non-null: pushInvokeArgs created a callstack for the args, so
 *    the frame is carved out after its initialArgEnd (plus nmissing slots
 *    for unsupplied formals and nfixed local slots).
 *  - ag.cs null: the args live at the current frame's sp (the in-interpreter
 *    case), so a new CallStack header is also placed, above nmissing slots.
 * fg.fp (and possibly fg.cs) are set; pushInvokeFrame completes the push.
 */
bool
StackSpace::getInvokeFrame(JSContext *cx, const InvokeArgsGuard &ag,
uintN nmissing, uintN nfixed,
InvokeFrameGuard &fg) const
{
if (ag.cs) {
JS_ASSERT(ag.cs == currentCallStack && !ag.cs->inContext());
jsval *start = ag.cs->getInitialArgEnd();
ptrdiff_t nvals = nmissing + VALUES_PER_STACK_FRAME + nfixed;
if (!ensureSpace(cx, start, nvals))
return false;
fg.fp = reinterpret_cast<JSStackFrame *>(start + nmissing);
return true;
}
assertIsCurrent(cx);
JS_ASSERT(currentCallStack->isActive());
jsval *start = cx->fp->regs->sp;
ptrdiff_t nvals = nmissing + VALUES_PER_CALL_STACK + VALUES_PER_STACK_FRAME + nfixed;
if (!ensureSpace(cx, start, nvals))
return false;
fg.cs = new(start + nmissing) CallStack;
fg.fp = reinterpret_cast<JSStackFrame *>(fg.cs + 1);
return true;
}
/*
 * Activate the frame reserved by getInvokeFrame. Exactly one of ag.cs /
 * fg.cs is set (XOR assert below): if getInvokeFrame created a new
 * callstack (fg.cs), link it into the thread list first. The frame is
 * down-linked to cx->fp and pushed into the context; setting fg.cx arms
 * the guard's destructor to pop it.
 */
JS_REQUIRES_STACK void
StackSpace::pushInvokeFrame(JSContext *cx, const InvokeArgsGuard &ag,
InvokeFrameGuard &fg)
{
JS_ASSERT(!!ag.cs ^ !!fg.cs);
JS_ASSERT_IF(ag.cs, ag.cs == currentCallStack && !ag.cs->inContext());
if (CallStack *cs = fg.cs) {
cs->setPreviousInThread(currentCallStack);
currentCallStack = cs;
}
JSStackFrame *fp = fg.fp;
fp->down = cx->fp;
cx->pushCallStackAndFrame(currentCallStack, fp);
currentCallStack->setInitialVarObj(NULL);
fg.cx = cx;
}
/*
 * Pop the invoke frame (and its callstack, if this guard pushed one) on
 * scope exit. A null cx means pushInvokeFrame never ran: do nothing.
 */
JS_REQUIRES_STACK
InvokeFrameGuard::~InvokeFrameGuard()
{
if (!cx)
return;
JS_ASSERT(fp && fp == cx->fp);
JS_ASSERT_IF(cs, cs == cx->stack().getCurrentCallStack());
cx->stack().popInvokeFrame(cx, cs);
}
/*
 * Undo pushInvokeFrame: pop the context's frame/callstack bookkeeping,
 * and unlink the thread-list callstack only if one was pushed for this
 * invoke (maybecs non-null).
 */
JS_REQUIRES_STACK void
StackSpace::popInvokeFrame(JSContext *cx, CallStack *maybecs)
{
assertIsCurrent(cx);
JS_ASSERT(currentCallStack->getInitialFrame() == cx->fp);
JS_ASSERT_IF(maybecs, maybecs == currentCallStack);
cx->popCallStackAndFrame();
if (maybecs)
currentCallStack = currentCallStack->getPreviousInThread();
}
/* Null cx marks the guard as empty, so the destructor is a no-op. */
ExecuteFrameGuard::ExecuteFrameGuard()
: cx(NULL), vp(NULL), fp(NULL)
{}
/* Pop the execute frame pushed by pushExecuteFrame, if any, on scope exit. */
JS_REQUIRES_STACK
ExecuteFrameGuard::~ExecuteFrameGuard()
{
if (!cx)
return;
JS_ASSERT(cx->activeCallStack() == cs);
JS_ASSERT(cx->fp == fp);
cx->stack().popExecuteFrame(cx);
}
/*
* To maintain a 1 to 0..1 relationship between callstacks and js_Interpret
* activations, we push a callstack even if it wasn't otherwise necessary.
*/
/*
 * Reserve space for a js_Execute activation: a new CallStack header,
 * vplen values, a stack frame, and nfixed local slots, laid out
 * contiguously from the first unused value. Fills fg but does not
 * activate anything; pushExecuteFrame completes the push.
 */
JS_REQUIRES_STACK bool
StackSpace::getExecuteFrame(JSContext *cx, JSStackFrame *down,
uintN vplen, uintN nfixed,
ExecuteFrameGuard &fg) const
{
jsval *start = firstUnused();
ptrdiff_t nvals = VALUES_PER_CALL_STACK + vplen + VALUES_PER_STACK_FRAME + nfixed;
if (!ensureSpace(cx, start, nvals))
return false;
fg.cs = new(start) CallStack;
fg.vp = start + VALUES_PER_CALL_STACK;
fg.fp = reinterpret_cast<JSStackFrame *>(fg.vp + vplen);
fg.down = down;
return true;
}
/*
 * Activate the callstack/frame reserved by getExecuteFrame: down-link the
 * frame, link the callstack into the thread list, push it into the
 * context, and record the entry varobj. Setting fg.cx arms the guard's
 * destructor to pop it.
 */
JS_REQUIRES_STACK void
StackSpace::pushExecuteFrame(JSContext *cx, ExecuteFrameGuard &fg,
JSObject *initialVarObj)
{
fg.fp->down = fg.down;
CallStack *cs = fg.cs;
cs->setPreviousInThread(currentCallStack);
currentCallStack = cs;
cx->pushCallStackAndFrame(cs, fg.fp);
cs->setInitialVarObj(initialVarObj);
fg.cx = cx;
}
/* Undo pushExecuteFrame: pop context bookkeeping and unlink the callstack. */
JS_REQUIRES_STACK void
StackSpace::popExecuteFrame(JSContext *cx)
{
assertIsCurrent(cx);
JS_ASSERT(cx->hasActiveCallStack());
cx->popCallStackAndFrame();
currentCallStack = currentCallStack->getPreviousInThread();
}
/*
 * Carve a CallStack header and a bare JSStackFrame out of the unused
 * region. Infallible by design: the assert documents that callers must
 * already have reserved at least this much headroom (no ensureSpace).
 */
JS_REQUIRES_STACK void
StackSpace::getSynthesizedSlowNativeFrame(JSContext *cx, CallStack *&cs, JSStackFrame *&fp)
{
jsval *start = firstUnused();
JS_ASSERT(size_t(end - start) >= VALUES_PER_CALL_STACK + VALUES_PER_STACK_FRAME);
cs = new(start) CallStack;
fp = reinterpret_cast<JSStackFrame *>(cs + 1);
}
/*
 * Push a frame synthesized for a slow native (no script, FUN_SLOW_NATIVE)
 * on top of the current frame, which must be interpreted. Mirrors
 * pushExecuteFrame's linking but with no initial varobj.
 */
JS_REQUIRES_STACK void
StackSpace::pushSynthesizedSlowNativeFrame(JSContext *cx, CallStack *cs, JSStackFrame *fp)
{
JS_ASSERT(cx->fp->fun->isInterpreted());
JS_ASSERT(!fp->script && FUN_SLOW_NATIVE(fp->fun));
fp->down = cx->fp;
cs->setPreviousInThread(currentCallStack);
currentCallStack = cs;
cx->pushCallStackAndFrame(cs, fp);
cs->setInitialVarObj(NULL);
}
/* Undo pushSynthesizedSlowNativeFrame; asserts the top frame is the slow-native one. */
JS_REQUIRES_STACK void
StackSpace::popSynthesizedSlowNativeFrame(JSContext *cx)
{
assertIsCurrent(cx);
JS_ASSERT(cx->hasActiveCallStack());
JS_ASSERT(currentCallStack->getInitialFrame() == cx->fp);
JS_ASSERT(!cx->fp->script && FUN_SLOW_NATIVE(cx->fp->fun));
cx->popCallStackAndFrame();
currentCallStack = currentCallStack->getPreviousInThread();
}
bool
JSThreadData::init()
{
@ -111,6 +421,8 @@ JSThreadData::init()
for (size_t i = 0; i != sizeof(*this); ++i)
JS_ASSERT(reinterpret_cast<uint8*>(this)[i] == 0);
#endif
if (!stackSpace.init())
return false;
#ifdef JS_TRACER
InitJIT(&traceMonitor);
#endif
@ -141,11 +453,13 @@ JSThreadData::finish()
#if defined JS_TRACER
FinishJIT(&traceMonitor);
#endif
stackSpace.finish();
}
void
JSThreadData::mark(JSTracer *trc)
{
stackSpace.mark(trc);
#ifdef JS_TRACER
traceMonitor.mark(trc);
#endif
@ -375,7 +689,8 @@ js_InitThreads(JSRuntime *rt)
return false;
}
#else
rt->threadData.init();
if (!rt->threadData.init())
return false;
#endif
return true;
}
@ -489,8 +804,6 @@ js_NewContext(JSRuntime *rt, size_t stackChunkSize)
JS_STATIC_ASSERT(JSVERSION_DEFAULT == 0);
JS_ASSERT(cx->version == JSVERSION_DEFAULT);
VOUCH_DOES_NOT_REQUIRE_STACK();
JS_InitArenaPool(&cx->stackPool, "stack", stackChunkSize, sizeof(jsval),
&cx->scriptStackQuota);
JS_InitArenaPool(&cx->tempPool, "temp", TEMP_POOL_CHUNK_SIZE, sizeof(jsdouble),
&cx->scriptStackQuota);
@ -850,7 +1163,6 @@ FreeContext(JSContext *cx)
/* Free the stuff hanging off of cx. */
js_FreeRegExpStatics(cx);
VOUCH_DOES_NOT_REQUIRE_STACK();
JS_FinishArenaPool(&cx->stackPool);
JS_FinishArenaPool(&cx->tempPool);
if (cx->lastMessage)
@ -1943,6 +2255,79 @@ js_CurrentPCIsInImacro(JSContext *cx)
#endif
}
/*
 * Construct a context bound to 'rt' with no frame; members that need a
 * back-pointer to the context (regExpStatics, busyArrays) get 'this'.
 * Remaining fields are presumably zero-initialized elsewhere — the body
 * is intentionally empty (TODO confirm against allocation site).
 */
JSContext::JSContext(JSRuntime *rt)
: runtime(rt),
fp(NULL),
regExpStatics(this),
busyArrays(this)
{}
/*
 * Make 'newcs' this context's current, active callstack with 'newfp' as
 * its initial/current frame. Any previously active callstack is first
 * suspended at the current fp; the new callstack is linked on top of the
 * per-context list and joined to this context.
 */
void
JSContext::pushCallStackAndFrame(js::CallStack *newcs, JSStackFrame *newfp)
{
if (hasActiveCallStack())
currentCallStack->suspend(fp);
newcs->setPreviousInContext(currentCallStack);
currentCallStack = newcs;
setCurrentFrame(newfp);
newcs->joinContext(this, newfp);
}
/*
 * Pop the current callstack from this context. The previous callstack
 * (if any) becomes current again: resume it unless it was saved via
 * JS_SaveFrameChain, in which case fp stays null until a restore. If no
 * callstack remains, fp is cleared (its down link must already be null).
 */
void
JSContext::popCallStackAndFrame()
{
JS_ASSERT(currentCallStack->maybeContext() == this);
JS_ASSERT(currentCallStack->getInitialFrame() == fp);
currentCallStack->leaveContext();
currentCallStack = currentCallStack->getPreviousInContext();
if (currentCallStack) {
if (currentCallStack->isSaved()) {
setCurrentFrame(NULL);
} else {
setCurrentFrame(currentCallStack->getSuspendedFrame());
currentCallStack->resume();
}
} else {
JS_ASSERT(fp->down == NULL);
setCurrentFrame(NULL);
}
}
/*
 * JS_SaveFrameChain support: mark the active callstack as saved (a
 * suspended state that survives pops of newer callstacks) and clear fp.
 */
void
JSContext::saveActiveCallStack()
{
JS_ASSERT(hasActiveCallStack());
currentCallStack->save(fp);
setCurrentFrame(NULL);
}
/*
 * JS_RestoreFrameChain support: reactivate the saved current callstack,
 * restoring fp to its suspended frame.
 */
void
JSContext::restoreCallStack()
{
JS_ASSERT(!hasActiveCallStack());
setCurrentFrame(currentCallStack->getSuspendedFrame());
currentCallStack->restore();
}
/*
 * Map a live (on-stack, non-floating) generator frame back to its
 * JSGenerator. Fast path: the innermost entry of genStack. Slow path
 * scans the whole stack and should only be hit via debugger APIs.
 * Returns NULL only after a JS_NOT_REACHED, i.e. the frame must match.
 */
JSGenerator *
JSContext::generatorFor(JSStackFrame *fp) const
{
JS_ASSERT(stack().contains(fp) && fp->isGenerator());
JS_ASSERT(!fp->isFloatingGenerator());
JS_ASSERT(!genStack.empty());
if (JS_LIKELY(fp == genStack.back()->liveFrame))
return genStack.back();
/* General case; should only be needed for debug APIs. */
for (size_t i = 0; i < genStack.length(); ++i) {
if (genStack[i]->liveFrame == fp)
return genStack[i];
}
JS_NOT_REACHED("no matching generator");
return NULL;
}
CallStack *
JSContext::containingCallStack(JSStackFrame *target)
{
@ -1960,11 +2345,11 @@ JSContext::containingCallStack(JSStackFrame *target)
if (f == target)
return cs;
}
cs = cs->getPrevious();
cs = cs->getPreviousInContext();
}
/* A suspended callstack's top frame is its suspended frame. */
for (; cs; cs = cs->getPrevious()) {
for (; cs; cs = cs->getPreviousInContext()) {
JSStackFrame *f = cs->getSuspendedFrame();
JSStackFrame *stop = cs->getInitialFrame()->down;
for (; f != stop; f = f->down) {
@ -2023,6 +2408,7 @@ JSContext::isConstructing()
return fp && (fp->flags & JSFRAME_CONSTRUCTING);
}
/*
* Release pool's arenas if the stackPool has existed for longer than the
* limit specified by gcEmptyArenaPoolLifespan.
@ -2041,7 +2427,6 @@ FreeOldArenas(JSRuntime *rt, JSArenaPool *pool)
void
JSContext::purge()
{
FreeOldArenas(runtime, &stackPool);
FreeOldArenas(runtime, &regexpPool);
classProtoCache.purge();
}

View file

@ -122,6 +122,7 @@ static const size_t MAX_NATIVE_STACK_SLOTS = 4096;
static const size_t MAX_CALL_STACK_ENTRIES = 500;
static const size_t MAX_GLOBAL_SLOTS = 4096;
static const size_t GLOBAL_SLOTS_BUFFER_SIZE = MAX_GLOBAL_SLOTS + 1;
static const size_t MAX_SLOW_NATIVE_EXTRA_SLOTS = 16;
/* Forward declarations of tracer types. */
class VMAllocator;
@ -228,25 +229,57 @@ struct GlobalState {
};
/*
* A callstack contains a set of stack frames linked by fp->down. A callstack
* is a member of a JSContext and all of a JSContext's callstacks are kept in a
* list starting at cx->currentCallStack. A callstack may be active or
* suspended. There are zero or one active callstacks for a context and any
* number of suspended contexts. If there is an active context, it is the first
* in the currentCallStack list, |cx->fp != NULL| and the callstack's newest
* (top) stack frame is |cx->fp|. For all other (suspended) callstacks, the
* newest frame is pointed to by suspendedFrame.
* Callstacks
*
* While all frames in a callstack are down-linked, not all down-linked frames
* are in the same callstack (e.g., calling js_Execute with |down != cx->fp|
* will create a new frame in a new active callstack).
* A callstack logically contains the (possibly empty) set of stack frames
* associated with a single activation of the VM and the slots associated with
* each frame. A callstack may or may not be "in" a context and a callstack is
* in a context iff its set of stack frames is nonempty. A callstack and its
* contained frames/slots also have an implied memory layout, as described in
* the js::StackSpace comment.
*
* The set of stack frames in a non-empty callstack start at the callstack's
* "current frame", which is the most recently pushed frame, and ends at the
* callstack's "initial frame". Note that, while all stack frames in a
* callstack are down-linked, not all down-linked frames are in the same
* callstack. Hence, for a callstack |cs|, |cs->getInitialFrame()->down| may be
* non-null and in a different callstack. This occurs when the VM reenters
* itself (via js_Invoke or js_Execute). In full generality, a single context
* may contain a forest of trees of stack frames. With respect to this forest,
* a callstack contains a linear path along a single tree, not necessarily to
* the root.
*
* A callstack in a context may additionally be "active" or "suspended". A
* suspended callstack |cs| has a "suspended frame" which serves as the current
* frame of |cs|. There is at most one active callstack in a given context.
* Callstacks in a context execute LIFO and are maintained in a stack. The top
* of this stack is the context's "current callstack". If a context |cx| has an
* active callstack |cs|, then:
* 1. |cs| is |cx|'s current callstack,
* 2. |cx->fp != NULL|, and
* 3. |cs|'s current frame is |cx->fp|.
* Moreover, |cx->fp != NULL| iff |cx| has an active callstack.
*
* Finally, (to support JS_SaveFrameChain/JS_RestoreFrameChain) a suspended
* callstack may or may not be "saved". Normally, when the active callstack is
* popped, the previous callstack (which is necessarily suspended) becomes
* active. If the previous callstack was saved, however, then it stays
* suspended until it is made active by a call to JS_RestoreFrameChain. This is
* why a context may have a current callstack, but not an active callstack.
*/
class CallStack
{
#ifdef DEBUG
/* The context to which this callstack belongs. */
JSContext *cx;
#endif
/* Link for JSContext callstack stack mentioned in big comment above. */
CallStack *previousInContext;
/* Link for StackSpace callstack stack mentioned in StackSpace comment. */
CallStack *previousInThread;
/* The first frame executed in this callstack. null iff cx is null */
JSStackFrame *initialFrame;
/* If this callstack is suspended, the top of the callstack. */
JSStackFrame *suspendedFrame;
@ -254,80 +287,450 @@ class CallStack
/* This callstack was suspended by JS_SaveFrameChain. */
bool saved;
/* Links members of the JSContext::currentCallStack list. */
CallStack *previous;
/* End of arguments before the first frame. See StackSpace comment. */
jsval *initialArgEnd;
/* The varobj on entry to initialFrame. */
JSObject *initialVarObj;
/* The first frame executed in this callstack. */
JSStackFrame *initialFrame;
public:
CallStack(JSContext *cx)
:
#ifdef DEBUG
cx(cx),
#endif
suspendedFrame(NULL), saved(false), previous(NULL),
initialVarObj(NULL), initialFrame(NULL)
CallStack()
: cx(NULL), previousInContext(NULL), previousInThread(NULL),
initialFrame(NULL), suspendedFrame(NULL), saved(false),
initialArgEnd(NULL), initialVarObj(NULL)
{}
#ifdef DEBUG
bool contains(JSStackFrame *fp);
#endif
/* Safe casts guaranteed by the contiguous-stack layout. */
void suspend(JSStackFrame *fp) {
JS_ASSERT(fp && !isSuspended() && contains(fp));
suspendedFrame = fp;
jsval *previousCallStackEnd() const {
return (jsval *)this;
}
void resume() {
JS_ASSERT(suspendedFrame);
suspendedFrame = NULL;
jsval *getInitialArgBegin() const {
return (jsval *)(this + 1);
}
JSStackFrame *getSuspendedFrame() const {
JS_ASSERT(suspendedFrame);
/*
* As described in the comment at the beginning of the class, a callstack
* is in one of three states:
*
* !inContext: the callstack has been created to root arguments for a
* future call to js_Invoke.
* isActive: the callstack describes a set of stack frames in a context,
* where the top frame currently executing.
* isSuspended: like isActive, but the top frame has been suspended.
*/
bool inContext() const {
JS_ASSERT(!!cx == !!initialFrame);
JS_ASSERT_IF(!initialFrame, !suspendedFrame && !saved);
return cx;
}
bool isActive() const {
JS_ASSERT_IF(suspendedFrame, inContext());
return initialFrame && !suspendedFrame;
}
bool isSuspended() const {
JS_ASSERT_IF(!suspendedFrame, !saved);
JS_ASSERT_IF(suspendedFrame, inContext());
return suspendedFrame;
}
bool isSuspended() const { return !!suspendedFrame; }
void setPrevious(CallStack *cs) { previous = cs; }
CallStack *getPrevious() const { return previous; }
void setInitialVarObj(JSObject *o) { initialVarObj = o; }
JSObject *getInitialVarObj() const { return initialVarObj; }
void setInitialFrame(JSStackFrame *f) { initialFrame = f; }
JSStackFrame *getInitialFrame() const { return initialFrame; }
/*
* Saving and restoring is a special case of suspending and resuming
* whereby the active callstack becomes suspended without pushing a new
* active callstack. This means that if a callstack c1 is pushed on top of a
* saved callstack c2, when c1 is popped, c2 must not be made active. In
* the normal case, where c2 is not saved, when c1 is popped, c2 is made
* active. This distinction is indicated by the |saved| flag.
*/
void save(JSStackFrame *fp) {
suspend(fp);
saved = true;
}
void restore() {
saved = false;
resume();
}
/* Substate of suspended, queryable in any state. */
bool isSaved() const {
JS_ASSERT_IF(saved, isSuspended());
return saved;
}
/* Transitioning between inContext <--> isActive */
void joinContext(JSContext *cx, JSStackFrame *f) {
JS_ASSERT(!inContext());
this->cx = cx;
initialFrame = f;
JS_ASSERT(isActive());
}
void leaveContext() {
JS_ASSERT(isActive());
this->cx = NULL;
initialFrame = NULL;
JS_ASSERT(!inContext());
}
JSContext *maybeContext() const {
return cx;
}
/* Transitioning between isActive <--> isSuspended */
void suspend(JSStackFrame *fp) {
JS_ASSERT(isActive());
JS_ASSERT(fp && contains(fp));
suspendedFrame = fp;
JS_ASSERT(isSuspended());
}
void resume() {
JS_ASSERT(isSuspended());
suspendedFrame = NULL;
JS_ASSERT(isActive());
}
/* When isSuspended, transitioning isSaved <--> !isSaved */
void save(JSStackFrame *fp) {
JS_ASSERT(!isSaved());
suspend(fp);
saved = true;
JS_ASSERT(isSaved());
}
void restore() {
JS_ASSERT(isSaved());
saved = false;
resume();
JS_ASSERT(!isSaved());
}
/* Data available when !inContext */
void setInitialArgEnd(jsval *v) {
JS_ASSERT(!inContext() && !initialArgEnd);
initialArgEnd = v;
}
jsval *getInitialArgEnd() const {
JS_ASSERT(!inContext() && initialArgEnd);
return initialArgEnd;
}
/* Data available when inContext */
JSStackFrame *getInitialFrame() const {
JS_ASSERT(inContext());
return initialFrame;
}
inline JSStackFrame *getCurrentFrame() const;
/* Data available when isSuspended. */
JSStackFrame *getSuspendedFrame() const {
JS_ASSERT(isSuspended());
return suspendedFrame;
}
/* JSContext / js::StackSpace bookkeeping. */
void setPreviousInContext(CallStack *cs) {
previousInContext = cs;
}
CallStack *getPreviousInContext() const {
return previousInContext;
}
void setPreviousInThread(CallStack *cs) {
previousInThread = cs;
}
CallStack *getPreviousInThread() const {
return previousInThread;
}
void setInitialVarObj(JSObject *obj) {
JS_ASSERT(inContext());
initialVarObj = obj;
}
JSObject *getInitialVarObj() const {
JS_ASSERT(inContext());
return initialVarObj;
}
#ifdef DEBUG
JS_REQUIRES_STACK bool contains(const JSStackFrame *fp) const;
#endif
};
static const size_t VALUES_PER_CALL_STACK = sizeof(CallStack) / sizeof(jsval);
JS_STATIC_ASSERT(sizeof(CallStack) % sizeof(jsval) == 0);
/*
* The ternary constructor is used when arguments are already pushed on the
* stack (as the sp of the current frame), which should only happen from within
* js_Interpret. Otherwise, see StackSpace::pushInvokeArgs.
*/
/*
 * RAII guard for argument slots pushed by StackSpace::pushInvokeArgs.
 * vp points at the callee/this pair followed by argc arguments; cs is
 * non-null only when a callstack header was pushed for these args.
 */
class InvokeArgsGuard
{
friend class StackSpace;
JSContext *cx;
CallStack *cs; /* null implies nothing pushed */
jsval *vp;
uintN argc;
public:
inline InvokeArgsGuard();
inline InvokeArgsGuard(jsval *vp, uintN argc);
inline ~InvokeArgsGuard();
jsval *getvp() const { return vp; }
uintN getArgc() const { JS_ASSERT(vp != NULL); return argc; }
};
/* See StackSpace::pushInvokeFrame. */
class InvokeFrameGuard
{
friend class StackSpace;
JSContext *cx; /* null implies nothing pushed */
CallStack *cs;
JSStackFrame *fp;
public:
InvokeFrameGuard();
JS_REQUIRES_STACK ~InvokeFrameGuard();
JSStackFrame *getFrame() const { return fp; }
};
/* See StackSpace::pushExecuteFrame. */
class ExecuteFrameGuard
{
friend class StackSpace;
JSContext *cx; /* null implies nothing pushed */
CallStack *cs;
jsval *vp;
JSStackFrame *fp;
JSStackFrame *down;
public:
ExecuteFrameGuard();
JS_REQUIRES_STACK ~ExecuteFrameGuard();
jsval *getvp() const { return vp; }
JSStackFrame *getFrame() const { return fp; }
};
/*
* Thread stack layout
*
* Each JSThreadData has one associated StackSpace object which allocates all
* callstacks for the thread. StackSpace performs all such allocations in a
* single, fixed-size buffer using a specific layout scheme that allows some
* associations between callstacks, frames, and slots to be implicit, rather
* than explicitly stored as pointers. To maintain useful invariants, stack
* space is not given out arbitrarily, but rather allocated/deallocated for
* specific purposes. The use cases currently supported are: calling a function
* with arguments (e.g. js_Invoke), executing a script (e.g. js_Execute) and
* inline interpreter calls. See associated member functions below.
*
* First, we consider the layout of individual callstacks. (See the
* js::CallStack comment for terminology.) A non-empty callstack (i.e., a
* callstack in a context) has the following layout:
*
* initial frame current frame -------. if regs,
* .------------. | | regs->sp
* | V V V
* |callstack| slots |frame| slots |frame| slots |frame| slots |
* | ^ | ^ |
* ? <----------' `----------' `----------'
* down down down
*
* Moreover, the bytes in the following ranges form a contiguous array of
* jsvals that are marked during GC:
* 1. between a callstack and its first frame
* 2. between two adjacent frames in a callstack
* 3. between a callstack's current frame and (if fp->regs) fp->regs->sp
* Thus, the VM must ensure that all such jsvals are safe to be marked.
*
* An empty callstack roots the initial slots before the initial frame is
* pushed and after the initial frame has been popped (perhaps to be followed
* by subsequent initial frame pushes/pops...).
*
* initialArgEnd
* .---------.
* | V
* |callstack| slots |
*
* Above the level of callstacks, a StackSpace is simply a contiguous sequence
* of callstacks kept in a linked list:
*
* base currentCallStack firstUnused end
* | | | |
* V V V V
* |callstack| --- |callstack| --- |callstack| --- | |
* | ^ | ^ |
* 0 <----' `------------' `------------'
* previous previous previous
*
* Both js::StackSpace and JSContext maintain a stack of callstacks, the top of
* which is the "current callstack" for that thread or context, respectively.
* Since different contexts can arbitrarily interleave execution in a single
* thread, these stacks are different enough that a callstack needs both
* "previousInThread" and "previousInContext".
*
* For example, in a single thread, a function in callstack C1 in a context CX1
* may call out into C++ code that reenters the VM in a context CX2, which
* creates a new callstack C2 in CX2, and CX1 may or may not equal CX2.
*
* Note that there is some structure to this interleaving of callstacks:
* 1. the inclusion from callstacks in a context to callstacks in a thread
* preserves order (in terms of previousInContext and previousInThread,
* respectively).
* 2. the mapping from stack frames to their containing callstack preserves
* order (in terms of down and previousInContext, respectively).
*/
class StackSpace
{
jsval *base;
#ifdef XP_WIN
mutable jsval *commitEnd;
#endif
jsval *end;
CallStack *currentCallStack;
/* Although guards are friends, XGuard should only call popX(). */
friend class InvokeArgsGuard;
JS_REQUIRES_STACK inline void popInvokeArgs(JSContext *cx, jsval *vp);
friend class InvokeFrameGuard;
JS_REQUIRES_STACK void popInvokeFrame(JSContext *cx, CallStack *maybecs);
friend class ExecuteFrameGuard;
JS_REQUIRES_STACK void popExecuteFrame(JSContext *cx);
/* Return a pointer to the first unused slot. */
JS_REQUIRES_STACK
inline jsval *firstUnused() const;
inline void assertIsCurrent(JSContext *cx) const;
#ifdef DEBUG
CallStack *getCurrentCallStack() const { return currentCallStack; }
#endif
/*
* Allocate nvals on the top of the stack, report error on failure.
* N.B. the caller must ensure |from == firstUnused()|.
*/
inline bool ensureSpace(JSContext *maybecx, jsval *from, ptrdiff_t nvals) const;
#ifdef XP_WIN
/* Commit more memory from the reserved stack space. */
JS_FRIEND_API(bool) bumpCommit(jsval *from, ptrdiff_t nvals) const;
#endif
public:
static const size_t CAPACITY_VALS = 512 * 1024;
static const size_t CAPACITY_BYTES = CAPACITY_VALS * sizeof(jsval);
static const size_t COMMIT_VALS = 16 * 1024;
static const size_t COMMIT_BYTES = COMMIT_VALS * sizeof(jsval);
/* Kept as a member of JSThreadData; cannot use constructor/destructor. */
bool init();
void finish();
#ifdef DEBUG
template <class T>
bool contains(T *t) const {
char *v = (char *)t;
JS_ASSERT(size_t(-1) - uintptr_t(t) >= sizeof(T));
return v >= (char *)base && v + sizeof(T) <= (char *)end;
}
#endif
/*
* When we LeaveTree, we need to rebuild the stack, which requires stack
* allocation. There is no good way to handle an OOM for these allocations,
* so this function checks that they cannot occur using the size of the
* TraceNativeStorage as a conservative upper bound.
*/
inline bool ensureEnoughSpaceToEnterTrace();
/* +1 for slow native's stack frame. */
static const ptrdiff_t MAX_TRACE_SPACE_VALS =
MAX_NATIVE_STACK_SLOTS + MAX_CALL_STACK_ENTRIES * VALUES_PER_STACK_FRAME +
(VALUES_PER_CALL_STACK + VALUES_PER_STACK_FRAME /* synthesized slow native */);
/* Mark all callstacks, frames, and slots on the stack. */
JS_REQUIRES_STACK void mark(JSTracer *trc);
/*
* For all three use cases below:
* - The boolean-valued functions call js_ReportOutOfScriptQuota on OOM.
* - The "get*Frame" functions do not change any global state, they just
* check OOM and return pointers to an uninitialized frame with the
* requested missing arguments/slots. Only once the "push*Frame"
* function has been called is global state updated. Thus, between
* "get*Frame" and "push*Frame", the frame and slots are unrooted.
* - The "push*Frame" functions will set fp->down; the caller needn't.
* - Functions taking "*Guard" arguments will use the guard's destructor
* to pop the allocation. The caller must ensure the guard has the
* appropriate lifetime.
* - The get*Frame functions put the 'nmissing' slots contiguously after
* the arguments.
*/
/*
* pushInvokeArgs allocates |argc + 2| rooted values that will be passed as
* the arguments to js_Invoke. A single allocation can be used for multiple
* js_Invoke calls. The InvokeArgumentsGuard passed to js_Invoke must come
* from an immediately-enclosing (stack-wise) call to pushInvokeArgs.
*/
JS_REQUIRES_STACK
bool pushInvokeArgs(JSContext *cx, uintN argc, InvokeArgsGuard &ag);
/* These functions are called inside js_Invoke, not js_Invoke clients. */
bool getInvokeFrame(JSContext *cx, const InvokeArgsGuard &ag,
uintN nmissing, uintN nfixed,
InvokeFrameGuard &fg) const;
JS_REQUIRES_STACK
void pushInvokeFrame(JSContext *cx, const InvokeArgsGuard &ag,
InvokeFrameGuard &fg);
/*
* For the simpler case when arguments are allocated at the same time as
* the frame and it is not necessary to have rooted argument values before
* pushing the frame.
*/
JS_REQUIRES_STACK
bool getExecuteFrame(JSContext *cx, JSStackFrame *down,
uintN vplen, uintN nfixed,
ExecuteFrameGuard &fg) const;
JS_REQUIRES_STACK
void pushExecuteFrame(JSContext *cx, ExecuteFrameGuard &fg,
JSObject *initialVarObj);
/*
* Since RAII cannot be used for inline frames, callers must manually
* call pushInlineFrame/popInlineFrame.
*/
JS_REQUIRES_STACK
inline JSStackFrame *getInlineFrame(JSContext *cx, jsval *sp,
uintN nmissing, uintN nfixed) const;
JS_REQUIRES_STACK
inline void pushInlineFrame(JSContext *cx, JSStackFrame *fp, JSStackFrame *newfp);
JS_REQUIRES_STACK
inline void popInlineFrame(JSContext *cx, JSStackFrame *up, JSStackFrame *down);
/*
* For the special case of the slow native stack frame pushed and popped by
* tracing deep bail logic.
*/
JS_REQUIRES_STACK
void getSynthesizedSlowNativeFrame(JSContext *cx, CallStack *&cs, JSStackFrame *&fp);
JS_REQUIRES_STACK
void pushSynthesizedSlowNativeFrame(JSContext *cx, CallStack *cs, JSStackFrame *fp);
JS_REQUIRES_STACK
void popSynthesizedSlowNativeFrame(JSContext *cx);
/* Our privates leak into xpconnect, which needs a public symbol. */
JS_REQUIRES_STACK
JS_FRIEND_API(bool) pushInvokeArgsFriendAPI(JSContext *, uintN, InvokeArgsGuard &);
};
JS_STATIC_ASSERT(StackSpace::CAPACITY_VALS % StackSpace::COMMIT_VALS == 0);
/* Holds the number of recording attemps for an address. */
typedef HashMap<jsbytecode*,
size_t,
@ -526,6 +929,9 @@ const uint32 JSLRS_NULL_MARK = uint32(-1);
struct JSThreadData {
JSGCFreeLists gcFreeLists;
/* Keeper of the contiguous stack used by all contexts in this thread. */
js::StackSpace stackSpace;
/*
* Flag indicating that we are waiving any soft limits on the GC heap
* because we want allocations to be infallible (except when we hit
@ -1127,13 +1533,6 @@ struct JSArgumentFormatMap {
};
#endif
struct JSStackHeader {
uintN nslots;
JSStackHeader *down;
};
#define JS_STACK_SEGMENT(sh) ((jsval *)(sh) + 2)
/*
* Key and entry types for the JSContext.resolvingTable hash table, typedef'd
* here because all consumers need to see these declarations (and not just the
@ -1180,8 +1579,7 @@ struct JSRegExpStatics {
struct JSContext
{
explicit JSContext(JSRuntime *rt) :
runtime(rt), regExpStatics(this), busyArrays(this) {}
explicit JSContext(JSRuntime *rt);
/*
* If this flag is set, we were asked to call back the operation callback
@ -1249,15 +1647,21 @@ struct JSContext
size_t scriptStackQuota;
/* Data shared by threads in an address space. */
JSRuntime * const runtime;
/* Stack arena pool and frame pointer register. */
JS_REQUIRES_STACK
JSArenaPool stackPool;
JSRuntime *const runtime;
/* Currently executing frame, set by stack operations. */
JS_REQUIRES_STACK
JSStackFrame *fp;
private:
friend class js::StackSpace;
/* 'fp' must only be changed by calling this function. */
void setCurrentFrame(JSStackFrame *fp) {
this->fp = fp;
}
public:
/* Temporary arena pool used while compiling and decompiling. */
JSArenaPool tempPool;
@ -1298,58 +1702,51 @@ struct JSContext
void *data2;
private:
#ifdef __GNUC__
# pragma GCC visibility push(default)
#endif
friend void js_TraceContext(JSTracer *, JSContext *);
#ifdef __GNUC__
# pragma GCC visibility pop
#endif
/* Linked list of callstacks. See CallStack. */
js::CallStack *currentCallStack;
public:
void assertCallStacksInSync() const {
#ifdef DEBUG
if (fp) {
JS_ASSERT(currentCallStack->isActive());
if (js::CallStack *prev = currentCallStack->getPreviousInContext())
JS_ASSERT(!prev->isActive());
} else {
JS_ASSERT_IF(currentCallStack, !currentCallStack->isActive());
}
#endif
}
/* Return whether this context has an active callstack. */
bool hasActiveCallStack() const {
assertCallStacksInSync();
return fp;
}
/* Assuming there is an active callstack, return it. */
js::CallStack *activeCallStack() const {
JS_ASSERT(currentCallStack && !currentCallStack->isSaved());
JS_ASSERT(hasActiveCallStack());
return currentCallStack;
}
/* Return the current callstack, which may or may not be active. */
js::CallStack *getCurrentCallStack() const {
assertCallStacksInSync();
return currentCallStack;
}
/* Add the given callstack to the list as the new active callstack. */
void pushCallStack(js::CallStack *newcs) {
if (fp)
currentCallStack->suspend(fp);
else
JS_ASSERT_IF(currentCallStack, currentCallStack->isSaved());
newcs->setPrevious(currentCallStack);
currentCallStack = newcs;
JS_ASSERT(!newcs->isSuspended() && !newcs->isSaved());
}
void pushCallStackAndFrame(js::CallStack *newcs, JSStackFrame *newfp);
/* Remove the active callstack and make the next callstack active. */
void popCallStack() {
JS_ASSERT(!currentCallStack->isSuspended() && !currentCallStack->isSaved());
currentCallStack = currentCallStack->getPrevious();
if (currentCallStack && !currentCallStack->isSaved()) {
JS_ASSERT(fp);
currentCallStack->resume();
}
}
void popCallStackAndFrame();
/* Mark the top callstack as suspended, without pushing a new one. */
void saveActiveCallStack() {
JS_ASSERT(fp && currentCallStack && !currentCallStack->isSuspended());
currentCallStack->save(fp);
fp = NULL;
}
void saveActiveCallStack();
/* Undoes calls to suspendTopCallStack. */
void restoreCallStack() {
JS_ASSERT(!fp && currentCallStack && currentCallStack->isSuspended());
fp = currentCallStack->getSuspendedFrame();
currentCallStack->restore();
}
void restoreCallStack();
/*
* Perform a linear search of all frames in all callstacks in the given context
@ -1370,9 +1767,6 @@ struct JSContext
((JSContext *)((char *)(tl) - offsetof(JSContext, threadLinks)))
#endif
/* PDL of stack headers describing stack slots not rooted by argv, etc. */
JSStackHeader *stackHeaders;
/* Stack of thread-stack-allocated GC roots. */
js::AutoGCRooter *autoGCRooters;
@ -1428,6 +1822,33 @@ struct JSContext
JSClassProtoCache classProtoCache;
private:
/*
* To go from a live generator frame (on the stack) to its generator object
* (see comment js_FloatingFrameIfGenerator), we maintain a stack of active
* generators, pushing and popping when entering and leaving generator
* frames, respectively.
*/
js::Vector<JSGenerator *, 2, js::SystemAllocPolicy> genStack;
public:
/* Return the generator object for the given generator frame. */
JSGenerator *generatorFor(JSStackFrame *fp) const;
/* Early OOM-check. */
bool ensureGeneratorStackSpace() {
return genStack.reserve(genStack.length() + 1);
}
bool enterGenerator(JSGenerator *gen) {
return genStack.append(gen);
}
void leaveGenerator(JSGenerator *gen) {
JS_ASSERT(genStack.back() == gen);
genStack.popBack();
}
#ifdef JS_THREADSAFE
/*
* The sweep task for this context.
@ -1559,6 +1980,10 @@ struct JSContext
void purge();
js::StackSpace &stack() const {
return JS_THREAD_DATA(this)->stackSpace;
}
private:
/*
@ -1571,19 +1996,53 @@ private:
};
JS_ALWAYS_INLINE JSObject *
JSStackFrame::varobj(js::CallStack *cs)
JSStackFrame::varobj(js::CallStack *cs) const
{
JS_ASSERT(cs->contains(this));
return fun ? callobj : cs->getInitialVarObj();
}
JS_ALWAYS_INLINE JSObject *
JSStackFrame::varobj(JSContext *cx)
JSStackFrame::varobj(JSContext *cx) const
{
JS_ASSERT(cx->activeCallStack()->contains(this));
return fun ? callobj : cx->activeCallStack()->getInitialVarObj();
}
/*
* InvokeArgsGuard is used outside the JS engine (where jscntxtinlines.h is
* not included). To avoid visibility issues, force members inline.
*/
namespace js {
JS_ALWAYS_INLINE void
StackSpace::popInvokeArgs(JSContext *cx, jsval *vp)
{
JS_ASSERT(!currentCallStack->inContext());
currentCallStack = currentCallStack->getPreviousInThread();
}
JS_ALWAYS_INLINE
InvokeArgsGuard::InvokeArgsGuard()
: cx(NULL), cs(NULL), vp(NULL)
{}
JS_ALWAYS_INLINE
InvokeArgsGuard::InvokeArgsGuard(jsval *vp, uintN argc)
: cx(NULL), cs(NULL), vp(vp), argc(argc)
{}
JS_ALWAYS_INLINE
InvokeArgsGuard::~InvokeArgsGuard()
{
if (!cs)
return;
JS_ASSERT(cs == cx->stack().getCurrentCallStack());
cx->stack().popInvokeArgs(cx, vp);
}
} /* namespace js */
#ifdef JS_THREADSAFE
# define JS_THREAD_ID(cx) ((cx)->thread ? (cx)->thread->id : 0)
#endif
@ -2207,7 +2666,7 @@ js_ReportOutOfMemory(JSContext *cx);
/*
* Report that cx->scriptStackQuota is exhausted.
*/
extern void
void
js_ReportOutOfScriptQuota(JSContext *cx);
extern void

Просмотреть файл

@ -41,12 +41,120 @@
#define jscntxtinlines_h___
#include "jscntxt.h"
#include "jsparse.h"
#include "jsxml.h"
#include "jsobjinlines.h"
namespace js {
JS_REQUIRES_STACK JS_ALWAYS_INLINE JSStackFrame *
CallStack::getCurrentFrame() const
{
JS_ASSERT(inContext());
return isSuspended() ? getSuspendedFrame() : cx->fp;
}
JS_REQUIRES_STACK inline jsval *
StackSpace::firstUnused() const
{
CallStack *ccs = currentCallStack;
if (!ccs)
return base;
if (!ccs->inContext())
return ccs->getInitialArgEnd();
JSStackFrame *fp = ccs->getCurrentFrame();
if (JSFrameRegs *regs = fp->regs)
return regs->sp;
return fp->slots();
}
/* Inline so we don't need the friend API. */
JS_ALWAYS_INLINE void
StackSpace::assertIsCurrent(JSContext *cx) const
{
#ifdef DEBUG
JS_ASSERT(cx == currentCallStack->maybeContext());
JS_ASSERT(cx->getCurrentCallStack() == currentCallStack);
cx->assertCallStacksInSync();
#endif
}
JS_ALWAYS_INLINE bool
StackSpace::ensureSpace(JSContext *maybecx, jsval *from, ptrdiff_t nvals) const
{
JS_ASSERT(from == firstUnused());
#ifdef XP_WIN
JS_ASSERT(from <= commitEnd);
if (commitEnd - from >= nvals)
return true;
if (end - from < nvals) {
if (maybecx)
js_ReportOutOfScriptQuota(maybecx);
return false;
}
if (!bumpCommit(from, nvals)) {
if (maybecx)
js_ReportOutOfScriptQuota(maybecx);
return false;
}
return true;
#else
if (end - from < nvals) {
if (maybecx)
js_ReportOutOfScriptQuota(maybecx);
return false;
}
return true;
#endif
}
JS_ALWAYS_INLINE bool
StackSpace::ensureEnoughSpaceToEnterTrace()
{
#ifdef XP_WIN
return ensureSpace(NULL, firstUnused(), MAX_TRACE_SPACE_VALS);
#endif
return end - firstUnused() > MAX_TRACE_SPACE_VALS;
}
JS_REQUIRES_STACK JS_ALWAYS_INLINE JSStackFrame *
StackSpace::getInlineFrame(JSContext *cx, jsval *sp,
uintN nmissing, uintN nfixed) const
{
assertIsCurrent(cx);
JS_ASSERT(cx->hasActiveCallStack());
JS_ASSERT(cx->fp->regs->sp == sp);
ptrdiff_t nvals = nmissing + VALUES_PER_STACK_FRAME + nfixed;
if (!ensureSpace(cx, sp, nvals))
return NULL;
JSStackFrame *fp = reinterpret_cast<JSStackFrame *>(sp + nmissing);
return fp;
}
JS_REQUIRES_STACK JS_ALWAYS_INLINE void
StackSpace::pushInlineFrame(JSContext *cx, JSStackFrame *fp, JSStackFrame *newfp)
{
assertIsCurrent(cx);
JS_ASSERT(cx->hasActiveCallStack());
JS_ASSERT(cx->fp == fp);
newfp->down = fp;
cx->setCurrentFrame(newfp);
}
JS_REQUIRES_STACK JS_ALWAYS_INLINE void
StackSpace::popInlineFrame(JSContext *cx, JSStackFrame *up, JSStackFrame *down)
{
assertIsCurrent(cx);
JS_ASSERT(cx->hasActiveCallStack());
JS_ASSERT(cx->fp == up && up->down == down);
cx->setCurrentFrame(down);
}
void
AutoIdArray::trace(JSTracer *trc) {
JS_ASSERT(tag == IDARRAY);

Просмотреть файл

@ -600,148 +600,124 @@ js_GetWatchedSetter(JSRuntime *rt, JSScope *scope,
JSBool
js_watch_set(JSContext *cx, JSObject *obj, jsval id, jsval *vp)
{
JSRuntime *rt;
JSWatchPoint *wp;
JSScopeProperty *sprop;
jsval propid, userid;
JSScope *scope;
JSBool ok;
rt = cx->runtime;
JSRuntime *rt = cx->runtime;
DBG_LOCK(rt);
for (wp = (JSWatchPoint *)rt->watchPointList.next;
for (JSWatchPoint *wp = (JSWatchPoint *)rt->watchPointList.next;
&wp->links != &rt->watchPointList;
wp = (JSWatchPoint *)wp->links.next) {
sprop = wp->sprop;
JSScopeProperty *sprop = wp->sprop;
if (wp->object == obj && SPROP_USERID(sprop) == id &&
!(wp->flags & JSWP_HELD)) {
wp->flags |= JSWP_HELD;
DBG_UNLOCK(rt);
JS_LOCK_OBJ(cx, obj);
propid = ID_TO_VALUE(sprop->id);
userid = SPROP_USERID(sprop);
scope = obj->scope();
jsval propid = ID_TO_VALUE(sprop->id);
jsval userid = SPROP_USERID(sprop);
JSScope *scope = obj->scope();
JS_UNLOCK_OBJ(cx, obj);
/* NB: wp is held, so we can safely dereference it still. */
ok = wp->handler(cx, obj, propid,
if (!wp->handler(cx, obj, propid,
SPROP_HAS_VALID_SLOT(sprop, scope)
? obj->getSlotMT(cx, sprop->slot)
: JSVAL_VOID,
vp, wp->closure);
if (ok) {
vp, wp->closure)) {
DBG_LOCK(rt);
DropWatchPointAndUnlock(cx, wp, JSWP_HELD);
return JS_FALSE;
}
/*
* Create a pseudo-frame for the setter invocation so that any
* stack-walking security code under the setter will correctly
* identify the guilty party. So that the watcher appears to
* be active to obj_eval and other such code, point frame.pc
* at the JSOP_STOP at the end of the script.
*
* The pseudo-frame is not created for fast natives as they
* are treated as interpreter frame extensions and always
* trusted.
*/
JSObject *closure = wp->closure;
JSClass *clasp = closure->getClass();
JSFunction *fun;
JSScript *script;
if (clasp == &js_FunctionClass) {
fun = GET_FUNCTION_PRIVATE(cx, closure);
script = FUN_SCRIPT(fun);
} else if (clasp == &js_ScriptClass) {
fun = NULL;
script = (JSScript *) closure->getPrivate();
} else {
fun = NULL;
script = NULL;
}
uintN vplen = 2;
if (fun)
vplen += fun->minArgs() + (fun->isInterpreted() ? 0 : fun->u.n.extra);
uintN nfixed = script ? script->nfixed : 0;
/* Destructor pops frame. */
JSFrameRegs regs;
ExecuteFrameGuard frame;
if (fun && !fun->isFastNative()) {
/*
* Create a pseudo-frame for the setter invocation so that any
* stack-walking security code under the setter will correctly
* identify the guilty party. So that the watcher appears to
* be active to obj_eval and other such code, point frame.pc
* at the JSOP_STOP at the end of the script.
*
* The pseudo-frame is not created for fast natives as they
* are treated as interpreter frame extensions and always
* trusted.
* Get a pointer to new frame/slots. This memory is not
* "claimed", so the code before pushExecuteFrame must not
* reenter the interpreter.
*/
JSObject *closure;
JSClass *clasp;
JSFunction *fun;
JSScript *script;
JSBool injectFrame;
uintN nslots, slotsStart;
jsval smallv[5];
jsval *argv;
JSStackFrame frame;
JSFrameRegs regs;
closure = wp->closure;
clasp = closure->getClass();
if (clasp == &js_FunctionClass) {
fun = GET_FUNCTION_PRIVATE(cx, closure);
script = FUN_SCRIPT(fun);
} else if (clasp == &js_ScriptClass) {
fun = NULL;
script = (JSScript *) closure->getPrivate();
} else {
fun = NULL;
script = NULL;
JSStackFrame *down = js_GetTopStackFrame(cx);
if (!cx->stack().getExecuteFrame(cx, down, vplen, nfixed, frame)) {
DBG_LOCK(rt);
DropWatchPointAndUnlock(cx, wp, JSWP_HELD);
return JS_FALSE;
}
slotsStart = nslots = 2;
injectFrame = JS_TRUE;
if (fun) {
nslots += FUN_MINARGS(fun);
if (!FUN_INTERPRETED(fun)) {
nslots += fun->u.n.extra;
injectFrame = !(fun->flags & JSFUN_FAST_NATIVE);
}
slotsStart = nslots;
/* Initialize slots/frame. */
jsval *vp = frame.getvp();
PodZero(vp, vplen);
vp[0] = OBJECT_TO_JSVAL(closure);
JSStackFrame *fp = frame.getFrame();
PodZero(fp->slots(), nfixed);
PodZero(fp);
fp->script = script;
fp->regs = NULL;
fp->fun = fun;
fp->argv = vp + 2;
fp->scopeChain = closure->getParent();
if (script) {
JS_ASSERT(script->length >= JSOP_STOP_LENGTH);
regs.pc = script->code + script->length - JSOP_STOP_LENGTH;
regs.sp = fp->slots() + script->nfixed;
fp->regs = &regs;
}
if (script)
nslots += script->nslots;
if (injectFrame) {
if (nslots <= JS_ARRAY_LENGTH(smallv)) {
argv = smallv;
} else {
argv = (jsval *) cx->malloc(nslots * sizeof(jsval));
if (!argv) {
DBG_LOCK(rt);
DropWatchPointAndUnlock(cx, wp, JSWP_HELD);
return JS_FALSE;
}
}
/* Officially push |fp|. |frame|'s destructor pops. */
cx->stack().pushExecuteFrame(cx, frame, NULL);
argv[0] = OBJECT_TO_JSVAL(closure);
argv[1] = JSVAL_NULL;
PodZero(argv + 2, nslots - 2);
PodZero(&frame);
frame.script = script;
frame.regs = NULL;
frame.fun = fun;
frame.argv = argv + 2;
frame.down = js_GetTopStackFrame(cx);
frame.scopeChain = closure->getParent();
if (script && script->nslots)
frame.slots = argv + slotsStart;
if (script) {
JS_ASSERT(script->length >= JSOP_STOP_LENGTH);
regs.pc = script->code + script->length
- JSOP_STOP_LENGTH;
regs.sp = NULL;
frame.regs = &regs;
if (fun &&
JSFUN_HEAVYWEIGHT_TEST(fun->flags) &&
!js_GetCallObject(cx, &frame)) {
if (argv != smallv)
cx->free(argv);
DBG_LOCK(rt);
DropWatchPointAndUnlock(cx, wp, JSWP_HELD);
return JS_FALSE;
}
}
cx->fp = &frame;
}
#ifdef __GNUC__
else
argv = NULL; /* suppress bogus gcc warnings */
#endif
ok = !wp->setter ||
(sprop->hasSetterValue()
? js_InternalCall(cx, obj,
CastAsObjectJSVal(wp->setter),
1, vp, vp)
: wp->setter(cx, obj, userid, vp));
if (injectFrame) {
/* Evil code can cause us to have an arguments object. */
frame.putActivationObjects(cx);
cx->fp = frame.down;
if (argv != smallv)
cx->free(argv);
/* Now that fp has been pushed, get the call object. */
if (script && fun && fun->isHeavyweight() &&
!js_GetCallObject(cx, fp)) {
DBG_LOCK(rt);
DropWatchPointAndUnlock(cx, wp, JSWP_HELD);
return JS_FALSE;
}
}
JSBool ok = !wp->setter ||
(sprop->hasSetterValue()
? js_InternalCall(cx, obj,
CastAsObjectJSVal(wp->setter),
1, vp, vp)
: wp->setter(cx, obj, userid, vp));
/* Evil code can cause us to have an arguments object. */
if (frame.getFrame())
frame.getFrame()->putActivationObjects(cx);
DBG_LOCK(rt);
return DropWatchPointAndUnlock(cx, wp, JSWP_HELD) && ok;
}
@ -1198,6 +1174,8 @@ JS_GetFrameObject(JSContext *cx, JSStackFrame *fp)
JS_PUBLIC_API(JSObject *)
JS_GetFrameScopeChain(JSContext *cx, JSStackFrame *fp)
{
JS_ASSERT(cx->stack().contains(fp));
/* Force creation of argument and call objects if not yet created */
(void) JS_GetFrameCallObject(cx, fp);
return js_GetScopeChain(cx, fp);
@ -1206,6 +1184,8 @@ JS_GetFrameScopeChain(JSContext *cx, JSStackFrame *fp)
JS_PUBLIC_API(JSObject *)
JS_GetFrameCallObject(JSContext *cx, JSStackFrame *fp)
{
JS_ASSERT(cx->stack().contains(fp));
if (! fp->fun)
return NULL;

Просмотреть файл

@ -80,6 +80,7 @@
#endif
#include "jsatominlines.h"
#include "jscntxtinlines.h"
#include "jsobjinlines.h"
using namespace js;
@ -647,8 +648,12 @@ args_enumerate(JSContext *cx, JSObject *obj)
#if JS_HAS_GENERATORS
/*
* If a generator-iterator's arguments or call object escapes, it needs to
* mark its generator object.
* If a generator's arguments or call object escapes, and the generator frame
* is not executing, the generator object needs to be marked because it is not
* otherwise reachable. An executing generator is rooted by its invocation. To
* distinguish the two cases (which imply different access paths to the
* generator object), we use the JSFRAME_FLOATING_GENERATOR flag, which is only
* set on the JSStackFrame kept in the generator object's JSGenerator.
*/
static void
args_or_call_trace(JSTracer *trc, JSObject *obj)
@ -661,9 +666,9 @@ args_or_call_trace(JSTracer *trc, JSObject *obj)
}
JSStackFrame *fp = (JSStackFrame *) obj->getPrivate();
if (fp && (fp->flags & JSFRAME_GENERATOR)) {
JS_CALL_OBJECT_TRACER(trc, FRAME_TO_GENERATOR(fp)->obj,
"FRAME_TO_GENERATOR(fp)->obj");
if (fp && fp->isFloatingGenerator()) {
JSObject *obj = js_FloatingFrameToGenerator(fp)->obj;
JS_CALL_OBJECT_TRACER(trc, obj, "generator object");
}
}
#else
@ -787,7 +792,9 @@ js_GetCallObject(JSContext *cx, JSStackFrame *fp)
#ifdef DEBUG
/* A call object should be a frame's outermost scope chain element. */
JSClass *classp = fp->scopeChain->getClass();
if (classp == &js_WithClass || classp == &js_BlockClass || classp == &js_CallClass)
if (classp == &js_WithClass || classp == &js_BlockClass)
JS_ASSERT(fp->scopeChain->getPrivate() != js_FloatingFrameIfGenerator(cx, fp));
else if (classp == &js_CallClass)
JS_ASSERT(fp->scopeChain->getPrivate() != fp);
#endif
@ -897,7 +904,7 @@ js_PutCallObject(JSContext *cx, JSStackFrame *fp)
if (n != 0) {
JS_ASSERT(callobj->numSlots() >= JS_INITIAL_NSLOTS + n);
n += JS_INITIAL_NSLOTS;
CopyValuesToCallObject(callobj, fun->nargs, fp->argv, fun->u.i.nvars, fp->slots);
CopyValuesToCallObject(callobj, fun->nargs, fp->argv, fun->u.i.nvars, fp->slots());
}
/* Clear private pointers to fp, which is about to go away (js_Invoke). */
@ -1057,7 +1064,7 @@ CallPropertyOp(JSContext *cx, JSObject *obj, jsid id, jsval *vp,
array = fp->argv;
} else {
JS_ASSERT(kind == JSCPK_VAR);
array = fp->slots;
array = fp->slots();
}
}
@ -1899,9 +1906,8 @@ JSBool
js_fun_call(JSContext *cx, uintN argc, jsval *vp)
{
JSObject *obj;
jsval fval, *argv, *invokevp;
jsval fval, *argv;
JSString *str;
void *mark;
JSBool ok;
LeaveTrace(cx);
@ -1941,18 +1947,17 @@ js_fun_call(JSContext *cx, uintN argc, jsval *vp)
}
/* Allocate stack space for fval, obj, and the args. */
invokevp = js_AllocStack(cx, 2 + argc, &mark);
if (!invokevp)
InvokeArgsGuard args;
if (!cx->stack().pushInvokeArgs(cx, argc, args))
return JS_FALSE;
/* Push fval, obj, and the args. */
invokevp[0] = fval;
invokevp[1] = OBJECT_TO_JSVAL(obj);
memcpy(invokevp + 2, argv, argc * sizeof *argv);
args.getvp()[0] = fval;
args.getvp()[1] = OBJECT_TO_JSVAL(obj);
memcpy(args.getvp() + 2, argv, argc * sizeof *argv);
ok = js_Invoke(cx, argc, invokevp, 0);
*vp = *invokevp;
js_FreeStack(cx, mark);
ok = js_Invoke(cx, args, 0);
*vp = *args.getvp();
return ok;
}
@ -1960,11 +1965,10 @@ JSBool
js_fun_apply(JSContext *cx, uintN argc, jsval *vp)
{
JSObject *obj, *aobj;
jsval fval, *invokevp, *sp;
jsval fval, *sp;
JSString *str;
jsuint length;
JSBool arraylike, ok;
void *mark;
JSBool arraylike;
uintN i;
if (argc == 0) {
@ -2026,12 +2030,13 @@ js_fun_apply(JSContext *cx, uintN argc, jsval *vp)
/* Allocate stack space for fval, obj, and the args. */
argc = (uintN)JS_MIN(length, JS_ARGS_LENGTH_MAX);
invokevp = js_AllocStack(cx, 2 + argc, &mark);
if (!invokevp)
InvokeArgsGuard args;
if (!cx->stack().pushInvokeArgs(cx, argc, args))
return JS_FALSE;
/* Push fval, obj, and aobj's elements as args. */
sp = invokevp;
sp = args.getvp();
*sp++ = fval;
*sp++ = OBJECT_TO_JSVAL(obj);
if (aobj && aobj->isArguments() && !aobj->isArgsLengthOverridden()) {
@ -2057,17 +2062,14 @@ js_fun_apply(JSContext *cx, uintN argc, jsval *vp)
}
} else {
for (i = 0; i < argc; i++) {
ok = aobj->getProperty(cx, INT_TO_JSID(jsint(i)), sp);
if (!ok)
goto out;
if (!aobj->getProperty(cx, INT_TO_JSID(jsint(i)), sp))
return JS_FALSE;
sp++;
}
}
ok = js_Invoke(cx, argc, invokevp, 0);
*vp = *invokevp;
out:
js_FreeStack(cx, mark);
JSBool ok = js_Invoke(cx, args, 0);
*vp = *args.getvp();
return ok;
}
@ -2077,9 +2079,6 @@ fun_applyConstructor(JSContext *cx, uintN argc, jsval *vp)
{
JSObject *aobj;
uintN length, i;
void *mark;
jsval *invokevp, *sp;
JSBool ok;
if (JSVAL_IS_PRIMITIVE(vp[2]) ||
(aobj = JSVAL_TO_OBJECT(vp[2]),
@ -2095,24 +2094,23 @@ fun_applyConstructor(JSContext *cx, uintN argc, jsval *vp)
if (length > JS_ARGS_LENGTH_MAX)
length = JS_ARGS_LENGTH_MAX;
invokevp = js_AllocStack(cx, 2 + length, &mark);
if (!invokevp)
return JS_FALSE;
sp = invokevp;
InvokeArgsGuard args;
if (!cx->stack().pushInvokeArgs(cx, length, args))
return JS_FALSE;
jsval *sp = args.getvp();
*sp++ = vp[1];
*sp++ = JSVAL_NULL; /* this is filled automagically */
for (i = 0; i < length; i++) {
ok = aobj->getProperty(cx, INT_TO_JSID(jsint(i)), sp);
if (!ok)
goto out;
if (!aobj->getProperty(cx, INT_TO_JSID(jsint(i)), sp))
return JS_FALSE;
sp++;
}
ok = js_InvokeConstructor(cx, length, JS_TRUE, invokevp);
*vp = *invokevp;
out:
js_FreeStack(cx, mark);
JSBool ok = js_InvokeConstructor(cx, args, JS_TRUE);
*vp = *args.getvp();
return ok;
}
#endif

Просмотреть файл

@ -163,6 +163,10 @@ struct JSFunction : public JSObject
bool optimizedClosure() const { return FUN_KIND(this) > JSFUN_INTERPRETED; }
bool needsWrapper() const { return FUN_NULL_CLOSURE(this) && u.i.skipmin != 0; }
bool isInterpreted() const { return FUN_INTERPRETED(this); }
bool isFastNative() const { return flags & JSFUN_FAST_NATIVE; }
bool isHeavyweight() const { return JSFUN_HEAVYWEIGHT_TEST(flags); }
unsigned minArgs() const { return FUN_MINARGS(this); }
uintN countVars() const {
JS_ASSERT(FUN_INTERPRETED(this));
@ -418,8 +422,12 @@ js_IsNamedLambda(JSFunction *fun) { return (fun->flags & JSFUN_LAMBDA) && fun->a
* arguments that can be supplied via the second (so-called |argArray|) param
* to Function.prototype.apply. This value also bounds the number of elements
* parsed in an array initialiser.
*
* The thread's stack is the limiting factor for this number. It is currently
* 2MB, which fits a little less than 2^19 arguments (once the stack frame,
* callstack, etc. are included). Pick a max args length that is a little less.
*/
const uint32 JS_ARGS_LENGTH_MAX = JS_BIT(24) - 1;
const uint32 JS_ARGS_LENGTH_MAX = JS_BIT(19) - 1024;
/*
* JSSLOT_ARGS_LENGTH stores ((argc << 1) | overwritten_flag) as int jsval.

Просмотреть файл

@ -134,8 +134,6 @@ JS_STATIC_ASSERT(JSVAL_NULL == 0);
JS_STATIC_ASSERT(FINALIZE_EXTERNAL_STRING_LAST - FINALIZE_EXTERNAL_STRING0 ==
JS_EXTERNAL_STRING_LIMIT - 1);
JS_STATIC_ASSERT(sizeof(JSStackHeader) >= 2 * sizeof(jsval));
/*
* GC memory is allocated in chunks. The size of each chunk is GC_CHUNK_SIZE.
* The chunk contains an array of GC arenas holding GC things, an array of
@ -1276,7 +1274,6 @@ js_named_root_dumper(JSDHashTable *table, JSDHashEntryHdr *hdr, uint32 number,
return JS_DHASH_NEXT;
}
JS_BEGIN_EXTERN_C
void
js_DumpNamedRoots(JSRuntime *rt,
void (*dump)(const char *name, void *rp, void *data),
@ -1288,7 +1285,6 @@ js_DumpNamedRoots(JSRuntime *rt,
args.data = data;
JS_DHashTableEnumerate(&rt->gcRootsHash, js_named_root_dumper, &args);
}
JS_END_EXTERN_C
#endif /* DEBUG */
@ -2241,55 +2237,17 @@ TraceObjectVector(JSTracer *trc, JSObject **vec, uint32 len)
void
js_TraceStackFrame(JSTracer *trc, JSStackFrame *fp)
{
uintN nslots, minargs, skip;
if (fp->callobj)
JS_CALL_OBJECT_TRACER(trc, fp->callobj, "call");
if (fp->argsobj)
JS_CALL_OBJECT_TRACER(trc, JSVAL_TO_OBJECT(fp->argsobj), "arguments");
if (fp->script) {
if (fp->script)
js_TraceScript(trc, fp->script);
/* fp->slots is null for watch pseudo-frames, see js_watch_set. */
if (fp->slots) {
/*
* Don't mark what has not been pushed yet, or what has been
* popped already.
*/
if (fp->regs && fp->regs->sp) {
nslots = (uintN) (fp->regs->sp - fp->slots);
JS_ASSERT(nslots >= fp->script->nfixed);
} else {
nslots = fp->script->nfixed;
}
TraceValues(trc, nslots, fp->slots, "slot");
}
} else {
JS_ASSERT(!fp->slots);
JS_ASSERT(!fp->regs);
}
/* Allow for primitive this parameter due to JSFUN_THISP_* flags. */
JS_CALL_VALUE_TRACER(trc, fp->thisv, "this");
if (fp->argv) {
JS_CALL_VALUE_TRACER(trc, fp->calleeValue(), "callee");
nslots = fp->argc;
skip = 0;
if (fp->fun) {
minargs = FUN_MINARGS(fp->fun);
if (minargs > nslots)
nslots = minargs;
if (!FUN_INTERPRETED(fp->fun)) {
JS_ASSERT(!(fp->fun->flags & JSFUN_FAST_NATIVE));
nslots += fp->fun->u.n.extra;
}
if (fp->fun->flags & JSFRAME_ROOTED_ARGV)
skip = 2 + fp->argc;
}
TraceValues(trc, 2 + nslots - skip, fp->argv - 2 + skip, "operand");
}
JS_CALL_VALUE_TRACER(trc, fp->rval, "rval");
if (fp->scopeChain)
JS_CALL_OBJECT_TRACER(trc, fp->scopeChain, "scope chain");
@ -2330,48 +2288,10 @@ JSWeakRoots::mark(JSTracer *trc)
js_CallValueTracerIfGCThing(trc, lastInternalResult);
}
static void inline
TraceFrameChain(JSTracer *trc, JSStackFrame *fp)
{
    /*
     * Trace fp and every older frame reachable through the down links.
     * The first frame is traced unconditionally (callers pass non-null).
     */
    JSStackFrame *frame = fp;
    do {
        js_TraceStackFrame(trc, frame);
        frame = frame->down;
    } while (frame != NULL);
}
JS_REQUIRES_STACK JS_FRIEND_API(void)
void
js_TraceContext(JSTracer *trc, JSContext *acx)
{
JSStackHeader *sh;
/*
* Trace active and suspended callstacks.
*
* Since js_GetTopStackFrame needs to dereference cx->thread to check for
* JIT frames, we check for non-null thread here and avoid null checks
* there. See bug 471197.
*/
#ifdef JS_THREADSAFE
if (acx->thread)
#endif
{
/* If |cx->fp|, the active callstack has newest (top) frame |cx->fp|. */
JSStackFrame *fp = js_GetTopStackFrame(acx);
if (fp) {
JS_ASSERT(!acx->activeCallStack()->isSuspended());
TraceFrameChain(trc, fp);
if (JSObject *o = acx->activeCallStack()->getInitialVarObj())
JS_CALL_OBJECT_TRACER(trc, o, "variables");
}
/* Trace suspended frames. */
CallStack *cur = acx->currentCallStack;
CallStack *cs = fp ? cur->getPrevious() : cur;
for (; cs; cs = cs->getPrevious()) {
TraceFrameChain(trc, cs->getSuspendedFrame());
if (cs->getInitialVarObj())
JS_CALL_OBJECT_TRACER(trc, cs->getInitialVarObj(), "var env");
}
}
/* Stack frames and slots are traced by StackSpace::mark. */
/* Mark other roots-by-definition in acx. */
if (acx->globalObject && !JS_HAS_OPTION(acx, JSOPTION_UNROOTED_GLOBAL))
@ -2384,12 +2304,6 @@ js_TraceContext(JSTracer *trc, JSContext *acx)
acx->exception = JSVAL_NULL;
}
for (sh = acx->stackHeaders; sh; sh = sh->down) {
METER(trc->context->runtime->gcStats.stackseg++);
METER(trc->context->runtime->gcStats.segslots += sh->nslots);
TraceValues(trc, sh->nslots, JS_STACK_SEGMENT(sh), "stack");
}
for (js::AutoGCRooter *gcr = acx->autoGCRooters; gcr; gcr = gcr->down)
gcr->trace(trc);

Просмотреть файл

@ -51,8 +51,6 @@
#include "jsvector.h"
#include "jsversion.h"
JS_BEGIN_EXTERN_C
#define JSTRACE_XML 3
/*
@ -476,13 +474,27 @@ js_DumpGCStats(JSRuntime *rt, FILE *fp);
extern void
js_MarkTraps(JSTracer *trc);
JS_END_EXTERN_C
namespace js {
void
TraceObjectVector(JSTracer *trc, JSObject **vec, uint32 len);
inline void
#ifdef DEBUG
TraceValues(JSTracer *trc, jsval *beg, jsval *end, const char *name)
#else
TraceValues(JSTracer *trc, jsval *beg, jsval *end, const char *) /* last arg unused in release. kill unreferenced formal param warnings */
#endif
{
    /*
     * Mark every traceable value in the half-open range [beg, end), tagging
     * each with its offset from beg for the tracer's index bookkeeping.
     */
    for (jsval *p = beg; p < end; ++p) {
        jsval val = *p;
        if (!JSVAL_IS_TRACEABLE(val))
            continue;
        JS_SET_TRACING_INDEX(trc, name, p - beg);
        js_CallGCMarker(trc, JSVAL_TO_TRACEABLE(val), JSVAL_TRACE_KIND(val));
    }
}
inline void
#ifdef DEBUG
TraceValues(JSTracer *trc, size_t len, jsval *vec, const char *name)
@ -490,15 +502,9 @@ TraceValues(JSTracer *trc, size_t len, jsval *vec, const char *name)
TraceValues(JSTracer *trc, size_t len, jsval *vec, const char *) /* last arg unused in release. kill unreferenced formal param warnings */
#endif
{
for (jsval *vp = vec, *end = vp + len; vp < end; vp++) {
jsval v = *vp;
if (JSVAL_IS_TRACEABLE(v)) {
JS_SET_TRACING_INDEX(trc, name, vp - vec);
js_CallGCMarker(trc, JSVAL_TO_TRACEABLE(v), JSVAL_TRACE_KIND(v));
}
}
TraceValues(trc, vec, vec + len, name);
}
}
} /* namespace js */
#endif /* jsgc_h___ */

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -56,6 +56,23 @@ typedef struct JSFrameRegs {
jsval *sp; /* stack pointer */
} JSFrameRegs;
/* JS stack frame flags. */
/* Bit flags stored in JSStackFrame::flags; combined with bitwise-or. */
enum JSFrameFlags {
    JSFRAME_CONSTRUCTING = 0x01, /* frame is for a constructor invocation */
    JSFRAME_COMPUTED_THIS = 0x02, /* frame.thisv was computed already and
                                     JSVAL_IS_OBJECT(thisv) */
    JSFRAME_ASSIGNING = 0x04, /* a complex (not simplex JOF_ASSIGNING) op
                                 is currently assigning to a property */
    JSFRAME_DEBUGGER = 0x08, /* frame for JS_EvaluateInStackFrame */
    JSFRAME_EVAL = 0x10, /* frame for obj_eval */
    JSFRAME_FLOATING_GENERATOR = 0x20, /* frame copy stored in a generator obj */
    JSFRAME_YIELDING = 0x40, /* js_Interpret dispatched JSOP_YIELD */
    JSFRAME_ITERATOR = 0x80, /* trying to get an iterator for for-in */
    /* NOTE(review): 0x100 is unassigned here — confirm before reusing it. */
    JSFRAME_GENERATOR = 0x200, /* frame belongs to generator-iterator */
    JSFRAME_OVERRIDE_ARGS = 0x400, /* overridden arguments local variable */
    JSFRAME_SPECIAL = JSFRAME_DEBUGGER | JSFRAME_EVAL
};
/*
* JS stack frame, may be allocated on the C stack by native callers. Always
@ -67,21 +84,24 @@ typedef struct JSFrameRegs {
* sharp* and xml* members should be moved onto the stack as local variables
* with well-known slots, if possible.
*/
struct JSStackFrame {
JSFrameRegs *regs;
jsbytecode *imacpc; /* null or interpreter macro call pc */
jsval *slots; /* variables, locals and operand stack */
JSObject *callobj; /* lazily created Call object */
jsval argsobj; /* lazily created arguments object, must be
JSVAL_OBJECT */
JSScript *script; /* script being interpreted */
JSFunction *fun; /* function being called or null */
jsval thisv; /* "this" pointer if in method */
uintN argc; /* actual argument count */
jsval *argv; /* base of argument stack slots */
jsval rval; /* function return value */
JSStackFrame *down; /* previous frame */
void *annotation; /* used by Java security */
struct JSStackFrame
{
JSFrameRegs *regs;
jsbytecode *imacpc; /* null or interpreter macro call pc */
JSObject *callobj; /* lazily created Call object */
jsval argsobj; /* lazily created arguments object, must be
JSVAL_OBJECT */
JSScript *script; /* script being interpreted */
JSFunction *fun; /* function being called or null */
jsval thisv; /* "this" pointer if in method */
uintN argc; /* actual argument count */
jsval *argv; /* base of argument stack slots */
jsval rval; /* function return value */
void *annotation; /* used by Java security */
/* Maintained by StackSpace operations */
JSStackFrame *down; /* previous frame, part of
stack layout invariant */
/*
* We can't determine in advance which local variables can live on
@ -129,6 +149,11 @@ struct JSStackFrame {
JSStackFrame *displaySave; /* previous value of display entry for
script->staticLevel */
/* Members only needed for inline calls. */
JSFrameRegs callerRegs; /* caller's regs for inline call */
void *hookData; /* debugger call hook data */
JSVersion callerVersion; /* dynamic version of calling script */
inline void assertValidStackDepth(uintN depth);
void putActivationObjects(JSContext *cx) {
@ -144,6 +169,14 @@ struct JSStackFrame {
}
}
jsval *argEnd() const {
return (jsval *)this;
}
jsval *slots() const {
return (jsval *)(this + 1);
}
jsval calleeValue() {
JS_ASSERT(argv);
return argv[-2];
@ -163,14 +196,30 @@ struct JSStackFrame {
* VariableEnvironment (ES5 10.3). The given CallStack must contain this
* stack frame.
*/
JSObject *varobj(js::CallStack *cs);
JSObject *varobj(js::CallStack *cs) const;
/* Short for: varobj(cx->activeCallStack()). */
JSObject *varobj(JSContext *cx);
JSObject *varobj(JSContext *cx) const;
inline JSObject *getThisObject(JSContext *cx);
bool isGenerator() const { return flags & JSFRAME_GENERATOR; }
bool isFloatingGenerator() const {
if (flags & JSFRAME_FLOATING_GENERATOR) {
JS_ASSERT(isGenerator());
return true;
}
return false;
}
};
namespace js {
static const size_t VALUES_PER_STACK_FRAME = sizeof(JSStackFrame) / sizeof(jsval);
JS_STATIC_ASSERT(sizeof(JSStackFrame) % sizeof(jsval) == 0);
}
#ifdef __cplusplus
static JS_INLINE uintN
FramePCOffset(JSStackFrame* fp)
@ -182,7 +231,7 @@ FramePCOffset(JSStackFrame* fp)
static JS_INLINE jsval *
StackBase(JSStackFrame *fp)
{
return fp->slots + fp->script->nfixed;
return fp->slots() + fp->script->nfixed;
}
#ifdef DEBUG
@ -204,39 +253,6 @@ GlobalVarCount(JSStackFrame *fp)
return fp->script->nfixed;
}
typedef struct JSInlineFrame {
JSStackFrame frame; /* base struct */
JSFrameRegs callerRegs; /* parent's frame registers */
void *mark; /* mark before inline frame */
void *hookData; /* debugger call hook data */
JSVersion callerVersion; /* dynamic version of calling script */
} JSInlineFrame;
/* JS stack frame flags. */
#define JSFRAME_CONSTRUCTING 0x01 /* frame is for a constructor invocation */
#define JSFRAME_COMPUTED_THIS 0x02 /* frame.thisv was computed already and
JSVAL_IS_OBJECT(thisv) */
#define JSFRAME_ASSIGNING 0x04 /* a complex (not simplex JOF_ASSIGNING) op
is currently assigning to a property */
#define JSFRAME_DEBUGGER 0x08 /* frame for JS_EvaluateInStackFrame */
#define JSFRAME_EVAL 0x10 /* frame for obj_eval */
#define JSFRAME_ROOTED_ARGV 0x20 /* frame.argv is rooted by the caller */
#define JSFRAME_YIELDING 0x40 /* js_Interpret dispatched JSOP_YIELD */
#define JSFRAME_ITERATOR 0x80 /* trying to get an iterator for for-in */
#define JSFRAME_GENERATOR 0x200 /* frame belongs to generator-iterator */
#define JSFRAME_OVERRIDE_ARGS 0x400 /* overridden arguments local variable */
#define JSFRAME_SPECIAL (JSFRAME_DEBUGGER | JSFRAME_EVAL)
/*
* Interpreter stack arena-pool alloc and free functions.
*/
extern JS_REQUIRES_STACK JS_FRIEND_API(jsval *)
js_AllocStack(JSContext *cx, uintN nslots, void **markp);
extern JS_REQUIRES_STACK JS_FRIEND_API(void)
js_FreeStack(JSContext *cx, void *mark);
/*
* Refresh and return fp->scopeChain. It may be stale if block scopes are
* active but not yet reflected by objects in the scope chain. If a block
@ -277,16 +293,15 @@ extern const uint16 js_PrimitiveTestFlags[];
js_PrimitiveTestFlags[JSVAL_TAG(thisv) - 1]))
/*
* NB: js_Invoke requires that cx is currently running JS (i.e., that cx->fp
* is non-null), and that vp points to the callee, |this| parameter, and
* actual arguments of the call. [vp .. vp + 2 + argc) must belong to the last
* JS stack segment that js_AllocStack allocated. The function may use the
* space available after vp + 2 + argc in the stack segment for temporaries,
* so the caller should not use that space for values that must be preserved
* across the call.
* The js::InvokeArgumentsGuard passed to js_Invoke must come from an
* immediately-enclosing successful call to js::StackSpace::pushInvokeArgs,
* i.e., there must have been no un-popped pushes to cx->stack(). Furthermore,
* |args.getvp()[0]| should be the callee, |args.getvp()[1]| should be |this|,
* and the range [args.getvp() + 2, args.getvp() + 2 + args.getArgc()) should
* be initialized actual arguments.
*/
extern JS_REQUIRES_STACK JS_FRIEND_API(JSBool)
js_Invoke(JSContext *cx, uintN argc, jsval *vp, uintN flags);
js_Invoke(JSContext *cx, const js::InvokeArgsGuard &args, uintN flags);
/*
* Consolidated js_Invoke flags simply rename certain JSFRAME_* flags, so that
@ -332,7 +347,7 @@ js_Execute(JSContext *cx, JSObject *chain, JSScript *script,
JSStackFrame *down, uintN flags, jsval *result);
extern JS_REQUIRES_STACK JSBool
js_InvokeConstructor(JSContext *cx, uintN argc, JSBool clampReturn, jsval *vp);
js_InvokeConstructor(JSContext *cx, const js::InvokeArgsGuard &args, JSBool clampReturn);
extern JS_REQUIRES_STACK JSBool
js_Interpret(JSContext *cx);
@ -386,12 +401,6 @@ js_GetUpvar(JSContext *cx, uintN level, uintN cookie);
#else
# define JS_STATIC_INTERPRET
extern JS_REQUIRES_STACK jsval *
js_AllocRawStack(JSContext *cx, uintN nslots, void **markp);
extern JS_REQUIRES_STACK void
js_FreeRawStack(JSContext *cx, void *mark);
/*
* ECMA requires "the global object", but in embeddings such as the browser,
* which have multiple top-level objects (windows, frames, etc. in the DOM),

Просмотреть файл

@ -729,19 +729,17 @@ generator_trace(JSTracer *trc, JSObject *obj)
return;
/*
* js_TraceStackFrame does not recursively trace the down-linked frame
* chain, so we insist that gen->frame has no parent to trace when the
* generator is not running.
* Do not mark if the generator is running; the contents may be trash and
* will be replaced when the generator stops.
*/
JS_ASSERT_IF(gen->state != JSGEN_RUNNING && gen->state != JSGEN_CLOSING,
!gen->frame.down);
if (gen->state == JSGEN_RUNNING || gen->state == JSGEN_CLOSING)
return;
/*
* FIXME be 390950. Generator's frame is a part of the JS stack when the
* generator is running or closing. Thus tracing the frame in this case
* here duplicates the work done in js_TraceContext.
*/
js_TraceStackFrame(trc, &gen->frame);
JSStackFrame *fp = gen->getFloatingFrame();
JS_ASSERT(gen->getLiveFrame() == fp);
TraceValues(trc, gen->floatingStack, fp->argEnd(), "generator slots");
js_TraceStackFrame(trc, fp);
TraceValues(trc, fp->slots(), gen->savedRegs.sp, "generator slots");
}
JSExtendedClass js_GeneratorClass = {
@ -771,87 +769,83 @@ JSExtendedClass js_GeneratorClass = {
JS_REQUIRES_STACK JSObject *
js_NewGenerator(JSContext *cx)
{
JSObject *obj;
uintN argc, nargs, nslots;
JSGenerator *gen;
jsval *slots;
obj = NewObject(cx, &js_GeneratorClass.base, NULL, NULL);
JSObject *obj = NewObject(cx, &js_GeneratorClass.base, NULL, NULL);
if (!obj)
return NULL;
/* Load and compute stack slot counts. */
JSStackFrame *fp = cx->fp;
argc = fp->argc;
nargs = JS_MAX(argc, fp->fun->nargs);
nslots = 2 + nargs + fp->script->nslots;
uintN argc = fp->argc;
uintN nargs = JS_MAX(argc, fp->fun->nargs);
uintN vplen = 2 + nargs;
/* Allocate obj's private data struct. */
gen = (JSGenerator *)
cx->malloc(sizeof(JSGenerator) + (nslots - 1) * sizeof(jsval));
/* Compute JSGenerator size. */
uintN nbytes = sizeof(JSGenerator) +
(-1 + /* one jsval included in JSGenerator */
vplen +
VALUES_PER_STACK_FRAME +
fp->script->nslots) * sizeof(jsval);
JSGenerator *gen = (JSGenerator *) cx->malloc(nbytes);
if (!gen)
return NULL;
gen->obj = obj;
/* Cut up floatingStack space. */
jsval *vp = gen->floatingStack;
JSStackFrame *newfp = reinterpret_cast<JSStackFrame *>(vp + vplen);
jsval *slots = newfp->slots();
/* Steal away objects reflecting fp and point them at gen->frame. */
gen->frame.callobj = fp->callobj;
if (fp->callobj) {
fp->callobj->setPrivate(&gen->frame);
/* Initialize JSGenerator. */
gen->obj = obj;
gen->state = JSGEN_NEWBORN;
gen->savedRegs.pc = fp->regs->pc;
JS_ASSERT(fp->regs->sp == fp->slots() + fp->script->nfixed);
gen->savedRegs.sp = slots + fp->script->nfixed;
gen->vplen = vplen;
gen->liveFrame = newfp;
/* Copy generator's stack frame copy in from |cx->fp|. */
newfp->regs = &gen->savedRegs;
newfp->imacpc = NULL;
newfp->callobj = fp->callobj;
if (fp->callobj) { /* Steal call object. */
fp->callobj->setPrivate(newfp);
fp->callobj = NULL;
}
gen->frame.argsobj = fp->argsobj;
if (fp->argsobj) {
JSVAL_TO_OBJECT(fp->argsobj)->setPrivate(&gen->frame);
newfp->argsobj = fp->argsobj;
if (fp->argsobj) { /* Steal args object. */
JSVAL_TO_OBJECT(fp->argsobj)->setPrivate(newfp);
fp->argsobj = NULL;
}
/* These two references can be shared with fp until it goes away. */
gen->frame.thisv = fp->thisv;
/* Copy call-invariant script and function references. */
gen->frame.script = fp->script;
gen->frame.fun = fp->fun;
/* Use slots to carve space out of gen->slots. */
slots = gen->slots;
gen->arena.next = NULL;
gen->arena.base = (jsuword) slots;
gen->arena.limit = gen->arena.avail = (jsuword) (slots + nslots);
/* Copy rval, argv and vars. */
gen->frame.rval = fp->rval;
memcpy(slots, fp->argv - 2, (2 + nargs) * sizeof(jsval));
gen->frame.argc = fp->argc;
gen->frame.argv = slots + 2;
slots += 2 + nargs;
memcpy(slots, fp->slots, fp->script->nfixed * sizeof(jsval));
/* Initialize or copy virtual machine state. */
gen->frame.down = NULL;
gen->frame.annotation = NULL;
gen->frame.scopeChain = fp->scopeChain;
gen->frame.imacpc = NULL;
gen->frame.slots = slots;
JS_ASSERT(StackBase(fp) == fp->regs->sp);
gen->savedRegs.sp = slots + fp->script->nfixed;
gen->savedRegs.pc = fp->regs->pc;
gen->frame.regs = &gen->savedRegs;
gen->frame.flags = (fp->flags & ~JSFRAME_ROOTED_ARGV) | JSFRAME_GENERATOR;
/* JSOP_GENERATOR appears in the prologue, outside all blocks. */
newfp->script = fp->script;
newfp->fun = fp->fun;
newfp->thisv = fp->thisv;
newfp->argc = fp->argc;
newfp->argv = vp + 2;
newfp->rval = fp->rval;
newfp->annotation = NULL;
newfp->scopeChain = fp->scopeChain;
JS_ASSERT(!fp->blockChain);
gen->frame.blockChain = NULL;
newfp->blockChain = NULL;
newfp->flags = fp->flags | JSFRAME_GENERATOR | JSFRAME_FLOATING_GENERATOR;
/* Note that gen is newborn. */
gen->state = JSGEN_NEWBORN;
/* Copy in arguments and slots. */
memcpy(vp, fp->argv - 2, vplen * sizeof(jsval));
memcpy(slots, fp->slots(), fp->script->nfixed * sizeof(jsval));
obj->setPrivate(gen);
return obj;
}
JSGenerator *
js_FloatingFrameToGenerator(JSStackFrame *fp)
{
    JS_ASSERT(fp->isGenerator() && fp->isFloatingGenerator());
    /*
     * A floating frame is embedded in JSGenerator::floatingStack with argv
     * pointing two slots (callee, this) past the start of that array. Back
     * up to the array base, then to the enclosing JSGenerator.
     */
    char *stackBase = reinterpret_cast<char *>(fp->argv - 2);
    char *genBase = stackBase - offsetof(JSGenerator, floatingStack);
    return reinterpret_cast<JSGenerator *>(genBase);
}
typedef enum JSGeneratorOp {
JSGENOP_NEXT,
JSGENOP_SEND,
@ -867,17 +861,17 @@ static JS_REQUIRES_STACK JSBool
SendToGenerator(JSContext *cx, JSGeneratorOp op, JSObject *obj,
JSGenerator *gen, jsval arg)
{
JSStackFrame *fp;
JSArena *arena;
JSBool ok;
if (gen->state == JSGEN_RUNNING || gen->state == JSGEN_CLOSING) {
js_ReportValueError(cx, JSMSG_NESTING_GENERATOR,
JSDVG_SEARCH_STACK, OBJECT_TO_JSVAL(obj),
JS_GetFunctionId(gen->frame.fun));
JS_GetFunctionId(gen->getFloatingFrame()->fun));
return JS_FALSE;
}
/* Check for OOM errors here, where we can fail easily. */
if (!cx->ensureGeneratorStackSpace())
return JS_FALSE;
JS_ASSERT(gen->state == JSGEN_NEWBORN || gen->state == JSGEN_OPEN);
switch (op) {
case JSGENOP_NEXT:
@ -904,40 +898,94 @@ SendToGenerator(JSContext *cx, JSGeneratorOp op, JSObject *obj,
break;
}
/* Extend the current stack pool with gen->arena. */
arena = cx->stackPool.current;
JS_ASSERT(!arena->next);
JS_ASSERT(!gen->arena.next);
JS_ASSERT(cx->stackPool.current != &gen->arena);
cx->stackPool.current = arena->next = &gen->arena;
JSStackFrame *genfp = gen->getFloatingFrame();
JSBool ok;
{
jsval *genVp = gen->floatingStack;
uintN vplen = gen->vplen;
uintN nfixed = genfp->script->nslots;
/* Push gen->frame around the interpreter activation. */
fp = js_GetTopStackFrame(cx);
cx->fp = &gen->frame;
gen->frame.down = fp;
ok = js_Interpret(cx);
cx->fp = fp;
gen->frame.down = NULL;
/*
* Get a pointer to new frame/slots. This memory is not "claimed", so
* the code before pushExecuteFrame must not reenter the interpreter.
*/
ExecuteFrameGuard frame;
if (!cx->stack().getExecuteFrame(cx, cx->fp, vplen, nfixed, frame)) {
gen->state = JSGEN_CLOSED;
return JS_FALSE;
}
/* Retract the stack pool and sanitize gen->arena. */
JS_ASSERT(!gen->arena.next);
JS_ASSERT(arena->next == &gen->arena);
JS_ASSERT(cx->stackPool.current == &gen->arena);
cx->stackPool.current = arena;
arena->next = NULL;
jsval *vp = frame.getvp();
JSStackFrame *fp = frame.getFrame();
if (gen->frame.flags & JSFRAME_YIELDING) {
/*
* Copy and rebase stack frame/args/slots. The "floating" flag must
* only be set on the generator's frame. See args_or_call_trace.
*/
uintN usedBefore = gen->savedRegs.sp - genVp;
memcpy(vp, genVp, usedBefore * sizeof(jsval));
fp->flags &= ~JSFRAME_FLOATING_GENERATOR;
fp->argv = vp + 2;
fp->regs = &gen->savedRegs;
gen->savedRegs.sp = fp->slots() + (gen->savedRegs.sp - genfp->slots());
JS_ASSERT(uintN(gen->savedRegs.sp - fp->slots()) <= fp->script->nslots);
#ifdef DEBUG
JSObject *callobjBefore = fp->callobj;
jsval argsobjBefore = fp->argsobj;
#endif
/*
* Repoint Call, Arguments, Block and With objects to the new live
* frame. Call and Arguments are done directly because we have
* pointers to them. Block and With objects are done indirectly through
* 'liveFrame'. See js_LiveFrameToFloating comment in jsiter.h.
*/
if (genfp->callobj)
fp->callobj->setPrivate(fp);
if (genfp->argsobj)
JSVAL_TO_OBJECT(fp->argsobj)->setPrivate(fp);
gen->liveFrame = fp;
(void)cx->enterGenerator(gen); /* OOM check above. */
/* Officially push |fp|. |frame|'s destructor pops. */
cx->stack().pushExecuteFrame(cx, frame, NULL);
ok = js_Interpret(cx);
/* Restore call/args/block objects. */
cx->leaveGenerator(gen);
gen->liveFrame = genfp;
if (fp->argsobj)
JSVAL_TO_OBJECT(fp->argsobj)->setPrivate(genfp);
if (fp->callobj)
fp->callobj->setPrivate(genfp);
JS_ASSERT_IF(argsobjBefore, argsobjBefore == fp->argsobj);
JS_ASSERT_IF(callobjBefore, callobjBefore == fp->callobj);
/* Copy and rebase stack frame/args/slots. Restore "floating" flag. */
JS_ASSERT(uintN(gen->savedRegs.sp - fp->slots()) <= fp->script->nslots);
uintN usedAfter = gen->savedRegs.sp - vp;
memcpy(genVp, vp, usedAfter * sizeof(jsval));
genfp->flags |= JSFRAME_FLOATING_GENERATOR;
genfp->argv = genVp + 2;
gen->savedRegs.sp = genfp->slots() + (gen->savedRegs.sp - fp->slots());
JS_ASSERT(uintN(gen->savedRegs.sp - genfp->slots()) <= genfp->script->nslots);
}
if (gen->getFloatingFrame()->flags & JSFRAME_YIELDING) {
/* Yield cannot fail, throw or be called on closing. */
JS_ASSERT(ok);
JS_ASSERT(!cx->throwing);
JS_ASSERT(gen->state == JSGEN_RUNNING);
JS_ASSERT(op != JSGENOP_CLOSE);
gen->frame.flags &= ~JSFRAME_YIELDING;
genfp->flags &= ~JSFRAME_YIELDING;
gen->state = JSGEN_OPEN;
return JS_TRUE;
}
gen->frame.rval = JSVAL_VOID;
genfp->rval = JSVAL_VOID;
gen->state = JSGEN_CLOSED;
if (ok) {
/* Returned, explicitly or by falling off the end. */
@ -1030,7 +1078,7 @@ generator_op(JSContext *cx, JSGeneratorOp op, jsval *vp, uintN argc)
: JSVAL_VOID;
if (!SendToGenerator(cx, op, obj, gen, arg))
return JS_FALSE;
*vp = gen->frame.rval;
*vp = gen->getFloatingFrame()->rval;
return JS_TRUE;
}

Просмотреть файл

@ -124,18 +124,57 @@ typedef enum JSGeneratorState {
struct JSGenerator {
JSObject *obj;
JSGeneratorState state;
JSStackFrame frame;
JSFrameRegs savedRegs;
JSArena arena;
jsval slots[1];
};
uintN vplen;
JSStackFrame *liveFrame;
jsval floatingStack[1];
#define FRAME_TO_GENERATOR(fp) \
((JSGenerator *) ((uint8 *)(fp) - offsetof(JSGenerator, frame)))
    /*
     * The floating frame is laid out directly after the vplen argv slots at
     * the start of floatingStack.
     */
    JSStackFrame *getFloatingFrame() {
        return reinterpret_cast<JSStackFrame *>(floatingStack + vplen);
    }
    /*
     * Return the frame the rest of the engine should treat as current: a
     * stack copy while the generator is running/closing, otherwise the
     * floating frame itself. The assert checks exactly that invariant.
     */
    JSStackFrame *getLiveFrame() {
        JS_ASSERT((state == JSGEN_RUNNING || state == JSGEN_CLOSING) ==
                  (liveFrame != getFloatingFrame()));
        return liveFrame;
    }
};
extern JSObject *
js_NewGenerator(JSContext *cx);
/*
* Generator stack frames do not have stable pointers since they get copied to
* and from the generator object and the stack (see SendToGenerator). This is a
* problem for Block and With objects, which need to store a pointer to the
* enclosing stack frame. The solution is for Block and With objects to store
* a pointer to the "floating" stack frame stored in the generator object,
* since it is stable, and maintain, in the generator object, a pointer to the
* "live" stack frame (either a copy on the stack or the floating frame). Thus,
* Block and With objects must "normalize" to and from the floating/live frames
* in the case of generators using the following functions.
*/
inline JSStackFrame *
js_FloatingFrameIfGenerator(JSContext *cx, JSStackFrame *fp)
{
    JS_ASSERT(cx->stack().contains(fp));
    /* Only generator frames have a distinct, stable floating copy. */
    if (JS_UNLIKELY(fp->isGenerator()))
        fp = cx->generatorFor(fp)->getFloatingFrame();
    return fp;
}
/* Given a floating frame, get the JSGenerator containing it. */
extern JSGenerator *
js_FloatingFrameToGenerator(JSStackFrame *fp);
inline JSStackFrame *
js_LiveFrameIfGenerator(JSStackFrame *fp)
{
    /* Non-generator frames are already their own live frame. */
    if (!(fp->flags & JSFRAME_GENERATOR))
        return fp;
    return js_FloatingFrameToGenerator(fp)->getLiveFrame();
}
#endif
extern JSExtendedClass js_GeneratorClass;

Просмотреть файл

@ -2991,7 +2991,7 @@ js_NewWithObject(JSContext *cx, JSObject *proto, JSObject *parent, jsint depth)
obj = NewObject(cx, &js_WithClass, proto, parent);
if (!obj)
return NULL;
obj->setPrivate(cx->fp);
obj->setPrivate(js_FloatingFrameIfGenerator(cx, cx->fp));
OBJ_SET_BLOCK_DEPTH(cx, obj, depth);
return obj;
}
@ -3019,7 +3019,8 @@ js_CloneBlockObject(JSContext *cx, JSObject *proto, JSStackFrame *fp)
return NULL;
/* The caller sets parent on its own. */
clone->init(&js_BlockClass, proto, NULL, reinterpret_cast<jsval>(fp));
jsval priv = (jsval)js_FloatingFrameIfGenerator(cx, fp);
clone->init(&js_BlockClass, proto, NULL, priv);
clone->fslots[JSSLOT_BLOCK_DEPTH] = proto->fslots[JSSLOT_BLOCK_DEPTH];
JS_ASSERT(cx->runtime->emptyBlockScope->freeslot == JSSLOT_BLOCK_DEPTH + 1);
@ -3042,7 +3043,7 @@ js_PutBlockObject(JSContext *cx, JSBool normalUnwind)
fp = cx->fp;
obj = fp->scopeChain;
JS_ASSERT(obj->getClass() == &js_BlockClass);
JS_ASSERT(obj->getPrivate() == cx->fp);
JS_ASSERT(obj->getPrivate() == js_FloatingFrameIfGenerator(cx, cx->fp));
JS_ASSERT(OBJ_IS_CLONED_BLOCK(obj));
/*
@ -3066,14 +3067,14 @@ js_PutBlockObject(JSContext *cx, JSBool normalUnwind)
JS_ASSERT(count >= 1);
depth += fp->script->nfixed;
obj->fslots[JSSLOT_BLOCK_DEPTH + 1] = fp->slots[depth];
obj->fslots[JSSLOT_BLOCK_DEPTH + 1] = fp->slots()[depth];
if (normalUnwind && count > 1) {
--count;
JS_LOCK_OBJ(cx, obj);
if (!obj->allocSlots(cx, JS_INITIAL_NSLOTS + count))
normalUnwind = JS_FALSE;
else
memcpy(obj->dslots, fp->slots + depth + 1, count * sizeof(jsval));
memcpy(obj->dslots, fp->slots() + depth + 1, count * sizeof(jsval));
JS_UNLOCK_OBJ(cx, obj);
}
@ -3098,9 +3099,10 @@ block_getProperty(JSContext *cx, JSObject *obj, jsval id, jsval *vp)
JSStackFrame *fp = (JSStackFrame *) obj->getPrivate();
if (fp) {
fp = js_LiveFrameIfGenerator(fp);
index += fp->script->nfixed + OBJ_BLOCK_DEPTH(cx, obj);
JS_ASSERT(index < fp->script->nslots);
*vp = fp->slots[index];
*vp = fp->slots()[index];
return true;
}
@ -3123,9 +3125,10 @@ block_setProperty(JSContext *cx, JSObject *obj, jsval id, jsval *vp)
JSStackFrame *fp = (JSStackFrame *) obj->getPrivate();
if (fp) {
fp = js_LiveFrameIfGenerator(fp);
index += fp->script->nfixed + OBJ_BLOCK_DEPTH(cx, obj);
JS_ASSERT(index < fp->script->nslots);
fp->slots[index] = *vp;
fp->slots()[index] = *vp;
return true;
}
@ -6473,11 +6476,11 @@ js_DumpStackFrame(JSStackFrame *fp)
fprintf(stderr, "pc = %p\n", pc);
fprintf(stderr, " current op: %s\n", js_CodeName[*pc]);
}
if (sp && fp->slots) {
fprintf(stderr, " slots: %p\n", (void *) fp->slots);
fprintf(stderr, " sp: %p = slots + %u\n", (void *) sp, (unsigned) (sp - fp->slots));
if (sp - fp->slots < 10000) { // sanity
for (jsval *p = fp->slots; p < sp; p++) {
if (sp && fp->slots()) {
fprintf(stderr, " slots: %p\n", (void *) fp->slots());
fprintf(stderr, " sp: %p = slots + %u\n", (void *) sp, (unsigned) (sp - fp->slots()));
if (sp - fp->slots() < 10000) { // sanity
for (jsval *p = fp->slots(); p < sp; p++) {
fprintf(stderr, " %p: ", (void *) p);
dumpValue(*p);
fputc('\n', stderr);
@ -6485,7 +6488,7 @@ js_DumpStackFrame(JSStackFrame *fp)
}
} else {
fprintf(stderr, " sp: %p\n", (void *) sp);
fprintf(stderr, " slots: %p\n", (void *) fp->slots);
fprintf(stderr, " slots: %p\n", (void *) fp->slots());
}
fprintf(stderr, " argv: %p (argc: %u)\n", (void *) fp->argv, (unsigned) fp->argc);
MaybeDumpObject("callobj", fp->callobj);
@ -6508,8 +6511,6 @@ js_DumpStackFrame(JSStackFrame *fp)
fprintf(stderr, " debugger");
if (fp->flags & JSFRAME_EVAL)
fprintf(stderr, " eval");
if (fp->flags & JSFRAME_ROOTED_ARGV)
fprintf(stderr, " rooted_argv");
if (fp->flags & JSFRAME_YIELDING)
fprintf(stderr, " yielding");
if (fp->flags & JSFRAME_ITERATOR)

Просмотреть файл

@ -149,7 +149,7 @@ BEGIN_CASE(JSOP_POPN)
clasp = obj->getClass();
if (clasp != &js_BlockClass && clasp != &js_WithClass)
continue;
if (obj->getPrivate() != fp)
if (obj->getPrivate() != js_FloatingFrameIfGenerator(cx, fp))
break;
JS_ASSERT(StackBase(fp) + OBJ_BLOCK_DEPTH(cx, obj)
+ ((clasp == &js_BlockClass)
@ -207,7 +207,7 @@ BEGIN_CASE(JSOP_STOP)
* current frame.
*/
JS_ASSERT(op == JSOP_STOP);
JS_ASSERT((uintN)(regs.sp - fp->slots) <= script->nslots);
JS_ASSERT((uintN)(regs.sp - fp->slots()) <= script->nslots);
regs.pc = fp->imacpc + js_CodeSpec[*fp->imacpc].length;
fp->imacpc = NULL;
atoms = script->atomMap.vector;
@ -222,16 +222,15 @@ BEGIN_CASE(JSOP_STOP)
if (inlineCallCount)
inline_return:
{
JSInlineFrame *ifp = (JSInlineFrame *) fp;
void *hookData = ifp->hookData;
JS_ASSERT(!fp->blockChain);
JS_ASSERT(!js_IsActiveWithOrBlock(cx, fp->scopeChain, 0));
JS_ASSERT(fp->down->regs == &fp->callerRegs);
if (script->staticLevel < JS_DISPLAY_SIZE)
if (JS_LIKELY(script->staticLevel < JS_DISPLAY_SIZE))
cx->display[script->staticLevel] = fp->displaySave;
if (hookData) {
void *hookData = fp->hookData;
if (JS_UNLIKELY(hookData != NULL)) {
JSInterpreterHook hook;
JSBool status;
@ -260,7 +259,7 @@ BEGIN_CASE(JSOP_STOP)
/* Restore context version only if callee hasn't set version. */
if (JS_LIKELY(cx->version == currentVersion)) {
currentVersion = ifp->callerVersion;
currentVersion = fp->callerVersion;
if (currentVersion != cx->version)
js_SetVersion(cx, currentVersion);
}
@ -275,22 +274,20 @@ BEGIN_CASE(JSOP_STOP)
JS_RUNTIME_METER(cx->runtime, constructs);
}
JSStackFrame *down = fp->down;
bool recursive = fp->script == down->script;
/* Restore caller's registers. */
regs = ifp->callerRegs;
/* Store the return value in the caller's operand frame. */
regs.sp -= 1 + (size_t) ifp->frame.argc;
regs = fp->callerRegs;
regs.sp -= 1 + (size_t) fp->argc;
regs.sp[-1] = fp->rval;
down->regs = &regs;
bool recursive = fp->script == fp->down->script;
/* Pop |fp| from the context. */
cx->stack().popInlineFrame(cx, fp, down);
/* Restore cx->fp and release the inline frame's space. */
cx->fp = fp = fp->down;
JS_ASSERT(fp->regs == &ifp->callerRegs);
fp->regs = &regs;
JS_ARENA_RELEASE(&cx->stackPool, ifp->mark);
/* Restore the calling script's interpreter registers. */
/* Sync interpreter registers. */
fp = cx->fp;
script = fp->script;
atoms = FrameAtomBase(cx, fp);
@ -502,7 +499,7 @@ BEGIN_CASE(JSOP_FORLOCAL)
slot = GET_SLOTNO(regs.pc);
JS_ASSERT(slot < fp->script->nslots);
JS_ASSERT(!JSVAL_IS_PRIMITIVE(regs.sp[-1]));
if (!IteratorNext(cx, JSVAL_TO_OBJECT(regs.sp[-1]), &fp->slots[slot]))
if (!IteratorNext(cx, JSVAL_TO_OBJECT(regs.sp[-1]), &fp->slots()[slot]))
goto error;
END_CASE(JSOP_FORLOCAL)
@ -1333,9 +1330,9 @@ BEGIN_CASE(JSOP_LOCALINC)
do_local_incop:
slot = GET_SLOTNO(regs.pc);
JS_ASSERT(slot < fp->script->nslots);
vp = fp->slots + slot;
vp = fp->slots() + slot;
METER_SLOT_OP(op, slot);
vp = fp->slots + slot;
vp = fp->slots() + slot;
do_int_fast_incop:
rval = *vp;
@ -1381,7 +1378,7 @@ BEGIN_CASE(JSOP_GVARINC)
slot = GET_SLOTNO(regs.pc);
JS_ASSERT(slot < GlobalVarCount(fp));
METER_SLOT_OP(op, slot);
lval = fp->slots[slot];
lval = fp->slots()[slot];
if (JSVAL_IS_NULL(lval)) {
op = op2;
DO_OP();
@ -1442,7 +1439,7 @@ BEGIN_CASE(JSOP_GETLOCALPROP)
i = SLOTNO_LEN;
slot = GET_SLOTNO(regs.pc);
JS_ASSERT(slot < script->nslots);
PUSH_OPND(fp->slots[slot]);
PUSH_OPND(fp->slots()[slot]);
goto do_getprop_body;
BEGIN_CASE(JSOP_GETPROP)
@ -1629,7 +1626,7 @@ BEGIN_CASE(JSOP_CALLPROP)
END_CASE(JSOP_CALLPROP)
BEGIN_CASE(JSOP_UNBRAND)
JS_ASSERT(regs.sp - fp->slots >= 1);
JS_ASSERT(regs.sp - fp->slots() >= 1);
lval = FETCH_OPND(-1);
obj = JSVAL_TO_OBJECT(lval);
if (!obj->unbrand(cx))
@ -1996,7 +1993,7 @@ BEGIN_CASE(JSOP_NEW)
}
}
if (!js_InvokeConstructor(cx, argc, JS_FALSE, vp))
if (!js_InvokeConstructor(cx, InvokeArgsGuard(vp, argc), JS_FALSE))
goto error;
regs.sp = vp + 1;
CHECK_INTERRUPT_HANDLER();
@ -2021,148 +2018,100 @@ BEGIN_CASE(JSOP_APPLY)
if (FUN_INTERPRETED(fun))
inline_call:
{
uintN nframeslots, nvars, missing;
JSArena *a;
jsuword nbytes;
void *newmark;
jsval *newsp;
JSInlineFrame *newifp;
JSInterpreterHook hook;
script = fun->u.i.script;
if (script->isEmpty()) {
script = fp->script;
JSScript *newscript = fun->u.i.script;
if (JS_UNLIKELY(newscript->isEmpty())) {
*vp = JSVAL_VOID;
regs.sp = vp + 1;
goto end_call;
}
/* Restrict recursion of lightweight functions. */
if (inlineCallCount >= JS_MAX_INLINE_CALL_COUNT) {
if (JS_UNLIKELY(inlineCallCount >= JS_MAX_INLINE_CALL_COUNT)) {
js_ReportOverRecursed(cx);
script = fp->script;
goto error;
}
/* Compute the total number of stack slots needed by fun. */
nframeslots = JS_HOWMANY(sizeof(JSInlineFrame), sizeof(jsval));
atoms = script->atomMap.vector;
nbytes = (nframeslots + script->nslots) * sizeof(jsval);
/* Allocate missing expected args adjacent to actuals. */
a = cx->stackPool.current;
newmark = (void *) a->avail;
if (fun->nargs <= argc) {
missing = 0;
/*
* Get pointer to new frame/slots, without changing global state.
* Initialize missing args if there are any.
*/
StackSpace &stack = cx->stack();
uintN nfixed = newscript->nslots;
uintN funargs = fun->nargs;
JSStackFrame *newfp;
if (argc < funargs) {
uintN missing = funargs - argc;
newfp = stack.getInlineFrame(cx, regs.sp, missing, nfixed);
if (!newfp)
goto error;
for (jsval *v = regs.sp, *end = v + missing; v != end; ++v)
*v = JSVAL_VOID;
} else {
newsp = vp + 2 + fun->nargs;
JS_ASSERT(newsp > regs.sp);
if ((jsuword) newsp <= a->limit) {
if ((jsuword) newsp > a->avail)
a->avail = (jsuword) newsp;
jsval *argsp = newsp;
do {
*--argsp = JSVAL_VOID;
} while (argsp != regs.sp);
missing = 0;
} else {
missing = fun->nargs - argc;
nbytes += (2 + fun->nargs) * sizeof(jsval);
}
newfp = stack.getInlineFrame(cx, regs.sp, 0, nfixed);
if (!newfp)
goto error;
}
/* Allocate the inline frame with its slots and operands. */
if (a->avail + nbytes <= a->limit) {
newsp = (jsval *) a->avail;
a->avail += nbytes;
JS_ASSERT(missing == 0);
} else {
JS_ARENA_ALLOCATE_CAST(newsp, jsval *, &cx->stackPool,
nbytes);
if (!newsp) {
js_ReportOutOfScriptQuota(cx);
goto bad_inline_call;
}
/*
* Move args if the missing ones overflow arena a, then push
* undefined for the missing args.
*/
if (missing) {
memcpy(newsp, vp, (2 + argc) * sizeof(jsval));
vp = newsp;
newsp = vp + 2 + argc;
do {
*newsp++ = JSVAL_VOID;
} while (--missing != 0);
}
/* Initialize stack frame. */
newfp->callobj = NULL;
newfp->argsobj = NULL;
newfp->script = newscript;
newfp->fun = fun;
newfp->argc = argc;
newfp->argv = vp + 2;
newfp->rval = JSVAL_VOID;
newfp->annotation = NULL;
newfp->scopeChain = parent = obj->getParent();
newfp->flags = flags;
newfp->blockChain = NULL;
if (JS_LIKELY(newscript->staticLevel < JS_DISPLAY_SIZE)) {
JSStackFrame **disp = &cx->display[newscript->staticLevel];
newfp->displaySave = *disp;
*disp = newfp;
}
/* Claim space for the stack frame and initialize it. */
newifp = (JSInlineFrame *) newsp;
newsp += nframeslots;
newifp->frame.callobj = NULL;
newifp->frame.argsobj = NULL;
newifp->frame.script = script;
newifp->frame.fun = fun;
newifp->frame.argc = argc;
newifp->frame.argv = vp + 2;
newifp->frame.rval = JSVAL_VOID;
newifp->frame.down = fp;
newifp->frame.annotation = NULL;
newifp->frame.scopeChain = parent = obj->getParent();
newifp->frame.flags = flags;
newifp->frame.blockChain = NULL;
if (script->staticLevel < JS_DISPLAY_SIZE) {
JSStackFrame **disp = &cx->display[script->staticLevel];
newifp->frame.displaySave = *disp;
*disp = &newifp->frame;
}
newifp->mark = newmark;
/* Compute the 'this' parameter now that argv is set. */
JS_ASSERT(!JSFUN_BOUND_METHOD_TEST(fun->flags));
newifp->frame.thisv = vp[1];
newifp->frame.regs = NULL;
newifp->frame.imacpc = NULL;
newifp->frame.slots = newsp;
newfp->thisv = vp[1];
newfp->regs = NULL;
newfp->imacpc = NULL;
/* Push void to initialize local variables. */
nvars = fun->u.i.nvars;
while (nvars--)
*newsp++ = JSVAL_VOID;
jsval *newsp = StackBase(newfp);
for (jsval *v = newfp->slots(); v != newsp; ++v)
*v = JSVAL_VOID;
/* Scope with a call object parented by callee's parent. */
if (JSFUN_HEAVYWEIGHT_TEST(fun->flags) &&
!js_GetCallObject(cx, &newifp->frame)) {
goto bad_inline_call;
}
if (fun->isHeavyweight() && !js_GetCallObject(cx, newfp))
goto error;
/* Switch version if currentVersion wasn't overridden. */
newifp->callerVersion = (JSVersion) cx->version;
newfp->callerVersion = (JSVersion) cx->version;
if (JS_LIKELY(cx->version == currentVersion)) {
currentVersion = (JSVersion) script->version;
if (currentVersion != cx->version)
currentVersion = (JSVersion) newscript->version;
if (JS_UNLIKELY(currentVersion != cx->version))
js_SetVersion(cx, currentVersion);
}
/* Push the frame and set interpreter registers. */
newifp->callerRegs = regs;
fp->regs = &newifp->callerRegs;
newfp->callerRegs = regs;
fp->regs = &newfp->callerRegs;
regs.sp = newsp;
regs.pc = script->code;
newifp->frame.regs = &regs;
cx->fp = fp = &newifp->frame;
regs.pc = newscript->code;
newfp->regs = &regs;
stack.pushInlineFrame(cx, fp, newfp);
JS_ASSERT(newfp == cx->fp);
/* Import into locals. */
fp = newfp;
script = newscript;
atoms = script->atomMap.vector;
/* Call the debugger hook if present. */
hook = cx->debugHooks->callHook;
if (hook) {
newifp->hookData = hook(cx, &newifp->frame, JS_TRUE, 0,
cx->debugHooks->callHookData);
if (JSInterpreterHook hook = cx->debugHooks->callHook) {
fp->hookData = hook(cx, fp, JS_TRUE, 0,
cx->debugHooks->callHookData);
CHECK_INTERRUPT_HANDLER();
} else {
newifp->hookData = NULL;
fp->hookData = NULL;
}
inlineCallCount++;
@ -2192,13 +2141,6 @@ BEGIN_CASE(JSOP_APPLY)
/* Load first op and dispatch it (safe since JSOP_STOP). */
op = (JSOp) *regs.pc;
DO_OP();
bad_inline_call:
JS_ASSERT(fp->regs == &regs);
script = fp->script;
atoms = script->atomMap.vector;
js_FreeRawStack(cx, newmark);
goto error;
}
if (fun->flags & JSFUN_FAST_NATIVE) {
@ -2217,7 +2159,7 @@ BEGIN_CASE(JSOP_APPLY)
}
}
ok = js_Invoke(cx, argc, vp, 0);
ok = js_Invoke(cx, InvokeArgsGuard(vp, argc), 0);
regs.sp = vp + 1;
CHECK_INTERRUPT_HANDLER();
if (!ok)
@ -2231,7 +2173,7 @@ END_CASE(JSOP_CALL)
BEGIN_CASE(JSOP_SETCALL)
argc = GET_ARGC(regs.pc);
vp = regs.sp - argc - 2;
if (js_Invoke(cx, argc, vp, 0))
if (js_Invoke(cx, InvokeArgsGuard(vp, argc), 0))
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_BAD_LEFTSIDE_OF_ASS);
goto error;
END_CASE(JSOP_SETCALL)
@ -2598,20 +2540,20 @@ END_SET_CASE(JSOP_SETARG)
BEGIN_CASE(JSOP_GETLOCAL)
slot = GET_SLOTNO(regs.pc);
JS_ASSERT(slot < script->nslots);
PUSH_OPND(fp->slots[slot]);
PUSH_OPND(fp->slots()[slot]);
END_CASE(JSOP_GETLOCAL)
BEGIN_CASE(JSOP_CALLLOCAL)
slot = GET_SLOTNO(regs.pc);
JS_ASSERT(slot < script->nslots);
PUSH_OPND(fp->slots[slot]);
PUSH_OPND(fp->slots()[slot]);
PUSH_OPND(JSVAL_NULL);
END_CASE(JSOP_CALLLOCAL)
BEGIN_CASE(JSOP_SETLOCAL)
slot = GET_SLOTNO(regs.pc);
JS_ASSERT(slot < script->nslots);
vp = &fp->slots[slot];
vp = &fp->slots()[slot];
*vp = FETCH_OPND(-1);
END_SET_CASE(JSOP_SETLOCAL)
@ -2690,7 +2632,7 @@ BEGIN_CASE(JSOP_CALLGVAR)
slot = GET_SLOTNO(regs.pc);
JS_ASSERT(slot < GlobalVarCount(fp));
METER_SLOT_OP(op, slot);
lval = fp->slots[slot];
lval = fp->slots()[slot];
if (JSVAL_IS_NULL(lval)) {
op = (op == JSOP_GETGVAR) ? JSOP_NAME : JSOP_CALLNAME;
DO_OP();
@ -2711,7 +2653,7 @@ BEGIN_CASE(JSOP_SETGVAR)
rval = FETCH_OPND(-1);
JS_ASSERT(fp->varobj(cx) == cx->activeCallStack()->getInitialVarObj());
obj = cx->activeCallStack()->getInitialVarObj();
lval = fp->slots[slot];
lval = fp->slots()[slot];
if (JSVAL_IS_NULL(lval)) {
/*
* Inline-clone and deoptimize JSOP_SETNAME code here because
@ -2803,7 +2745,7 @@ BEGIN_CASE(JSOP_DEFVAR)
* The atom index for the global's name literal is identical to its
* variable index.
*/
fp->slots[index] = INT_TO_JSVAL(sprop->slot);
fp->slots()[index] = INT_TO_JSVAL(sprop->slot);
}
}
@ -3045,7 +2987,7 @@ BEGIN_CASE(JSOP_DEFLOCALFUN)
slot = GET_SLOTNO(regs.pc);
TRACE_2(DefLocalFunSetSlot, slot, obj);
fp->slots[slot] = OBJECT_TO_JSVAL(obj);
fp->slots()[slot] = OBJECT_TO_JSVAL(obj);
END_CASE(JSOP_DEFLOCALFUN)
BEGIN_CASE(JSOP_DEFLOCALFUN_FC)
@ -3058,7 +3000,7 @@ BEGIN_CASE(JSOP_DEFLOCALFUN_FC)
slot = GET_SLOTNO(regs.pc);
TRACE_2(DefLocalFunSetSlot, slot, obj);
fp->slots[slot] = OBJECT_TO_JSVAL(obj);
fp->slots()[slot] = OBJECT_TO_JSVAL(obj);
END_CASE(JSOP_DEFLOCALFUN_FC)
BEGIN_CASE(JSOP_DEFLOCALFUN_DBGFC)
@ -3069,7 +3011,7 @@ BEGIN_CASE(JSOP_DEFLOCALFUN_DBGFC)
goto error;
slot = GET_SLOTNO(regs.pc);
fp->slots[slot] = OBJECT_TO_JSVAL(obj);
fp->slots()[slot] = OBJECT_TO_JSVAL(obj);
END_CASE(JSOP_DEFLOCALFUN_DBGFC)
BEGIN_CASE(JSOP_LAMBDA)
@ -3444,7 +3386,7 @@ END_CASE(JSOP_INITELEM)
BEGIN_CASE(JSOP_DEFSHARP)
slot = GET_UINT16(regs.pc);
JS_ASSERT(slot + 1 < fp->script->nfixed);
lval = fp->slots[slot];
lval = fp->slots()[slot];
if (!JSVAL_IS_PRIMITIVE(lval)) {
obj = JSVAL_TO_OBJECT(lval);
} else {
@ -3452,7 +3394,7 @@ BEGIN_CASE(JSOP_DEFSHARP)
obj = js_NewArrayObject(cx, 0, NULL);
if (!obj)
goto error;
fp->slots[slot] = OBJECT_TO_JSVAL(obj);
fp->slots()[slot] = OBJECT_TO_JSVAL(obj);
}
i = (jsint) GET_UINT16(regs.pc + UINT16_LEN);
id = INT_TO_JSID(i);
@ -3471,12 +3413,12 @@ END_CASE(JSOP_DEFSHARP)
BEGIN_CASE(JSOP_USESHARP)
slot = GET_UINT16(regs.pc);
JS_ASSERT(slot + 1 < fp->script->nfixed);
lval = fp->slots[slot];
lval = fp->slots()[slot];
i = (jsint) GET_UINT16(regs.pc + UINT16_LEN);
if (JSVAL_IS_VOID(lval)) {
rval = JSVAL_VOID;
} else {
obj = JSVAL_TO_OBJECT(fp->slots[slot]);
obj = JSVAL_TO_OBJECT(fp->slots()[slot]);
id = INT_TO_JSID(i);
if (!obj->getProperty(cx, id, &rval))
goto error;
@ -3495,7 +3437,7 @@ END_CASE(JSOP_USESHARP)
BEGIN_CASE(JSOP_SHARPINIT)
slot = GET_UINT16(regs.pc);
JS_ASSERT(slot + 1 < fp->script->nfixed);
vp = &fp->slots[slot];
vp = &fp->slots()[slot];
rval = vp[1];
/*
@ -3585,7 +3527,7 @@ BEGIN_CASE(JSOP_SETLOCALPOP)
JS_ASSERT((size_t) (regs.sp - StackBase(fp)) >= 2);
slot = GET_UINT16(regs.pc);
JS_ASSERT(slot + 1 < script->nslots);
fp->slots[slot] = POP_OPND();
fp->slots()[slot] = POP_OPND();
END_CASE(JSOP_SETLOCALPOP)
BEGIN_CASE(JSOP_IFPRIMTOP)
@ -3902,7 +3844,7 @@ BEGIN_CASE(JSOP_ENTERBLOCK)
JS_ASSERT(StackBase(fp) + OBJ_BLOCK_DEPTH(cx, obj) == regs.sp);
vp = regs.sp + OBJ_BLOCK_COUNT(cx, obj);
JS_ASSERT(regs.sp < vp);
JS_ASSERT(vp <= fp->slots + script->nslots);
JS_ASSERT(vp <= fp->slots() + script->nslots);
while (regs.sp < vp) {
STORE_OPND(0, JSVAL_VOID);
regs.sp++;
@ -3922,7 +3864,7 @@ BEGIN_CASE(JSOP_ENTERBLOCK)
while ((clasp = obj2->getClass()) == &js_WithClass)
obj2 = obj2->getParent();
if (clasp == &js_BlockClass &&
obj2->getPrivate() == fp) {
obj2->getPrivate() == js_FloatingFrameIfGenerator(cx, fp)) {
JSObject *youngestProto = obj2->getProto();
JS_ASSERT(!OBJ_IS_CLONED_BLOCK(youngestProto));
parent = obj;
@ -3987,7 +3929,7 @@ BEGIN_CASE(JSOP_GENERATOR)
BEGIN_CASE(JSOP_YIELD)
ASSERT_NOT_THROWING(cx);
if (FRAME_TO_GENERATOR(fp)->state == JSGEN_CLOSING) {
if (cx->generatorFor(fp)->state == JSGEN_CLOSING) {
js_ReportValueError(cx, JSMSG_BAD_GENERATOR_YIELD,
JSDVG_SEARCH_STACK, fp->argv[-2], NULL);
goto error;
@ -4002,7 +3944,7 @@ BEGIN_CASE(JSOP_ARRAYPUSH)
slot = GET_UINT16(regs.pc);
JS_ASSERT(script->nfixed <= slot);
JS_ASSERT(slot < script->nslots);
lval = fp->slots[slot];
lval = fp->slots()[slot];
obj = JSVAL_TO_OBJECT(lval);
rval = FETCH_OPND(-1);
if (!js_ArrayCompPush(cx, obj, rval))

Просмотреть файл

@ -151,8 +151,12 @@ extern "C++" {
namespace js {
class ExecuteArgsGuard;
class InvokeFrameGuard;
class InvokeArgsGuard;
class TraceRecorder;
struct TraceMonitor;
class TraceMonitor;
class StackSpace;
class CallStack;
class TokenStream;

Просмотреть файл

@ -245,7 +245,7 @@ TraceRecorder::upRecursion()
* Need to compute this from the down frame, since the stack could have
* moved on this one.
*/
fi->spdist = cx->fp->down->regs->sp - cx->fp->down->slots;
fi->spdist = cx->fp->down->regs->sp - cx->fp->down->slots();
JS_ASSERT(cx->fp->argc == cx->fp->down->argc);
fi->set_argc(uint16(cx->fp->argc), false);
fi->callerHeight = downPostSlots;
@ -599,12 +599,11 @@ TraceRecorder::slurpDownFrames(jsbytecode* return_pc)
&fp->scopeChainVal,
&info);
/* vars */
LIns* slots_ins = addName(lir->insLoad(LIR_ldp, fp_ins, offsetof(JSStackFrame, slots),
ACC_OTHER),
LIns* slots_ins = addName(lir->ins2(LIR_piadd, fp_ins, INS_CONSTWORD(sizeof(JSStackFrame))),
"slots");
for (unsigned i = 0; i < fp->script->nfixed; i++)
slurpSlot(lir->insLoad(LIR_ldp, slots_ins, i * sizeof(jsval), ACC_OTHER),
&fp->slots[i],
&fp->slots()[i],
&info);
/* stack vals */
unsigned nfixed = fp->script->nfixed;

Просмотреть файл

@ -78,6 +78,7 @@
#include "jsvector.h"
#include "jsversion.h"
#include "jscntxtinlines.h"
#include "jsobjinlines.h"
#include "jsstrinlines.h"
@ -1682,30 +1683,25 @@ str_search(JSContext *cx, uintN argc, jsval *vp)
struct ReplaceData
{
ReplaceData(JSContext *cx)
: g(cx), invokevp(NULL), cb(cx)
: g(cx), cb(cx)
{}
~ReplaceData() {
if (invokevp) {
/* If we set invokevp, we already left trace. */
VOUCH_HAVE_STACK();
js_FreeStack(g.cx(), invokevpMark);
}
bool argsPushed() const {
return args.getvp() != NULL;
}
JSString *str; /* 'this' parameter object as a string */
RegExpGuard g; /* regexp parameter object and private data */
JSObject *lambda; /* replacement function object or null */
JSString *repstr; /* replacement string */
jschar *dollar; /* null or pointer to first $ in repstr */
jschar *dollarEnd; /* limit pointer for js_strchr_limit */
jsint index; /* index in result of next replacement */
jsint leftIndex; /* left context index in str->chars */
JSSubString dollarStr; /* for "$$" InterpretDollar result */
bool calledBack; /* record whether callback has been called */
jsval *invokevp; /* reusable allocation from js_AllocStack */
void *invokevpMark; /* the mark to return */
JSCharBuffer cb; /* buffer built during DoMatch */
JSString *str; /* 'this' parameter object as a string */
RegExpGuard g; /* regexp parameter object and private data */
JSObject *lambda; /* replacement function object or null */
JSString *repstr; /* replacement string */
jschar *dollar; /* null or pointer to first $ in repstr */
jschar *dollarEnd; /* limit pointer for js_strchr_limit */
jsint index; /* index in result of next replacement */
jsint leftIndex; /* left context index in str->chars */
JSSubString dollarStr; /* for "$$" InterpretDollar result */
bool calledBack; /* record whether callback has been called */
InvokeArgsGuard args; /* arguments for lambda's js_Invoke call */
JSCharBuffer cb; /* buffer built during DoMatch */
};
static JSSubString *
@ -1816,17 +1812,13 @@ FindReplaceLength(JSContext *cx, ReplaceData &rdata, size_t *sizep)
uintN p = rdata.g.re()->parenCount;
uintN argc = 1 + p + 2;
if (!rdata.invokevp) {
rdata.invokevp = js_AllocStack(cx, 2 + argc, &rdata.invokevpMark);
if (!rdata.invokevp)
return false;
}
jsval* invokevp = rdata.invokevp;
if (!rdata.argsPushed() && !cx->stack().pushInvokeArgs(cx, argc, rdata.args))
return false;
PreserveRegExpStatics save(cx);
/* Push lambda and its 'this' parameter. */
jsval *sp = invokevp;
jsval *sp = rdata.args.getvp();
*sp++ = OBJECT_TO_JSVAL(lambda);
*sp++ = OBJECT_TO_JSVAL(lambda->getParent());
@ -1848,7 +1840,7 @@ FindReplaceLength(JSContext *cx, ReplaceData &rdata, size_t *sizep)
*sp++ = INT_TO_JSVAL((jsint)cx->regExpStatics.leftContext.length);
*sp++ = STRING_TO_JSVAL(rdata.str);
if (!js_Invoke(cx, argc, invokevp, 0))
if (!js_Invoke(cx, rdata.args, 0))
return false;
/*
@ -1856,7 +1848,7 @@ FindReplaceLength(JSContext *cx, ReplaceData &rdata, size_t *sizep)
* created by this js_ValueToString that would otherwise be GC-
* able, until we use rdata.repstr in DoReplace.
*/
repstr = js_ValueToString(cx, *invokevp);
repstr = js_ValueToString(cx, *rdata.args.getvp());
if (!repstr)
return false;

Просмотреть файл

@ -78,6 +78,7 @@
#include "jstypedarray.h"
#include "jsatominlines.h"
#include "jscntxtinlines.h"
#include "jspropertycacheinlines.h"
#include "jsobjinlines.h"
#include "jsscopeinlines.h"
@ -1754,7 +1755,7 @@ VisitFrameSlots(Visitor &visitor, unsigned depth, JSStackFrame *fp,
if (!visitor.visitStackSlots(&fp->scopeChainVal, 1, fp))
return false;
visitor.setStackSlotKind("var");
if (!visitor.visitStackSlots(fp->slots, fp->script->nfixed, fp))
if (!visitor.visitStackSlots(fp->slots(), fp->script->nfixed, fp))
return false;
}
visitor.setStackSlotKind("stack");
@ -2525,10 +2526,10 @@ TraceRecorder::nativeStackOffset(jsval* p) const
/*
* If it's not in a pending frame, it must be on the stack of the current
* frame above sp but below fp->slots + script->nslots.
* frame above sp but below fp->slots() + script->nslots.
*/
if (!visitor.stopped()) {
JS_ASSERT(size_t(p - cx->fp->slots) < cx->fp->script->nslots);
JS_ASSERT(size_t(p - cx->fp->slots()) < cx->fp->script->nslots);
offset += size_t(p - cx->fp->regs->sp) * sizeof(double);
}
return offset;
@ -3057,7 +3058,7 @@ GetUpvarArgOnTrace(JSContext* cx, uint32 upvarLevel, int32 slot, uint32 callDept
// For this traits type, 'slot' is an index into the local slots array.
struct UpvarVarTraits {
static jsval interp_get(JSStackFrame* fp, int32 slot) {
return fp->slots[slot];
return fp->slots()[slot];
}
static uint32 native_slot(uint32 argc, int32 slot) {
@ -3078,7 +3079,7 @@ GetUpvarVarOnTrace(JSContext* cx, uint32 upvarLevel, int32 slot, uint32 callDept
*/
struct UpvarStackTraits {
static jsval interp_get(JSStackFrame* fp, int32 slot) {
return fp->slots[slot + fp->script->nfixed];
return fp->slots()[slot + fp->script->nfixed];
}
static uint32 native_slot(uint32 argc, int32 slot) {
@ -3228,7 +3229,7 @@ struct VarClosureTraits
}
// See also UpvarVarTraits.
static inline jsval* slots(JSStackFrame* fp) { return fp->slots; }
static inline jsval* slots(JSStackFrame* fp) { return fp->slots(); }
static inline jsval* slots(JSObject* obj) {
// We know Call objects use dslots.
return obj->dslots + slot_offset(obj);
@ -3630,7 +3631,7 @@ TraceRecorder::attemptImport(jsval* p)
CountSlotsVisitor countVisitor(p);
VisitStackSlots(countVisitor, cx, callDepth);
if (countVisitor.stopped() || size_t(p - cx->fp->slots) < cx->fp->script->nslots)
if (countVisitor.stopped() || size_t(p - cx->fp->slots()) < cx->fp->script->nslots)
return get(p);
return NULL;
@ -5385,122 +5386,90 @@ SynthesizeFrame(JSContext* cx, const FrameInfo& fi, JSObject* callee)
js_ReconstructStackDepth(cx, fp->script, fi.pc) ==
uintN(fi.spdist - fp->script->nfixed));
uintN nframeslots = JS_HOWMANY(sizeof(JSInlineFrame), sizeof(jsval));
JSScript* script = fun->u.i.script;
size_t nbytes = (nframeslots + script->nslots) * sizeof(jsval);
/* Code duplicated from inline_call: case in js_Interpret (FIXME). */
JSArena* a = cx->stackPool.current;
void* newmark = (void*) a->avail;
/* Simulate js_Interpret locals for when |cx->fp == fp|. */
JSScript* newscript = fun->u.i.script;
jsval* sp = fp->slots() + fi.spdist;
uintN argc = fi.get_argc();
jsval* vp = fp->slots + fi.spdist - (2 + argc);
uintN missing = 0;
jsval* newsp;
jsval* vp = sp - (2 + argc);
if (fun->nargs > argc) {
const JSFrameRegs& regs = *fp->regs;
newsp = vp + 2 + fun->nargs;
JS_ASSERT(newsp > regs.sp);
if ((jsuword) newsp <= a->limit) {
if ((jsuword) newsp > a->avail)
a->avail = (jsuword) newsp;
jsval* argsp = newsp;
do {
*--argsp = JSVAL_VOID;
} while (argsp != regs.sp);
missing = 0;
} else {
missing = fun->nargs - argc;
nbytes += (2 + fun->nargs) * sizeof(jsval);
}
}
/* Allocate the inline frame with its vars and operands. */
if (a->avail + nbytes <= a->limit) {
newsp = (jsval *) a->avail;
a->avail += nbytes;
JS_ASSERT(missing == 0);
} else {
JS_ARENA_ALLOCATE_CAST(newsp, jsval *, &cx->stackPool, nbytes);
if (!newsp)
OutOfMemoryAbort();
/*
* Move args if the missing ones overflow arena a, then push
* undefined for the missing args.
*/
if (missing) {
memcpy(newsp, vp, (2 + argc) * sizeof(jsval));
vp = newsp;
newsp = vp + 2 + argc;
do {
*newsp++ = JSVAL_VOID;
} while (--missing != 0);
}
}
/* Claim space for the stack frame and initialize it. */
JSInlineFrame* newifp = (JSInlineFrame *) newsp;
newsp += nframeslots;
newifp->frame.callobj = NULL;
newifp->frame.argsobj = NULL;
newifp->frame.script = script;
newifp->frame.fun = fun;
bool constructing = fi.is_constructing();
newifp->frame.argc = argc;
newifp->callerRegs.pc = fi.pc;
newifp->callerRegs.sp = fp->slots + fi.spdist;
/* Fixup |fp| using |fi|. */
fp->regs->sp = sp;
fp->regs->pc = fi.pc;
fp->imacpc = fi.imacpc;
fp->blockChain = fi.block;
#ifdef DEBUG
if (fi.block != fp->blockChain) {
for (JSObject* obj = fi.block; obj != fp->blockChain; obj = obj->getParent())
JS_ASSERT(obj);
}
#endif
fp->blockChain = fi.block;
newifp->frame.argv = newifp->callerRegs.sp - argc;
JS_ASSERT(newifp->frame.argv);
/*
* Get pointer to new frame/slots, without changing global state.
* Initialize missing args if there are any. (Copied from js_Interpret.)
*
* StackSpace::getInlineFrame calls js_ReportOutOfScriptQuota if there is
* no space (which will try to deep bail, which is bad), however we already
* check on entry to ExecuteTree that there is enough space.
*/
StackSpace &stack = cx->stack();
uintN nslots = newscript->nslots;
uintN funargs = fun->nargs;
jsval *argv = vp + 2;
JSStackFrame *newfp;
if (argc < funargs) {
uintN missing = funargs - argc;
newfp = stack.getInlineFrame(cx, sp, missing, nslots);
for (jsval *v = argv + argc, *end = v + missing; v != end; ++v)
*v = JSVAL_VOID;
} else {
newfp = stack.getInlineFrame(cx, sp, 0, nslots);
}
/* Initialize the new stack frame. */
newfp->callobj = NULL;
newfp->argsobj = NULL;
newfp->script = newscript;
newfp->fun = fun;
newfp->argc = argc;
newfp->argv = argv;
#ifdef DEBUG
// Initialize argv[-1] to a known-bogus value so we'll catch it if
// someone forgets to initialize it later.
newifp->frame.argv[-1] = JSVAL_HOLE;
newfp->argv[-1] = JSVAL_HOLE;
#endif
JS_ASSERT(newifp->frame.argv >= StackBase(fp) + 2);
newifp->frame.rval = JSVAL_VOID;
newifp->frame.down = fp;
newifp->frame.annotation = NULL;
newifp->frame.scopeChain = NULL; // will be updated in FlushNativeStackFrame
newifp->frame.flags = constructing ? JSFRAME_CONSTRUCTING : 0;
newifp->frame.blockChain = NULL;
newifp->mark = newmark;
newifp->frame.thisv = JSVAL_NULL; // will be updated in FlushNativeStackFrame
newifp->frame.regs = fp->regs;
newifp->frame.regs->pc = script->code;
newifp->frame.regs->sp = newsp + script->nfixed;
newifp->frame.imacpc = NULL;
newifp->frame.slots = newsp;
if (script->staticLevel < JS_DISPLAY_SIZE) {
JSStackFrame **disp = &cx->display[script->staticLevel];
newifp->frame.displaySave = *disp;
*disp = &newifp->frame;
newfp->rval = JSVAL_VOID;
newfp->annotation = NULL;
newfp->scopeChain = NULL; // will be updated in FlushNativeStackFrame
newfp->flags = fi.is_constructing() ? JSFRAME_CONSTRUCTING : 0;
newfp->blockChain = NULL;
newfp->thisv = JSVAL_NULL; // will be updated in FlushNativeStackFrame
newfp->imacpc = NULL;
if (newscript->staticLevel < JS_DISPLAY_SIZE) {
JSStackFrame **disp = &cx->display[newscript->staticLevel];
newfp->displaySave = *disp;
*disp = newfp;
}
/*
* Note that fp->script is still the caller's script; set the callee
* inline frame's idea of caller version from its version.
*/
newifp->callerVersion = (JSVersion) fp->script->version;
newfp->callerVersion = (JSVersion) fp->script->version;
// After this paragraph, fp and cx->fp point to the newly synthesized frame.
fp->regs = &newifp->callerRegs;
fp = cx->fp = &newifp->frame;
/*
* Weave regs like JSOP_CALL/JSOP_STOP. |fp->regs| should point to the
* |regs| variable in the innermost js_Interpret activation.
*/
newfp->callerRegs = *fp->regs;
newfp->regs = fp->regs;
fp->regs = &newfp->callerRegs;
newfp->regs->pc = newscript->code;
newfp->regs->sp = newfp->slots() + newscript->nfixed;
/* Push inline frame. (Copied from js_Interpret.) */
stack.pushInlineFrame(cx, fp, newfp);
/*
* If there's a call hook, invoke it to compute the hookData used by
@ -5508,9 +5477,9 @@ SynthesizeFrame(JSContext* cx, const FrameInfo& fi, JSObject* callee)
*/
JSInterpreterHook hook = cx->debugHooks->callHook;
if (hook) {
newifp->hookData = hook(cx, fp, JS_TRUE, 0, cx->debugHooks->callHookData);
newfp->hookData = hook(cx, newfp, JS_TRUE, 0, cx->debugHooks->callHookData);
} else {
newifp->hookData = NULL;
newfp->hookData = NULL;
}
/*
@ -5521,29 +5490,30 @@ SynthesizeFrame(JSContext* cx, const FrameInfo& fi, JSObject* callee)
* everything down to the caller's fp->slots (where vars start) and avoid
* some of the complexity?
*/
return (fi.spdist - fp->down->script->nfixed) +
((fun->nargs > fp->argc) ? fun->nargs - fp->argc : 0) +
script->nfixed + SPECIAL_FRAME_SLOTS;
return (fi.spdist - newfp->down->script->nfixed) +
((fun->nargs > newfp->argc) ? fun->nargs - newfp->argc : 0) +
newscript->nfixed + SPECIAL_FRAME_SLOTS;
}
static void
JS_REQUIRES_STACK static void
SynthesizeSlowNativeFrame(TracerState& state, JSContext *cx, VMSideExit *exit)
{
VOUCH_DOES_NOT_REQUIRE_STACK();
/*
* StackSpace::getInlineFrame calls js_ReportOutOfScriptQuota if there is
* no space (which will try to deep bail, which is bad), however we already
* check on entry to ExecuteTree that there is enough space.
*/
CallStack *cs;
JSStackFrame *fp;
cx->stack().getSynthesizedSlowNativeFrame(cx, cs, fp);
void *mark;
JSInlineFrame *ifp;
JSObject *callee = JSVAL_TO_OBJECT(state.nativeVp[0]);
JSFunction *fun = GET_FUNCTION_PRIVATE(cx, callee);
JS_ASSERT(!fun->isInterpreted() && !fun->isFastNative());
JS_ASSERT(fun->u.n.extra == 0);
/* This allocation is infallible: ExecuteTree reserved enough stack. */
mark = JS_ARENA_MARK(&cx->stackPool);
JS_ARENA_ALLOCATE_CAST(ifp, JSInlineFrame *, &cx->stackPool, sizeof(JSInlineFrame));
if (!ifp)
OutOfMemoryAbort();
JSStackFrame *fp = &ifp->frame;
fp->regs = NULL;
fp->imacpc = NULL;
fp->slots = NULL;
fp->callobj = NULL;
fp->argsobj = NULL;
fp->script = NULL;
@ -5552,7 +5522,6 @@ SynthesizeSlowNativeFrame(TracerState& state, JSContext *cx, VMSideExit *exit)
fp->argv = state.nativeVp + 2;
fp->fun = GET_FUNCTION_PRIVATE(cx, fp->calleeObject());
fp->rval = JSVAL_VOID;
fp->down = cx->fp;
fp->annotation = NULL;
JS_ASSERT(cx->fp->scopeChain);
fp->scopeChain = cx->fp->scopeChain;
@ -5560,8 +5529,7 @@ SynthesizeSlowNativeFrame(TracerState& state, JSContext *cx, VMSideExit *exit)
fp->flags = exit->constructing() ? JSFRAME_CONSTRUCTING : 0;
fp->displaySave = NULL;
ifp->mark = mark;
cx->fp = fp;
cx->stack().pushSynthesizedSlowNativeFrame(cx, cs, fp);
}
static JS_REQUIRES_STACK bool
@ -6447,7 +6415,7 @@ ExecuteTree(JSContext* cx, TreeFragment* f, uintN& inlineCallCount,
JS_ASSERT(f->root == f && f->code());
TraceMonitor* tm = &JS_TRACE_MONITOR(cx);
if (!ScopeChainCheck(cx, f)) {
if (!ScopeChainCheck(cx, f) || !cx->stack().ensureEnoughSpaceToEnterTrace()) {
*lrp = NULL;
return true;
}
@ -6591,14 +6559,8 @@ LeaveTree(TraceMonitor *tm, TracerState& state, VMSideExit* lr)
*
* First, if we just returned from a slow native, pop its stack frame.
*/
if (!cx->fp->script) {
JSStackFrame *fp = cx->fp;
JS_ASSERT(FUN_SLOW_NATIVE(fp->fun));
JS_ASSERT(!fp->regs);
JS_ASSERT(fp->down->regs != &((JSInlineFrame *) fp)->callerRegs);
cx->fp = fp->down;
JS_ARENA_RELEASE(&cx->stackPool, ((JSInlineFrame *) fp)->mark);
}
if (!cx->fp->script)
cx->stack().popSynthesizedSlowNativeFrame(cx);
JS_ASSERT(cx->fp->script);
if (!(bs & BUILTIN_ERROR)) {
@ -6644,7 +6606,7 @@ LeaveTree(TraceMonitor *tm, TracerState& state, VMSideExit* lr)
regs->sp += cs.ndefs;
regs->pc += cs.length;
JS_ASSERT_IF(!cx->fp->imacpc,
cx->fp->slots + cx->fp->script->nfixed +
cx->fp->slots() + cx->fp->script->nfixed +
js_ReconstructStackDepth(cx, cx->fp->script, regs->pc) ==
regs->sp);
@ -6766,7 +6728,7 @@ LeaveTree(TraceMonitor *tm, TracerState& state, VMSideExit* lr)
fp->imacpc = innermost->imacpc;
fp->regs->sp = StackBase(fp) + (innermost->sp_adj / sizeof(double)) - calldepth_slots;
JS_ASSERT_IF(!fp->imacpc,
fp->slots + fp->script->nfixed +
fp->slots() + fp->script->nfixed +
js_ReconstructStackDepth(cx, fp->script, fp->regs->pc) == fp->regs->sp);
#ifdef EXECUTE_TREE_TIMER
@ -7751,7 +7713,7 @@ JS_REQUIRES_STACK jsval&
TraceRecorder::varval(unsigned n) const
{
JS_ASSERT(n < cx->fp->script->nslots);
return cx->fp->slots[n];
return cx->fp->slots()[n];
}
JS_REQUIRES_STACK jsval&
@ -7920,7 +7882,7 @@ TraceRecorder::callProp(JSObject* obj, JSProperty* prop, jsid id, jsval*& vp,
} else if (sprop->getterOp() == js_GetCallVar ||
sprop->getterOp() == js_GetCallVarChecked) {
JS_ASSERT(slot < cfp->script->nslots);
vp = &cfp->slots[slot];
vp = &cfp->slots()[slot];
upvar_slot = cx->fp->fun->nargs + slot;
nr.v = *vp;
} else {
@ -9654,8 +9616,8 @@ TraceRecorder::clearFrameSlotsFromTracker(Tracker& which, JSStackFrame* fp, unsi
which.set(&fp->argsobj, (LIns*)0);
which.set(&fp->scopeChain, (LIns*)0);
}
vp = &fp->slots[0];
vpstop = &fp->slots[nslots];
vp = &fp->slots()[0];
vpstop = &fp->slots()[nslots];
while (vp < vpstop)
which.set(vp++, (LIns*)0);
}
@ -9724,7 +9686,7 @@ TraceRecorder::putActivationObjects()
if (nslots) {
slots_ins = lir->insAlloc(sizeof(jsval) * nslots);
for (int i = 0; i < nslots; ++i) {
LIns* slot_ins = box_jsval(cx->fp->slots[i], get(&cx->fp->slots[i]));
LIns* slot_ins = box_jsval(cx->fp->slots()[i], get(&cx->fp->slots()[i]));
lir->insStore(slot_ins, slots_ins, i * sizeof(jsval), ACC_OTHER);
}
} else {
@ -9781,21 +9743,41 @@ TraceRecorder::record_EnterFrame(uintN& inlineCallCount)
)
LIns* void_ins = INS_VOID();
// Before we enter this frame, we need to clear out any dangling insns left
// in the tracer. While we also clear when returning from a function, it is
// possible to have the following sequence of stack usage:
//
// [fp1]***************** push
// [fp1]***** pop
// [fp1]*****[fp2] call
// [fp1]*****[fp2]*** push
//
// Duplicate native stack layout computation: see VisitFrameSlots header comment.
// This doesn't do layout arithmetic, but it must initialize in the tracker all the
// slots defined as imported by VisitFrameSlots.
jsval* vp = &fp->argv[fp->argc];
jsval* vpstop = vp + ptrdiff_t(fp->fun->nargs) - ptrdiff_t(fp->argc);
while (vp < vpstop) {
nativeFrameTracker.set(vp, (LIns*)0);
set(vp++, void_ins);
for (; vp < vpstop; ++vp) {
nativeFrameTracker.set(vp, NULL);
set(vp, void_ins);
}
vp = &fp->slots[0];
vpstop = vp + fp->script->nfixed;
while (vp < vpstop)
set(vp++, void_ins);
nativeFrameTracker.set(&fp->argsobj, NULL);
set(&fp->argsobj, INS_NULL());
nativeFrameTracker.set(&fp->scopeChain, NULL);
vp = fp->slots();
vpstop = vp + fp->script->nfixed;
for (; vp < vpstop; ++vp) {
nativeFrameTracker.set(vp, NULL);
set(vp, void_ins);
}
vp = vpstop;
vpstop = vp + (fp->script->nslots - fp->script->nfixed);
for (; vp < vpstop; ++vp)
nativeFrameTracker.set(vp, NULL);
LIns* callee_ins = get(&cx->fp->argv[-2]);
LIns* scopeChain_ins = stobj_get_parent(callee_ins);
@ -11380,7 +11362,7 @@ TraceRecorder::setCallProp(JSObject *callobj, LIns *callobj_ins, JSScopeProperty
if (sprop->setterOp() == SetCallVar) {
JS_ASSERT(sprop->hasShortID());
uintN slot = uint16(sprop->shortid);
jsval *vp2 = &fp->slots[slot];
jsval *vp2 = &fp->slots()[slot];
CHECK_STATUS(setUpwardTrackedVar(vp2, v, v_ins));
return RECORD_CONTINUE;
}
@ -12566,12 +12548,6 @@ TraceRecorder::interpretedFunctionCall(jsval& fval, JSFunction* fun, uintN argc,
JSStackFrame* fp = cx->fp;
// TODO: track the copying via the tracker...
if (argc < fun->nargs &&
jsuword(fp->regs->sp + (fun->nargs - argc)) > cx->stackPool.current->limit) {
RETURN_STOP("can't trace calls with too few args requiring argv move");
}
// Generate a type map for the outgoing frame and stash it in the LIR
unsigned stackSlots = NativeStackSlots(cx, 0 /* callDepth */);
FrameInfo* fi = (FrameInfo*)
@ -12589,7 +12565,7 @@ TraceRecorder::interpretedFunctionCall(jsval& fval, JSFunction* fun, uintN argc,
tree->gcthings.addUnique(OBJECT_TO_JSVAL(fp->blockChain));
fi->pc = fp->regs->pc;
fi->imacpc = fp->imacpc;
fi->spdist = fp->regs->sp - fp->slots;
fi->spdist = fp->regs->sp - fp->slots();
fi->set_argc(uint16(argc), constructing);
fi->callerHeight = stackSlots - (2 + argc);
fi->callerArgc = fp->argc;
@ -13905,7 +13881,7 @@ TraceRecorder::record_JSOP_BINDNAME()
// are still on the stack. We never use BINDNAME to refer to these.
while (obj->getClass() == &js_BlockClass) {
// The block's values are still on the stack.
JS_ASSERT(obj->getPrivate() == fp);
JS_ASSERT(obj->getPrivate() == js_FloatingFrameIfGenerator(cx, fp));
obj = obj->getParent();
// Blocks always have parents.
JS_ASSERT(obj);
@ -14498,7 +14474,7 @@ TraceRecorder::record_JSOP_RETRVAL()
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GETGVAR()
{
jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
jsval slotval = cx->fp->slots()[GET_SLOTNO(cx->fp->regs->pc)];
if (JSVAL_IS_NULL(slotval))
return ARECORD_CONTINUE; // We will see JSOP_NAME from the interpreter's jump, so no-op here.
@ -14514,7 +14490,7 @@ TraceRecorder::record_JSOP_GETGVAR()
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_SETGVAR()
{
jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
jsval slotval = cx->fp->slots()[GET_SLOTNO(cx->fp->regs->pc)];
if (JSVAL_IS_NULL(slotval))
return ARECORD_CONTINUE; // We will see JSOP_NAME from the interpreter's jump, so no-op here.
@ -14530,7 +14506,7 @@ TraceRecorder::record_JSOP_SETGVAR()
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_INCGVAR()
{
jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
jsval slotval = cx->fp->slots()[GET_SLOTNO(cx->fp->regs->pc)];
if (JSVAL_IS_NULL(slotval))
// We will see JSOP_INCNAME from the interpreter's jump, so no-op here.
return ARECORD_CONTINUE;
@ -14546,7 +14522,7 @@ TraceRecorder::record_JSOP_INCGVAR()
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_DECGVAR()
{
jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
jsval slotval = cx->fp->slots()[GET_SLOTNO(cx->fp->regs->pc)];
if (JSVAL_IS_NULL(slotval))
// We will see JSOP_INCNAME from the interpreter's jump, so no-op here.
return ARECORD_CONTINUE;
@ -14562,7 +14538,7 @@ TraceRecorder::record_JSOP_DECGVAR()
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GVARINC()
{
jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
jsval slotval = cx->fp->slots()[GET_SLOTNO(cx->fp->regs->pc)];
if (JSVAL_IS_NULL(slotval))
// We will see JSOP_INCNAME from the interpreter's jump, so no-op here.
return ARECORD_CONTINUE;
@ -14578,7 +14554,7 @@ TraceRecorder::record_JSOP_GVARINC()
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_GVARDEC()
{
jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
jsval slotval = cx->fp->slots()[GET_SLOTNO(cx->fp->regs->pc)];
if (JSVAL_IS_NULL(slotval))
// We will see JSOP_INCNAME from the interpreter's jump, so no-op here.
return ARECORD_CONTINUE;
@ -14986,8 +14962,8 @@ TraceRecorder::record_JSOP_ARRAYPUSH()
{
uint32_t slot = GET_UINT16(cx->fp->regs->pc);
JS_ASSERT(cx->fp->script->nfixed <= slot);
JS_ASSERT(cx->fp->slots + slot < cx->fp->regs->sp - 1);
jsval &arrayval = cx->fp->slots[slot];
JS_ASSERT(cx->fp->slots() + slot < cx->fp->regs->sp - 1);
jsval &arrayval = cx->fp->slots()[slot];
JS_ASSERT(JSVAL_IS_OBJECT(arrayval));
JS_ASSERT(JSVAL_TO_OBJECT(arrayval)->isDenseArray());
LIns *array_ins = get(&arrayval);
@ -15067,7 +15043,7 @@ TraceRecorder::record_JSOP_INDEXBASE3()
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::record_JSOP_CALLGVAR()
{
jsval slotval = cx->fp->slots[GET_SLOTNO(cx->fp->regs->pc)];
jsval slotval = cx->fp->slots()[GET_SLOTNO(cx->fp->regs->pc)];
if (JSVAL_IS_NULL(slotval))
// We will see JSOP_CALLNAME from the interpreter's jump, so no-op here.
return ARECORD_CONTINUE;

Просмотреть файл

@ -75,6 +75,7 @@ JS_PUBLIC_API(void) JS_Assert(const char *s, const char *file, JSIntn ln)
* trapped.
*/
*((int *) NULL) = 0; /* To continue from here in GDB: "return" then "continue". */
raise(SIGABRT); /* In case above statement gets nixed by the optimizer. */
#else
raise(SIGABRT); /* To continue from here in GDB: "signal 0". */
#endif

Просмотреть файл

@ -1406,7 +1406,7 @@ ValueToScript(JSContext *cx, jsval v)
script = (JSScript *) JS_GetPrivate(cx, obj);
} else if (clasp == &js_GeneratorClass.base) {
JSGenerator *gen = (JSGenerator *) JS_GetPrivate(cx, obj);
fun = gen->frame.fun;
fun = gen->getFloatingFrame()->fun;
script = FUN_SCRIPT(fun);
}
}

Просмотреть файл

@ -38,13 +38,13 @@
var gTestfile = 'regress-350256-02.js';
//-----------------------------------------------------------------------------
var BUGNUMBER = 350256;
var summary = 'Array.apply maximum arguments: 2^20';
var summary = 'Array.apply maximum arguments: 2^19 - 1024';
var actual = '';
var expect = '';
//-----------------------------------------------------------------------------
test(Math.pow(2, 20));
test(Math.pow(2, 19) - 1024);
//-----------------------------------------------------------------------------
function test(length)

Просмотреть файл

@ -38,13 +38,13 @@
var gTestfile = 'regress-350256-03.js';
//-----------------------------------------------------------------------------
var BUGNUMBER = 350256;
var summary = 'Array.apply maximum arguments: 2^24-1';
var summary = 'Array.apply maximum arguments: 2^19-1024';
var actual = '';
var expect = '';
//-----------------------------------------------------------------------------
test(Math.pow(2, 24)-1);
test(Math.pow(2, 19)-1024);
//-----------------------------------------------------------------------------
function test(length)

Просмотреть файл

@ -0,0 +1,16 @@
function g(e) {
    // Stringify via concatenation with the empty string, preserving the
    // original's ToPrimitive(no hint) conversion path.
    var asString = "" + e;
    return asString;
}
// Legacy SpiderMonkey generator (JS1.7-style 'yield' inside a plain
// function).  Reduced fuzz test: after the first yield, resuming the
// generator re-evaluates the do-while condition, which applies a call
// to an object literal — presumably not callable, so the resume throws
// (TODO confirm this is the trigger for the original crash).
function blah() {
    do {
        yield;
    } while ({}(p = arguments));  // NOTE(review): {}(...) calls a non-function and assigns the undeclared global p
}
// Drive the generator with for-in; resuming it throws (see blah), and the
// exception is stringified through g() and printed.  rv, a, and p are
// deliberately undeclared globals, as is typical of a reduced fuzz test.
rv = blah();
try {
    for (a in rv) ;  // iterating resumes the generator, triggering the throw
} catch (e) {
    print("" + g(e));
}
// Run a GC afterwards — presumably so that stale generator-frame state
// would be detected here rather than silently leaked (TODO confirm).
gc();

Просмотреть файл

@ -0,0 +1,41 @@
// Fun.apply ignores arguments past JS_ARGS_LENGTH_MAX = 2^19 - 1024
const numFatArgs = Math.pow(2,19) - 1024;

// Recursion on trace is limited to MAX_CALL_STACK_ENTRIES = 500; stay
// just below that so the recursion can be compiled to a trace.
const traceDepth = 490;

// Toggled to false below to force the interpreter path (see maybeTrace).
var trace = true;
// Self-recursive helper that recurses x times and returns 0.  When the
// global |trace| is false, the eval("") presumably makes the function
// untraceable (TODO confirm), so the recursion runs in the interpreter
// and consumes C/script stack per call instead of iterating on trace.
function maybeTrace(x) {
    if (!trace)
        eval("");
    if (x <= 0)
        return 0;
    return maybeTrace(x-1);
}
// Entry point invoked below via Function.prototype.apply with numFatArgs
// arguments, so the recursion starts from a frame with a huge argc.
function fatStack() {
    return maybeTrace(traceDepth);
}
// This tests that we conservatively guard against stack space exhaustion
// before entering trace.
exception = false;
try {
    fatStack.apply(null, new Array(numFatArgs));
} catch (e) {
    // Quota exhaustion must surface as a catchable InternalError, not a crash.
    assertEq(e.toString(), "InternalError: script stack space quota is exhausted");
    exception = true;
}
assertEq(exception, true);
// The recursion itself must still have compiled to exactly one trace.
checkStats({traceCompleted:1});

// This tests that, without tracing, we exhaust stack space.
trace = false;
var exception = false;
try {
    fatStack.apply(null, new Array(numFatArgs));
} catch (e) {
    assertEq(e.toString(), "InternalError: script stack space quota is exhausted");
    exception = true;
}
assertEq(exception, true);

Просмотреть файл

@ -338,6 +338,8 @@ STDMETHODIMP XPCDispatchTearOff::Invoke(DISPID dispIdMember, REFIID riid,
XPCJSRuntime* rt = ccx.GetRuntime();
int j;
js::InvokeArgsGuard args;
thisObj = obj = GetJSObject();;
if(!cx || !xpcc)
@ -349,16 +351,12 @@ STDMETHODIMP XPCDispatchTearOff::Invoke(DISPID dispIdMember, REFIID riid,
xpcc->SetException(nsnull);
ccx.GetThreadData()->SetException(nsnull);
// We use js_AllocStack, js_Invoke, and js_FreeStack so that the gcthings
// we use as args will be rooted by the engine as we do conversions and
// prepare to do the function call. This adds a fair amount of complexity,
// but is a good optimization compared to calling JS_AddRoot for each item.
// setup stack
// allocate extra space for function and 'this'
stack_size = argc + 2;
// We use js_Invoke so that the gcthings we use as args will be rooted
// by the engine as we do conversions and prepare to do the function
// call. This adds a fair amount of complexity, but it's a good
// optimization compared to calling JS_AddRoot for each item.
js::LeaveTrace(cx);
// In the xpidl [function] case we are making sure now that the
// JSObject is callable. If it is *not* callable then we silently
@ -382,19 +380,17 @@ STDMETHODIMP XPCDispatchTearOff::Invoke(DISPID dispIdMember, REFIID riid,
goto pre_call_clean_up;
}
// if stack_size is zero then we won't be needing a stack
if(stack_size && !(stackbase = sp = js_AllocStack(cx, stack_size, &mark)))
if (!cx->stack().pushInvokeArgsFriendAPI(cx, argc, args))
{
retval = NS_ERROR_OUT_OF_MEMORY;
goto pre_call_clean_up;
}
sp = stackbase = args.getvp();
// this is a function call, so push function and 'this'
if(stack_size != argc)
{
*sp++ = fval;
*sp++ = OBJECT_TO_JSVAL(thisObj);
}
*sp++ = fval;
*sp++ = OBJECT_TO_JSVAL(thisObj);
// make certain we leave no garbage in the stack
for(i = 0; i < argc; i++)
@ -445,7 +441,7 @@ pre_call_clean_up:
if(!JSVAL_IS_PRIMITIVE(fval))
{
success = js_Invoke(cx, argc, stackbase, 0);
success = js_Invoke(cx, args, 0);
result = stackbase[0];
}
else
@ -527,9 +523,6 @@ pre_call_clean_up:
}
done:
if(sp)
js_FreeStack(cx, mark);
// TODO: I think we may need to translate this error,
// for now we'll pass through
return retval;

Просмотреть файл

@ -1250,6 +1250,17 @@ nsXPCWrappedJSClass::CheckForException(XPCCallContext & ccx,
return NS_ERROR_FAILURE;
}
/*
 * RAII guard that pops the context principal pushed onto |ccx|.  The guard
 * starts disarmed; call principalPushed() after a successful
 * PushContextPrincipal so the destructor calls PopContextPrincipal on every
 * exit path (including early returns).
 */
class ContextPrincipalGuard
{
    nsIScriptSecurityManager *ssm;   // non-null only once a principal was pushed
    XPCCallContext &ccx;

    // Not copyable: two copies of an armed guard would pop the principal
    // twice.  Declared but not defined (C++03 idiom).
    ContextPrincipalGuard(const ContextPrincipalGuard &);
    ContextPrincipalGuard &operator=(const ContextPrincipalGuard &);

  public:
    explicit ContextPrincipalGuard(XPCCallContext &ccx)
      : ssm(nsnull), ccx(ccx) {}

    // Arm the guard: |ssm| is the manager whose PushContextPrincipal succeeded.
    void principalPushed(nsIScriptSecurityManager *ssm) { this->ssm = ssm; }

    ~ContextPrincipalGuard() { if (ssm) ssm->PopContextPrincipal(ccx); }
};
NS_IMETHODIMP
nsXPCWrappedJSClass::CallMethod(nsXPCWrappedJS* wrapper, uint16 methodIndex,
const XPTMethodDescriptor* info,
@ -1259,8 +1270,6 @@ nsXPCWrappedJSClass::CallMethod(nsXPCWrappedJS* wrapper, uint16 methodIndex,
jsval* sp = nsnull;
uint8 i;
uint8 argc=0;
uint8 stack_size;
jsval result;
uint8 paramCount=0;
nsresult retval = NS_ERROR_FAILURE;
nsresult pending_result = NS_OK;
@ -1270,13 +1279,11 @@ nsXPCWrappedJSClass::CallMethod(nsXPCWrappedJS* wrapper, uint16 methodIndex,
JSObject* obj;
const char* name = info->name;
jsval fval;
void* mark;
JSBool foundDependentParam;
XPCContext* xpcc;
JSContext* cx;
JSObject* thisObj;
JSBool popPrincipal = JS_FALSE;
nsIScriptSecurityManager* ssm = nsnull;
bool invokeCall;
// Make sure not to set the callee on ccx until after we've gone through
// the whole nsIXPCFunctionThisTranslator bit. That code uses ccx to
@ -1296,17 +1303,8 @@ nsXPCWrappedJSClass::CallMethod(nsXPCWrappedJS* wrapper, uint16 methodIndex,
}
AutoScriptEvaluate scriptEval(cx);
#ifdef DEBUG_stats_jband
PRIntervalTime startTime = PR_IntervalNow();
PRIntervalTime endTime = 0;
static int totalTime = 0;
static int count = 0;
static const int interval = 10;
if(0 == (++count % interval))
printf("<<<<<<<< %d calls on nsXPCWrappedJSs made. (%d)\n", count, PR_IntervalToMilliseconds(totalTime));
#endif
js::InvokeArgsGuard args;
ContextPrincipalGuard principalGuard(ccx);
obj = thisObj = wrapper->GetJSObject();
@ -1326,7 +1324,7 @@ nsXPCWrappedJSClass::CallMethod(nsXPCWrappedJS* wrapper, uint16 methodIndex,
if(XPCPerThreadData::IsMainThread(ccx))
{
ssm = XPCWrapper::GetSecurityManager();
nsIScriptSecurityManager *ssm = XPCWrapper::GetSecurityManager();
if(ssm)
{
nsCOMPtr<nsIPrincipal> objPrincipal;
@ -1344,30 +1342,24 @@ nsXPCWrappedJSClass::CallMethod(nsXPCWrappedJS* wrapper, uint16 methodIndex,
goto pre_call_clean_up;
}
popPrincipal = JS_TRUE;
principalGuard.principalPushed(ssm);
}
}
}
// We use js_AllocStack, js_Invoke, and js_FreeStack so that the gcthings
// we use as args will be rooted by the engine as we do conversions and
// prepare to do the function call. This adds a fair amount of complexity,
// but is a good optimization compared to calling JS_AddRoot for each item.
// We use js_Invoke so that the gcthings we use as args will be rooted by
// the engine as we do conversions and prepare to do the function call.
// This adds a fair amount of complexity, but it's a good optimization
// compared to calling JS_AddRoot for each item.
js::LeaveTrace(cx);
// setup stack
// if this isn't a function call then we don't need to push extra stuff
if(XPT_MD_IS_GETTER(info->flags) || XPT_MD_IS_SETTER(info->flags))
invokeCall = !(XPT_MD_IS_SETTER(info->flags) || XPT_MD_IS_GETTER(info->flags));
if (invokeCall)
{
stack_size = argc;
}
else
{
// allocate extra space for function and 'this'
stack_size = argc + 2;
// We get fval before allocating the stack to avoid gc badness that can
// happen if the GetProperty call leaves our request and the gc runs
// while the stack we allocate contains garbage.
@ -1467,18 +1459,21 @@ nsXPCWrappedJSClass::CallMethod(nsXPCWrappedJS* wrapper, uint16 methodIndex,
}
}
// if stack_size is zero then we won't be needing a stack
if(stack_size && !(stackbase = sp = js_AllocStack(cx, stack_size, &mark)))
/*
* pushInvokeArgs allocates |2 + argc| slots, but getters and setters
* require only one rooted jsval, so waste one value.
*/
JS_ASSERT_IF(!invokeCall, argc < 2);
if (!cx->stack().pushInvokeArgsFriendAPI(cx, invokeCall ? argc : 0, args))
{
retval = NS_ERROR_OUT_OF_MEMORY;
goto pre_call_clean_up;
}
NS_ASSERTION(XPT_MD_IS_GETTER(info->flags) || sp,
"Only a getter needs no stack.");
sp = stackbase = args.getvp();
// this is a function call, so push function and 'this'
if(stack_size != argc)
if(invokeCall)
{
*sp++ = fval;
*sp++ = OBJECT_TO_JSVAL(thisObj);
@ -1675,26 +1670,23 @@ pre_call_clean_up:
// Make sure "this" doesn't get deleted during this call.
nsCOMPtr<nsIXPCWrappedJSClass> kungFuDeathGrip(this);
result = JSVAL_NULL;
AUTO_MARK_JSVAL(ccx, &result);
if(!readyToDoTheCall)
goto done;
return retval;
// do the deed - note exceptions
JS_ClearPendingException(cx);
/* On success, the return value is placed in |*stackbase|. */
if(XPT_MD_IS_GETTER(info->flags))
success = JS_GetProperty(cx, obj, name, &result);
success = JS_GetProperty(cx, obj, name, stackbase);
else if(XPT_MD_IS_SETTER(info->flags))
success = JS_SetProperty(cx, obj, name, sp-1);
success = JS_SetProperty(cx, obj, name, stackbase);
else
{
if(!JSVAL_IS_PRIMITIVE(fval))
{
success = js_Invoke(cx, argc, stackbase, 0);
result = *stackbase;
success = js_Invoke(cx, args, 0);
}
else
{
@ -1730,8 +1722,7 @@ pre_call_clean_up:
// May also want to check if we're moving from content->chrome and force
// a report in that case.
retval = CheckForException(ccx, name, GetInterfaceName(), forceReport);
goto done;
return CheckForException(ccx, name, GetInterfaceName(), forceReport);
}
ccx.GetThreadData()->SetException(nsnull); // XXX necessary?
@ -1768,7 +1759,7 @@ pre_call_clean_up:
pv = (nsXPTCMiniVariant*) nativeParams[i].val.p;
if(param.IsRetval())
val = result;
val = *stackbase;
else if(JSVAL_IS_PRIMITIVE(stackbase[i+2]) ||
!JS_GetPropertyById(cx, JSVAL_TO_OBJECT(stackbase[i+2]),
mRuntime->GetStringID(XPCJSRuntime::IDX_VALUE),
@ -1819,7 +1810,7 @@ pre_call_clean_up:
pv = (nsXPTCMiniVariant*) nativeParams[i].val.p;
if(param.IsRetval())
val = result;
val = *stackbase;
else if(!JS_GetPropertyById(cx, JSVAL_TO_OBJECT(stackbase[i+2]),
mRuntime->GetStringID(XPCJSRuntime::IDX_VALUE),
&val))
@ -1932,18 +1923,6 @@ pre_call_clean_up:
retval = pending_result;
}
done:
if(sp)
js_FreeStack(cx, mark);
if(popPrincipal)
ssm->PopContextPrincipal(ccx);
#ifdef DEBUG_stats_jband
endTime = PR_IntervalNow();
printf("%s::%s %d ( c->js ) \n", GetInterfaceName(), info->GetName(), PR_IntervalToMilliseconds(endTime-startTime));
totalTime += endTime-startTime;
#endif
return retval;
}