Bug 644074 - Simplify and consolidate VM stack code into js/src/vm/Stack*

Luke Wagner 2011-04-13 09:27:37 -07:00
Parent 6722d81ca6
Commit 2bb8868bc9
81 changed files with 4595 additions and 4267 deletions

View file

@ -63,6 +63,7 @@
#include "prmem.h"
#include "jsapi.h" // for JSAutoRequest
#include "jsdbgapi.h" // for JS_ClearWatchPointsForObject
#include "jsfriendapi.h" // for JS_GetFrameScopeChainRaw
#include "nsReadableUtils.h"
#include "nsDOMClassInfo.h"
#include "nsJSEnvironment.h"
@ -5851,13 +5852,13 @@ nsGlobalWindow::CallerInnerWindow()
JSStackFrame *fp = nsnull;
JS_FrameIterator(cx, &fp);
if (fp) {
while (fp->isDummyFrame()) {
while (!JS_IsScriptFrame(cx, fp)) {
if (!JS_FrameIterator(cx, &fp))
break;
}
if (fp)
scope = &fp->scopeChain();
scope = JS_GetFrameScopeChainRaw(fp);
}
if (!scope)
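
Because the viewer drops the +/- markers, here is the post-patch form of that frame walk from nsGlobalWindow::CallerInnerWindow consolidated into one piece (a sketch; surrounding declarations are abbreviated). Dummy frames are now skipped with the public JS_IsScriptFrame test and the scope chain is read through the new friend API instead of poking JSStackFrame internals:

    JSObject *scope = nsnull;
    JSStackFrame *fp = nsnull;
    JS_FrameIterator(cx, &fp);
    if (fp) {
        // Walk past non-script (dummy) frames using only public API calls.
        while (!JS_IsScriptFrame(cx, fp)) {
            if (!JS_FrameIterator(cx, &fp))
                break;
        }
        if (fp)
            scope = JS_GetFrameScopeChainRaw(fp);
    }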

View file

@ -46,8 +46,6 @@
#include "base/basictypes.h"
#include "jsapi.h"
#include "jscntxt.h"
#include "jsdbgapi.h"
#include "jsprf.h"
#include "xpcpublic.h"
@ -406,13 +404,14 @@ GC(JSContext *cx,
jsval *vp)
{
JSRuntime *rt;
uint32 preBytes;
uint32 preBytes, postBytes;
rt = cx->runtime;
preBytes = rt->gcBytes;
rt = JS_GetRuntime(cx);
preBytes = JS_GetGCParameter(rt, JSGC_BYTES);
JS_GC(cx);
postBytes = JS_GetGCParameter(rt, JSGC_BYTES);
fprintf(stdout, "before %lu, after %lu, break %08lx\n",
(unsigned long)preBytes, (unsigned long)rt->gcBytes,
(unsigned long)preBytes, (unsigned long)postBytes,
#ifdef XP_UNIX
(unsigned long)sbrk(0)
#else
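
Since the +/- markers are gone here as well, this is the post-patch body of the shell's GC() helper shown above, as one piece (a sketch; only the measurement logic is shown). Heap size is now queried through the public JS_GetGCParameter API instead of reading rt->gcBytes off a jscntxt.h-internal JSRuntime:

    JSRuntime *rt = JS_GetRuntime(cx);
    uint32 preBytes = JS_GetGCParameter(rt, JSGC_BYTES);   // bytes in the GC heap before
    JS_GC(cx);
    uint32 postBytes = JS_GetGCParameter(rt, JSGC_BYTES);  // ... and after collection
    fprintf(stdout, "before %lu, after %lu\n",
            (unsigned long) preBytes, (unsigned long) postBytes);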

View file

@ -181,6 +181,7 @@ CPPSRCS = \
jsxml.cpp \
prmjtime.cpp \
sharkctl.cpp \
Stack.cpp \
$(NULL)
INSTALLED_HEADERS = \
@ -267,6 +268,19 @@ INSTALLED_HEADERS = \
prmjtime.h \
$(NULL)
###############################################
# BEGIN include sources for the vm subdirectory
#
VPATH += \
$(srcdir)/vm \
$(NULL)
EXPORTS_NAMESPACES = vm
EXPORTS_vm = \
Stack.h \
$(NULL)
###############################################
# BEGIN include sources for low-level code shared with Gecko
#
@ -274,7 +288,7 @@ VPATH += \
$(srcdir)/../../mfbt \
$(NULL)
EXPORTS_NAMESPACES = mozilla
EXPORTS_NAMESPACES += mozilla
EXPORTS_mozilla = \
Util.h \

View file

@ -90,16 +90,15 @@
#include "jstypedarray.h"
#include "jsatominlines.h"
#include "jscntxtinlines.h"
#include "jsinterpinlines.h"
#include "jsobjinlines.h"
#include "jsscopeinlines.h"
#include "jscntxtinlines.h"
#include "jsregexpinlines.h"
#include "jsscriptinlines.h"
#include "jsstrinlines.h"
#include "assembler/wtf/Platform.h"
#include "vm/Stack-inl.h"
#if ENABLE_YARR_JIT
#include "assembler/jit/ExecutableAllocator.h"
#include "methodjit/Logging.h"
@ -845,7 +844,7 @@ JS_SetRuntimePrivate(JSRuntime *rt, void *data)
static void
StartRequest(JSContext *cx)
{
JSThread *t = cx->thread;
JSThread *t = cx->thread();
JS_ASSERT(CURRENT_THREAD_IS_ME(t));
if (t->data.requestDepth) {
@ -855,7 +854,7 @@ StartRequest(JSContext *cx)
AutoLockGC lock(rt);
/* Wait until the GC is finished. */
if (rt->gcThread != cx->thread) {
if (rt->gcThread != cx->thread()) {
while (rt->gcThread)
JS_AWAIT_GC_DONE(rt);
}
@ -879,7 +878,7 @@ StartRequest(JSContext *cx)
static void
StopRequest(JSContext *cx)
{
JSThread *t = cx->thread;
JSThread *t = cx->thread();
JS_ASSERT(CURRENT_THREAD_IS_ME(t));
JS_ASSERT(t->data.requestDepth != 0);
if (t->data.requestDepth != 1) {
@ -947,7 +946,7 @@ JS_PUBLIC_API(jsrefcount)
JS_SuspendRequest(JSContext *cx)
{
#ifdef JS_THREADSAFE
JSThread *t = cx->thread;
JSThread *t = cx->thread();
JS_ASSERT(CURRENT_THREAD_IS_ME(t));
jsrefcount saveDepth = t->data.requestDepth;
@ -967,7 +966,7 @@ JS_PUBLIC_API(void)
JS_ResumeRequest(JSContext *cx, jsrefcount saveDepth)
{
#ifdef JS_THREADSAFE
JSThread *t = cx->thread;
JSThread *t = cx->thread();
JS_ASSERT(CURRENT_THREAD_IS_ME(t));
if (saveDepth == 0)
return;
@ -984,7 +983,7 @@ JS_PUBLIC_API(JSBool)
JS_IsInRequest(JSContext *cx)
{
#ifdef JS_THREADSAFE
JS_ASSERT(CURRENT_THREAD_IS_ME(cx->thread));
JS_ASSERT(CURRENT_THREAD_IS_ME(cx->thread()));
return JS_THREAD_DATA(cx)->requestDepth != 0;
#else
return false;
@ -1519,7 +1518,7 @@ JS_SetGlobalObject(JSContext *cx, JSObject *obj)
CHECK_REQUEST(cx);
cx->globalObject = obj;
if (!cx->hasfp())
if (!cx->running())
cx->resetCompartment();
}
@ -2762,7 +2761,7 @@ JS_PUBLIC_API(void)
JS_SetNativeStackQuota(JSContext *cx, size_t stackSize)
{
#ifdef JS_THREADSAFE
JS_ASSERT(cx->thread);
JS_ASSERT(cx->thread());
#endif
#if JS_STACK_GROWTH_DIRECTION > 0
@ -4210,7 +4209,7 @@ JS_CloneFunctionObject(JSContext *cx, JSObject *funobj, JSObject *parent)
CHECK_REQUEST(cx);
assertSameCompartment(cx, parent); // XXX no funobj for now
if (!parent) {
if (cx->hasfp())
if (cx->running())
parent = GetScopeChain(cx, cx->fp());
if (!parent)
parent = cx->globalObject;
@ -5107,7 +5106,7 @@ JS_New(JSContext *cx, JSObject *ctor, uintN argc, jsval *argv)
// of object to create, create it, and clamp the return value to an object,
// among other details. js_InvokeConstructor does the hard work.
InvokeArgsGuard args;
if (!cx->stack().pushInvokeArgs(cx, argc, &args))
if (!cx->stack.pushInvokeArgs(cx, argc, &args))
return NULL;
args.calleev().setObject(*ctor);
@ -5141,7 +5140,7 @@ JS_PUBLIC_API(JSOperationCallback)
JS_SetOperationCallback(JSContext *cx, JSOperationCallback callback)
{
#ifdef JS_THREADSAFE
JS_ASSERT(CURRENT_THREAD_IS_ME(cx->thread));
JS_ASSERT(CURRENT_THREAD_IS_ME(cx->thread()));
#endif
JSOperationCallback old = cx->operationCallback;
cx->operationCallback = callback;
@ -5183,9 +5182,9 @@ JS_IsRunning(JSContext *cx)
VOUCH_DOES_NOT_REQUIRE_STACK();
#ifdef JS_TRACER
JS_ASSERT_IF(JS_ON_TRACE(cx) && JS_TRACE_MONITOR_ON_TRACE(cx)->tracecx == cx, cx->hasfp());
JS_ASSERT_IF(JS_ON_TRACE(cx) && JS_TRACE_MONITOR_ON_TRACE(cx)->tracecx == cx, cx->running());
#endif
JSStackFrame *fp = cx->maybefp();
StackFrame *fp = cx->maybefp();
while (fp && fp->isDummyFrame())
fp = fp->prev();
return fp != NULL;
@ -5195,11 +5194,11 @@ JS_PUBLIC_API(JSStackFrame *)
JS_SaveFrameChain(JSContext *cx)
{
CHECK_REQUEST(cx);
JSStackFrame *fp = js_GetTopStackFrame(cx);
StackFrame *fp = js_GetTopStackFrame(cx);
if (!fp)
return NULL;
cx->saveActiveSegment();
return fp;
cx->stack.saveActiveSegment();
return Jsvalify(fp);
}
JS_PUBLIC_API(void)
@ -5207,10 +5206,10 @@ JS_RestoreFrameChain(JSContext *cx, JSStackFrame *fp)
{
CHECK_REQUEST(cx);
JS_ASSERT_NOT_ON_TRACE(cx);
JS_ASSERT(!cx->hasfp());
JS_ASSERT(!cx->running());
if (!fp)
return;
cx->restoreSegment();
cx->stack.restoreSegment();
}
/************************************************************************/
@ -6031,9 +6030,9 @@ JS_SetContextThread(JSContext *cx)
{
#ifdef JS_THREADSAFE
JS_ASSERT(!cx->outstandingRequests);
if (cx->thread) {
JS_ASSERT(CURRENT_THREAD_IS_ME(cx->thread));
return reinterpret_cast<jsword>(cx->thread->id);
if (cx->thread()) {
JS_ASSERT(CURRENT_THREAD_IS_ME(cx->thread()));
return reinterpret_cast<jsword>(cx->thread()->id);
}
if (!js_InitContextThread(cx)) {
@ -6057,7 +6056,7 @@ JS_ClearContextThread(JSContext *cx)
* is a harmless no-op.
*/
JS_ASSERT(cx->outstandingRequests == 0);
JSThread *t = cx->thread;
JSThread *t = cx->thread();
if (!t)
return 0;
JS_ASSERT(CURRENT_THREAD_IS_ME(t));
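
The jsapi.cpp hunks above apply a set of mechanical renames that recur throughout the rest of the diff. A minimal summary, inferred from the call sites shown here (the accessor definitions themselves live in jscntxt.h and the new vm/Stack* files, which are not part of this excerpt):

    // old spelling                 new spelling in this patch
    // cx->thread                -> cx->thread()               accessor over a private thread_
    // cx->hasfp()               -> cx->running()              "is a script frame active?"
    // cx->regs->pc              -> cx->regs().pc              regs reached through a method
    // cx->stack()               -> cx->stack                  the stack is now a plain member
    // cx->saveActiveSegment()   -> cx->stack.saveActiveSegment()
    // cx->restoreSegment()      -> cx->stack.restoreSegment()
    // JSStackFrame (internal)   -> js::StackFrame             public JSStackFrame stays opaque
    jsbytecode *pc = cx->running() ? cx->regs().pc : NULL;     // e.g. js_GetCurrentBytecodePC below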

View file

@ -111,6 +111,8 @@
#include "jsobjinlines.h"
#include "jsstrinlines.h"
#include "vm/Stack-inl.h"
using namespace js;
using namespace js::gc;
@ -366,7 +368,7 @@ GetElement(JSContext *cx, JSObject *obj, jsdouble index, JSBool *hole, Value *vp
index < obj->getArgsInitialLength() &&
!(*vp = obj->getArgsElement(uint32(index))).isMagic(JS_ARGS_HOLE)) {
*hole = JS_FALSE;
JSStackFrame *fp = (JSStackFrame *)obj->getPrivate();
StackFrame *fp = (StackFrame *)obj->getPrivate();
if (fp != JS_ARGUMENTS_OBJECT_ON_TRACE) {
if (fp)
*vp = fp->canonicalActualArg(index);
@ -434,7 +436,7 @@ GetElements(JSContext *cx, JSObject *aobj, jsuint length, Value *vp)
* fast path for deleted properties (MagicValue(JS_ARGS_HOLE) since
* this requires general-purpose property lookup.
*/
if (JSStackFrame *fp = (JSStackFrame *) aobj->getPrivate()) {
if (StackFrame *fp = (StackFrame *) aobj->getPrivate()) {
JS_ASSERT(fp->numActualArgs() <= JS_ARGS_LENGTH_MAX);
if (!fp->forEachCanonicalActualArg(CopyNonHoleArgsTo(aobj, vp)))
goto found_deleted_prop;
@ -1411,14 +1413,14 @@ array_toString(JSContext *cx, uintN argc, Value *vp)
LeaveTrace(cx);
InvokeArgsGuard args;
if (!cx->stack().pushInvokeArgs(cx, 0, &args))
if (!cx->stack.pushInvokeArgs(cx, 0, &args))
return false;
args.calleev() = join;
args.thisv().setObject(*obj);
/* Do the call. */
if (!Invoke(cx, args, 0))
if (!Invoke(cx, args))
return false;
*vp = args.rval();
return true;
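
The array_toString hunk above shows the new internal calling convention end to end; consolidated (a sketch of the pattern, not a complete function), it reads:

    // Reserve an argument frame through the context's stack member
    // (formerly cx->stack()), fill in callee/this, and call Invoke,
    // which no longer takes a flags argument.
    InvokeArgsGuard args;
    if (!cx->stack.pushInvokeArgs(cx, 0, &args))
        return false;
    args.calleev() = join;           // the callee Value
    args.thisv().setObject(*obj);    // the |this| object
    if (!Invoke(cx, args))
        return false;
    *vp = args.rval();
    return true;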

View file

@ -74,6 +74,7 @@
#include "jsobj.h"
#include "jsopcode.h"
#include "jspubtd.h"
#include "jsscan.h"
#include "jsscope.h"
#include "jsscript.h"
#include "jsstaticcheck.h"
@ -86,449 +87,11 @@
#include "jscntxtinlines.h"
#include "jscompartment.h"
#include "jsinterpinlines.h"
#include "jsobjinlines.h"
#ifdef XP_WIN
# include "jswin.h"
#elif defined(XP_OS2)
# define INCL_DOSMEMMGR
# include <os2.h>
#else
# include <unistd.h>
# include <sys/mman.h>
# if !defined(MAP_ANONYMOUS)
# if defined(MAP_ANON)
# define MAP_ANONYMOUS MAP_ANON
# else
# define MAP_ANONYMOUS 0
# endif
# endif
#endif
using namespace js;
using namespace js::gc;
JS_REQUIRES_STACK bool
StackSegment::contains(const JSStackFrame *fp) const
{
JS_ASSERT(inContext());
if (fp < initialFrame)
return false;
JSStackFrame *start;
if (isActive()) {
JS_ASSERT(cx->hasfp() && this == cx->activeSegment());
start = cx->fp();
} else {
JS_ASSERT(suspendedRegs && suspendedRegs->fp);
start = suspendedRegs->fp;
}
if (fp > start)
return false;
#ifdef DEBUG
bool found = false;
JSStackFrame *stop = initialFrame->prev();
for (JSStackFrame *f = start; !found && f != stop; f = f->prev()) {
if (f == fp) {
found = true;
break;
}
}
JS_ASSERT(found);
#endif
return true;
}
JSStackFrame *
StackSegment::computeNextFrame(JSStackFrame *fp) const
{
JS_ASSERT(contains(fp));
JS_ASSERT(fp != getCurrentFrame());
JSStackFrame *next = getCurrentFrame();
JSStackFrame *prev;
while ((prev = next->prev()) != fp)
next = prev;
return next;
}
StackSpace::StackSpace()
: base(NULL),
#ifdef XP_WIN
commitEnd(NULL),
#endif
end(NULL),
currentSegment(NULL),
#ifdef DEBUG
invokeSegment(NULL),
invokeFrame(NULL),
#endif
invokeArgEnd(NULL)
{
}
bool
StackSpace::init()
{
void *p;
#ifdef XP_WIN
p = VirtualAlloc(NULL, CAPACITY_BYTES, MEM_RESERVE, PAGE_READWRITE);
if (!p)
return false;
void *check = VirtualAlloc(p, COMMIT_BYTES, MEM_COMMIT, PAGE_READWRITE);
if (p != check)
return false;
base = reinterpret_cast<Value *>(p);
commitEnd = base + COMMIT_VALS;
end = base + CAPACITY_VALS;
#elif defined(XP_OS2)
if (DosAllocMem(&p, CAPACITY_BYTES, PAG_COMMIT | PAG_READ | PAG_WRITE | OBJ_ANY) &&
DosAllocMem(&p, CAPACITY_BYTES, PAG_COMMIT | PAG_READ | PAG_WRITE))
return false;
base = reinterpret_cast<Value *>(p);
end = base + CAPACITY_VALS;
#else
JS_ASSERT(CAPACITY_BYTES % getpagesize() == 0);
p = mmap(NULL, CAPACITY_BYTES, PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
if (p == MAP_FAILED)
return false;
base = reinterpret_cast<Value *>(p);
end = base + CAPACITY_VALS;
#endif
return true;
}
StackSpace::~StackSpace()
{
if (!base)
return;
#ifdef XP_WIN
VirtualFree(base, (commitEnd - base) * sizeof(Value), MEM_DECOMMIT);
VirtualFree(base, 0, MEM_RELEASE);
#elif defined(XP_OS2)
DosFreeMem(base);
#else
#ifdef SOLARIS
munmap((caddr_t)base, CAPACITY_BYTES);
#else
munmap(base, CAPACITY_BYTES);
#endif
#endif
}
#ifdef XP_WIN
JS_FRIEND_API(bool)
StackSpace::bumpCommit(Value *from, ptrdiff_t nvals) const
{
JS_ASSERT(end - from >= nvals);
Value *newCommit = commitEnd;
Value *request = from + nvals;
/* Use a dumb loop; will probably execute once. */
JS_ASSERT((end - newCommit) % COMMIT_VALS == 0);
do {
newCommit += COMMIT_VALS;
JS_ASSERT((end - newCommit) >= 0);
} while (newCommit < request);
/* The cast is safe because CAPACITY_BYTES is small. */
int32 size = static_cast<int32>(newCommit - commitEnd) * sizeof(Value);
if (!VirtualAlloc(commitEnd, size, MEM_COMMIT, PAGE_READWRITE))
return false;
commitEnd = newCommit;
return true;
}
#endif
void
StackSpace::mark(JSTracer *trc)
{
/*
* The correctness/completeness of marking depends on the continuity
* invariants described by the StackSegment and StackSpace definitions.
*
* NB:
* Stack slots might be torn or uninitialized in the presence of method
* JIT'd code. Arguments are an exception and are always fully synced
* (so they can be read by functions).
*/
Value *end = firstUnused();
for (StackSegment *seg = currentSegment; seg; seg = seg->getPreviousInMemory()) {
STATIC_ASSERT(ubound(end) >= 0);
if (seg->inContext()) {
/* This may be the only pointer to the initialVarObj. */
if (seg->hasInitialVarObj())
MarkObject(trc, seg->getInitialVarObj(), "varobj");
/* Mark slots/args trailing off of the last stack frame. */
JSStackFrame *fp = seg->getCurrentFrame();
MarkStackRangeConservatively(trc, fp->slots(), end);
/* Mark stack frames and slots/args between stack frames. */
JSStackFrame *initial = seg->getInitialFrame();
for (JSStackFrame *f = fp; f != initial; f = f->prev()) {
js_TraceStackFrame(trc, f);
MarkStackRangeConservatively(trc, f->prev()->slots(), (Value *)f);
}
/* Mark initial stack frame and leading args. */
js_TraceStackFrame(trc, initial);
MarkStackRangeConservatively(trc, seg->valueRangeBegin(), (Value *)initial);
} else {
/* Mark slots/args trailing off segment. */
MarkValueRange(trc, seg->valueRangeBegin(), end, "stack");
}
end = (Value *)seg;
}
}
bool
StackSpace::pushSegmentForInvoke(JSContext *cx, uintN argc, InvokeArgsGuard *ag)
{
Value *start = firstUnused();
ptrdiff_t nvals = VALUES_PER_STACK_SEGMENT + 2 + argc;
if (!ensureSpace(cx, start, nvals))
return false;
StackSegment *seg = new(start) StackSegment;
seg->setPreviousInMemory(currentSegment);
currentSegment = seg;
ag->cx = cx;
ag->seg = seg;
ImplicitCast<CallArgs>(*ag) = CallArgsFromVp(argc, seg->valueRangeBegin());
/* Use invokeArgEnd to root [vp, vpend) until the frame is pushed. */
#ifdef DEBUG
ag->prevInvokeSegment = invokeSegment;
invokeSegment = seg;
ag->prevInvokeFrame = invokeFrame;
invokeFrame = NULL;
#endif
ag->prevInvokeArgEnd = invokeArgEnd;
invokeArgEnd = ag->argv() + ag->argc();
return true;
}
void
StackSpace::popSegmentForInvoke(const InvokeArgsGuard &ag)
{
JS_ASSERT(!currentSegment->inContext());
JS_ASSERT(ag.seg == currentSegment);
JS_ASSERT(invokeSegment == currentSegment);
JS_ASSERT(invokeArgEnd == ag.argv() + ag.argc());
currentSegment = currentSegment->getPreviousInMemory();
#ifdef DEBUG
invokeSegment = ag.prevInvokeSegment;
invokeFrame = ag.prevInvokeFrame;
#endif
invokeArgEnd = ag.prevInvokeArgEnd;
}
bool
StackSpace::getSegmentAndFrame(JSContext *cx, uintN vplen, uintN nslots,
FrameGuard *fg) const
{
Value *start = firstUnused();
uintN nvals = VALUES_PER_STACK_SEGMENT + vplen + VALUES_PER_STACK_FRAME + nslots;
if (!ensureSpace(cx, start, nvals))
return false;
fg->seg_ = new(start) StackSegment;
fg->vp_ = start + VALUES_PER_STACK_SEGMENT;
fg->fp_ = reinterpret_cast<JSStackFrame *>(fg->vp() + vplen);
return true;
}
void
StackSpace::pushSegmentAndFrame(JSContext *cx, JSFrameRegs *regs, FrameGuard *fg)
{
/* Caller should have already initialized regs. */
JS_ASSERT(regs->fp == fg->fp());
StackSegment *seg = fg->segment();
/* Register new segment/frame with the context. */
cx->pushSegmentAndFrame(seg, *regs);
/* Officially push the segment/frame on the stack. */
seg->setPreviousInMemory(currentSegment);
currentSegment = seg;
/* Mark as 'pushed' in the guard. */
fg->cx_ = cx;
}
void
StackSpace::popSegmentAndFrame(JSContext *cx)
{
JS_ASSERT(isCurrentAndActive(cx));
JS_ASSERT(cx->hasActiveSegment());
PutActivationObjects(cx, cx->fp());
/* Officially pop the segment/frame from the stack. */
currentSegment = currentSegment->getPreviousInMemory();
/* Unregister pushed segment/frame from the context. */
cx->popSegmentAndFrame();
/*
* N.B. This StackSpace should be GC-able without any operations after
* cx->popSegmentAndFrame executes since it can trigger GC.
*/
}
FrameGuard::~FrameGuard()
{
if (!pushed())
return;
JS_ASSERT(cx_->activeSegment() == segment());
JS_ASSERT(cx_->maybefp() == fp());
cx_->stack().popSegmentAndFrame(cx_);
}
bool
StackSpace::getExecuteFrame(JSContext *cx, JSScript *script, ExecuteFrameGuard *fg) const
{
return getSegmentAndFrame(cx, 2, script->nslots, fg);
}
void
StackSpace::pushExecuteFrame(JSContext *cx, JSObject *initialVarObj, ExecuteFrameGuard *fg)
{
JSStackFrame *fp = fg->fp();
JSScript *script = fp->script();
fg->regs_.pc = script->code;
fg->regs_.fp = fp;
fg->regs_.sp = fp->base();
pushSegmentAndFrame(cx, &fg->regs_, fg);
fg->seg_->setInitialVarObj(initialVarObj);
}
bool
StackSpace::pushDummyFrame(JSContext *cx, JSObject &scopeChain, DummyFrameGuard *fg)
{
if (!getSegmentAndFrame(cx, 0 /*vplen*/, 0 /*nslots*/, fg))
return false;
fg->fp()->initDummyFrame(cx, scopeChain);
fg->regs_.fp = fg->fp();
fg->regs_.pc = NULL;
fg->regs_.sp = fg->fp()->slots();
pushSegmentAndFrame(cx, &fg->regs_, fg);
return true;
}
bool
StackSpace::getGeneratorFrame(JSContext *cx, uintN vplen, uintN nslots, GeneratorFrameGuard *fg)
{
return getSegmentAndFrame(cx, vplen, nslots, fg);
}
void
StackSpace::pushGeneratorFrame(JSContext *cx, JSFrameRegs *regs, GeneratorFrameGuard *fg)
{
JS_ASSERT(regs->fp == fg->fp());
JS_ASSERT(regs->fp->prev() == cx->maybefp());
pushSegmentAndFrame(cx, regs, fg);
}
bool
StackSpace::bumpCommitAndLimit(JSStackFrame *base, Value *sp, uintN nvals, Value **limit) const
{
JS_ASSERT(sp >= firstUnused());
JS_ASSERT(sp + nvals >= *limit);
#ifdef XP_WIN
if (commitEnd <= *limit) {
Value *quotaEnd = (Value *)base + STACK_QUOTA;
if (sp + nvals < quotaEnd) {
if (!ensureSpace(NULL, sp, nvals))
return false;
*limit = Min(quotaEnd, commitEnd);
return true;
}
}
#endif
return false;
}
void
FrameRegsIter::initSlow()
{
if (!curseg) {
curfp = NULL;
cursp = NULL;
curpc = NULL;
return;
}
JS_ASSERT(curseg->isSuspended());
curfp = curseg->getSuspendedFrame();
cursp = curseg->getSuspendedRegs()->sp;
curpc = curseg->getSuspendedRegs()->pc;
}
/*
* Using the invariant described in the js::StackSegment comment, we know that,
* when a pair of prev-linked stack frames are in the same segment, the
* first frame's address is the top of the prev-frame's stack, modulo missing
* arguments.
*/
void
FrameRegsIter::incSlow(JSStackFrame *fp, JSStackFrame *prev)
{
JS_ASSERT(prev);
JS_ASSERT(curpc == curfp->pc(cx, fp));
JS_ASSERT(fp == curseg->getInitialFrame());
/*
* If fp is in cs and the prev-frame is in csprev, it is not necessarily
* the case that |cs->getPreviousInContext == csprev| or that
* |csprev->getSuspendedFrame == prev| (because of indirect eval and
* JS_EvaluateInStackFrame). To compute prev's sp, we need to do a linear
* scan, keeping track of what is immediately after prev in memory.
*/
curseg = curseg->getPreviousInContext();
cursp = curseg->getSuspendedRegs()->sp;
JSStackFrame *f = curseg->getSuspendedFrame();
while (f != prev) {
if (f == curseg->getInitialFrame()) {
curseg = curseg->getPreviousInContext();
cursp = curseg->getSuspendedRegs()->sp;
f = curseg->getSuspendedFrame();
} else {
cursp = f->formalArgsEnd();
f = f->prev();
}
}
}
AllFramesIter::AllFramesIter(JSContext *cx)
: curcs(cx->stack().getCurrentSegment()),
curfp(curcs ? curcs->getCurrentFrame() : NULL)
{
}
AllFramesIter&
AllFramesIter::operator++()
{
JS_ASSERT(!done());
if (curfp == curcs->getInitialFrame()) {
curcs = curcs->getPreviousInMemory();
curfp = curcs ? curcs->getCurrentFrame() : NULL;
} else {
curfp = curfp->prev();
}
return *this;
}
namespace js {
ThreadData::ThreadData()
@ -649,16 +212,23 @@ js_InitContextThread(JSContext *cx)
return false;
JS_APPEND_LINK(&cx->threadLinks, &thread->contextList);
cx->thread = thread;
cx->setThread(thread);
return true;
}
void
JSContext::setThread(JSThread *thread)
{
thread_ = thread;
stack.threadReset();
}
void
js_ClearContextThread(JSContext *cx)
{
JS_ASSERT(CURRENT_THREAD_IS_ME(cx->thread));
JS_ASSERT(CURRENT_THREAD_IS_ME(cx->thread()));
JS_REMOVE_AND_INIT_LINK(&cx->threadLinks);
cx->thread = NULL;
cx->setThread(NULL);
}
#endif /* JS_THREADSAFE */
@ -711,7 +281,7 @@ js_PurgeThreads(JSContext *cx)
JSThread *thread = e.front().value;
if (JS_CLIST_IS_EMPTY(&thread->contextList)) {
JS_ASSERT(cx->thread != thread);
JS_ASSERT(cx->thread() != thread);
Foreground::delete_(thread);
e.removeFront();
} else {
@ -892,7 +462,7 @@ DumpEvalCacheMeter(JSContext *cx)
fprintf(fp, "eval cache meter (%p):\n",
#ifdef JS_THREADSAFE
(void *) cx->thread
(void *) cx->thread()
#else
(void *) cx->runtime
#endif
@ -980,8 +550,8 @@ js_DestroyContext(JSContext *cx, JSDestroyContextMode mode)
* optimized builds. We assume that the embedding knows that an OOM error
* cannot happen in JS_SetContextThread.
*/
JS_ASSERT(cx->thread && CURRENT_THREAD_IS_ME(cx->thread));
if (!cx->thread)
JS_ASSERT(cx->thread() && CURRENT_THREAD_IS_ME(cx->thread()));
if (!cx->thread())
JS_SetContextThread(cx);
/*
@ -990,7 +560,7 @@ js_DestroyContext(JSContext *cx, JSDestroyContextMode mode)
* on this cx contributes to cx->thread->data.requestDepth and there is no
* JS_SuspendRequest calls that set aside the counter.
*/
JS_ASSERT(cx->outstandingRequests <= cx->thread->data.requestDepth);
JS_ASSERT(cx->outstandingRequests <= cx->thread()->data.requestDepth);
#endif
if (mode != JSDCM_NEW_FAILED) {
@ -1012,7 +582,7 @@ js_DestroyContext(JSContext *cx, JSDestroyContextMode mode)
* Typically we are called outside a request, so ensure that the GC is not
* running before removing the context from rt->contextList, see bug 477021.
*/
if (cx->thread->data.requestDepth == 0)
if (cx->thread()->data.requestDepth == 0)
js_WaitForGC(rt);
#endif
JS_REMOVE_LINK(&cx->link);
@ -1043,7 +613,7 @@ js_DestroyContext(JSContext *cx, JSDestroyContextMode mode)
* force or maybe run the GC, but by that point, rt->state will
* not be JSRTS_UP, and that GC attempt will return early.
*/
if (cx->thread->data.requestDepth == 0)
if (cx->thread()->data.requestDepth == 0)
JS_BeginRequest(cx);
#endif
@ -1089,7 +659,7 @@ js_DestroyContext(JSContext *cx, JSDestroyContextMode mode)
}
#ifdef JS_THREADSAFE
#ifdef DEBUG
JSThread *t = cx->thread;
JSThread *t = cx->thread();
#endif
js_ClearContextThread(cx);
JS_ASSERT_IF(JS_CLIST_IS_EMPTY(&t->contextList), !t->data.requestDepth);
@ -1123,7 +693,7 @@ js_NextActiveContext(JSRuntime *rt, JSContext *cx)
JSContext *iter = cx;
#ifdef JS_THREADSAFE
while ((cx = js_ContextIterator(rt, JS_FALSE, &iter)) != NULL) {
if (cx->outstandingRequests && cx->thread->data.requestDepth)
if (cx->outstandingRequests && cx->thread()->data.requestDepth)
break;
}
return cx;
@ -1191,7 +761,7 @@ PopulateReportBlame(JSContext *cx, JSErrorReport *report)
* Walk stack until we find a frame that is associated with some script
* rather than a native frame.
*/
for (JSStackFrame *fp = js_GetTopStackFrame(cx); fp; fp = fp->prev()) {
for (StackFrame *fp = js_GetTopStackFrame(cx); fp; fp = fp->prev()) {
if (fp->pc(cx)) {
report->filename = fp->script()->filename;
report->lineno = js_FramePCToLineNumber(cx, fp);
@ -1253,22 +823,24 @@ js_ReportOutOfMemory(JSContext *cx)
}
void
js_ReportOutOfScriptQuota(JSContext *cx)
js_ReportOutOfScriptQuota(JSContext *maybecx)
{
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL,
JSMSG_SCRIPT_STACK_QUOTA);
}
JS_FRIEND_API(void)
js_ReportOverRecursed(JSContext *cx)
{
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_OVER_RECURSED);
if (maybecx)
JS_ReportErrorNumber(maybecx, js_GetErrorMessage, NULL, JSMSG_SCRIPT_STACK_QUOTA);
}
void
js_ReportAllocationOverflow(JSContext *cx)
js_ReportOverRecursed(JSContext *maybecx)
{
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_ALLOC_OVERFLOW);
if (maybecx)
JS_ReportErrorNumber(maybecx, js_GetErrorMessage, NULL, JSMSG_OVER_RECURSED);
}
void
js_ReportAllocationOverflow(JSContext *maybecx)
{
if (maybecx)
JS_ReportErrorNumber(maybecx, js_GetErrorMessage, NULL, JSMSG_ALLOC_OVERFLOW);
}
/*
@ -1286,7 +858,7 @@ checkReportFlags(JSContext *cx, uintN *flags)
* We assume that if the top frame is a native, then it is strict if
* the nearest scripted frame is strict, see bug 536306.
*/
JSStackFrame *fp = js_GetScriptedCaller(cx, NULL);
StackFrame *fp = js_GetScriptedCaller(cx, NULL);
if (fp && fp->script()->strictModeCode)
*flags &= ~JSREPORT_WARNING;
else if (cx->hasStrictOption())
@ -1781,7 +1353,7 @@ TriggerOperationCallback(JSContext *cx)
*/
ThreadData *td;
#ifdef JS_THREADSAFE
JSThread *thread = cx->thread;
JSThread *thread = cx->thread();
if (!thread)
return;
td = &thread->data;
@ -1800,8 +1372,8 @@ TriggerAllOperationCallbacks(JSRuntime *rt)
} /* namespace js */
JSStackFrame *
js_GetScriptedCaller(JSContext *cx, JSStackFrame *fp)
StackFrame *
js_GetScriptedCaller(JSContext *cx, StackFrame *fp)
{
if (!fp)
fp = js_GetTopStackFrame(cx);
@ -1824,7 +1396,7 @@ js_GetCurrentBytecodePC(JSContext* cx)
#endif
{
JS_ASSERT_NOT_ON_TRACE(cx); /* for static analysis */
pc = cx->regs ? cx->regs->pc : NULL;
pc = cx->running() ? cx->regs().pc : NULL;
if (!pc)
return NULL;
imacpc = cx->fp()->maybeImacropc();
@ -1891,14 +1463,14 @@ JSContext::JSContext(JSRuntime *rt)
: hasVersionOverride(false),
runtime(rt),
compartment(NULL),
regs(NULL),
stack(thisDuringConstruction()),
busyArrays()
{}
JSContext::~JSContext()
{
#ifdef JS_THREADSAFE
JS_ASSERT(!thread);
JS_ASSERT(!thread_);
#endif
/* Free the stuff hanging off of cx. */
@ -1924,7 +1496,7 @@ void
JSContext::resetCompartment()
{
JSObject *scopeobj;
if (hasfp()) {
if (stack.running()) {
scopeobj = &fp()->scopeChain();
} else {
scopeobj = globalObject;
@ -1969,71 +1541,10 @@ JSContext::wrapPendingException()
setPendingException(v);
}
void
JSContext::pushSegmentAndFrame(js::StackSegment *newseg, JSFrameRegs &newregs)
{
JS_ASSERT(regs != &newregs);
if (hasActiveSegment())
currentSegment->suspend(regs);
newseg->setPreviousInContext(currentSegment);
currentSegment = newseg;
setCurrentRegs(&newregs);
newseg->joinContext(this, newregs.fp);
}
void
JSContext::popSegmentAndFrame()
{
JS_ASSERT_IF(regs->fp->hasCallObj(), !regs->fp->callObj().getPrivate());
JS_ASSERT_IF(regs->fp->hasArgsObj(), !regs->fp->argsObj().getPrivate());
JS_ASSERT(currentSegment->maybeContext() == this);
JS_ASSERT(currentSegment->getInitialFrame() == regs->fp);
/*
* NB: This function calls resetCompartment, which may GC, so the stack needs
* to be in a GC-able state by that point.
*/
currentSegment->leaveContext();
currentSegment = currentSegment->getPreviousInContext();
if (currentSegment) {
if (currentSegment->isSaved()) {
setCurrentRegs(NULL);
resetCompartment();
} else {
setCurrentRegs(currentSegment->getSuspendedRegs());
currentSegment->resume();
}
} else {
JS_ASSERT(regs->fp->prev() == NULL);
setCurrentRegs(NULL);
resetCompartment();
}
maybeMigrateVersionOverride();
}
void
JSContext::saveActiveSegment()
{
JS_ASSERT(hasActiveSegment());
currentSegment->save(regs);
setCurrentRegs(NULL);
resetCompartment();
}
void
JSContext::restoreSegment()
{
js::StackSegment *ccs = currentSegment;
setCurrentRegs(ccs->getSuspendedRegs());
ccs->restore();
resetCompartment();
}
JSGenerator *
JSContext::generatorFor(JSStackFrame *fp) const
JSContext::generatorFor(StackFrame *fp) const
{
JS_ASSERT(stack().contains(fp) && fp->isGeneratorFrame());
JS_ASSERT(stack.contains(fp) && fp->isGeneratorFrame());
JS_ASSERT(!fp->isFloatingGenerator());
JS_ASSERT(!genStack.empty());
@ -2049,17 +1560,6 @@ JSContext::generatorFor(JSStackFrame *fp) const
return NULL;
}
StackSegment *
StackSpace::containingSegment(const JSStackFrame *target)
{
for (StackSegment *seg = currentSegment; seg; seg = seg->getPreviousInMemory()) {
if (seg->contains(target))
return seg;
}
JS_NOT_REACHED("frame not in stack space");
return NULL;
}
JS_FRIEND_API(void)
JSRuntime::onTooMuchMalloc()
{
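
For reference, the JSContext changes that drive the cx->thread() and cx->stack spellings can be pieced together from this hunk; a sketch of their shape (the class name of the stack member and the exact layout are assumptions, only the member and method names come from the diff):

    struct JSContext {
        // ...
        js::ContextStack stack;            // initialized as stack(thisDuringConstruction());
                                           // the type name here is an assumption
        JSThread *thread() const { return thread_; }
        void setThread(JSThread *thread);  // per the hunk: assigns thread_ and
                                           // calls stack.threadReset()
      private:
        JSThread *thread_;                 // replaces the public 'thread' member
    };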

File diff suppressed because it is too large.

View file

@ -43,7 +43,6 @@
#include "jscntxt.h"
#include "jscompartment.h"
#include "jsparse.h"
#include "jsstaticcheck.h"
#include "jsxml.h"
#include "jsregexp.h"
@ -64,7 +63,7 @@ GetGlobalForScopeChain(JSContext *cx)
*/
VOUCH_DOES_NOT_REQUIRE_STACK();
if (cx->hasfp())
if (cx->running())
return cx->fp()->scopeChain().getGlobal();
JSObject *scope = cx->globalObject;
@ -76,425 +75,6 @@ GetGlobalForScopeChain(JSContext *cx)
return scope->asGlobal();
}
}
#ifdef JS_METHODJIT
inline js::mjit::JaegerCompartment *JSContext::jaegerCompartment()
{
return compartment->jaegerCompartment;
}
#endif
inline bool
JSContext::ensureGeneratorStackSpace()
{
bool ok = genStack.reserve(genStack.length() + 1);
if (!ok)
js_ReportOutOfMemory(this);
return ok;
}
inline js::RegExpStatics *
JSContext::regExpStatics()
{
return js::RegExpStatics::extractFrom(js::GetGlobalForScopeChain(this));
}
namespace js {
JS_REQUIRES_STACK JS_ALWAYS_INLINE JSFrameRegs *
StackSegment::getCurrentRegs() const
{
JS_ASSERT(inContext());
return isActive() ? cx->regs : getSuspendedRegs();
}
JS_REQUIRES_STACK JS_ALWAYS_INLINE JSStackFrame *
StackSegment::getCurrentFrame() const
{
return getCurrentRegs()->fp;
}
JS_REQUIRES_STACK inline Value *
StackSpace::firstUnused() const
{
StackSegment *seg = currentSegment;
if (!seg) {
JS_ASSERT(invokeArgEnd == NULL);
return base;
}
if (seg->inContext()) {
Value *sp = seg->getCurrentRegs()->sp;
if (invokeArgEnd > sp) {
JS_ASSERT(invokeSegment == currentSegment);
JS_ASSERT_IF(seg->maybeContext()->hasfp(),
invokeFrame == seg->maybeContext()->fp());
return invokeArgEnd;
}
return sp;
}
JS_ASSERT(invokeArgEnd);
JS_ASSERT(invokeSegment == currentSegment);
return invokeArgEnd;
}
/* Inline so we don't need the friend API. */
JS_ALWAYS_INLINE bool
StackSpace::isCurrentAndActive(JSContext *cx) const
{
#ifdef DEBUG
JS_ASSERT_IF(cx->getCurrentSegment(),
cx->getCurrentSegment()->maybeContext() == cx);
cx->assertSegmentsInSync();
#endif
return currentSegment &&
currentSegment->isActive() &&
currentSegment == cx->getCurrentSegment();
}
STATIC_POSTCONDITION(!return || ubound(from) >= nvals)
JS_ALWAYS_INLINE bool
StackSpace::ensureSpace(JSContext *maybecx, Value *from, ptrdiff_t nvals) const
{
JS_ASSERT(from >= firstUnused());
#ifdef XP_WIN
JS_ASSERT(from <= commitEnd);
if (commitEnd - from >= nvals)
goto success;
if (end - from < nvals) {
if (maybecx)
js_ReportOutOfScriptQuota(maybecx);
return false;
}
if (!bumpCommit(from, nvals)) {
if (maybecx)
js_ReportOutOfScriptQuota(maybecx);
return false;
}
goto success;
#else
if (end - from < nvals) {
if (maybecx)
js_ReportOutOfScriptQuota(maybecx);
return false;
}
goto success;
#endif
success:
#ifdef DEBUG
memset(from, 0xde, nvals * sizeof(js::Value));
#endif
return true;
}
JS_ALWAYS_INLINE bool
StackSpace::ensureEnoughSpaceToEnterTrace()
{
#ifdef XP_WIN
return ensureSpace(NULL, firstUnused(), MAX_TRACE_SPACE_VALS);
#endif
return end - firstUnused() > MAX_TRACE_SPACE_VALS;
}
JS_ALWAYS_INLINE bool
StackSpace::EnsureSpaceCheck::operator()(const StackSpace &stack, JSContext *cx,
Value *from, uintN nvals)
{
return stack.ensureSpace(cx, from, nvals);
}
JS_ALWAYS_INLINE bool
StackSpace::LimitCheck::operator()(const StackSpace &stack, JSContext *cx,
Value *from, uintN nvals)
{
JS_ASSERT(from == stack.firstUnused());
JS_ASSERT(from < *limit);
if (*limit - from >= ptrdiff_t(nvals))
return true;
if (stack.bumpCommitAndLimit(base, from, nvals, limit))
return true;
js_ReportOverRecursed(cx);
return false;
}
JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
StackSpace::pushInvokeArgs(JSContext *cx, uintN argc, InvokeArgsGuard *ag)
{
if (JS_UNLIKELY(!isCurrentAndActive(cx)))
return pushSegmentForInvoke(cx, argc, ag);
Value *sp = cx->regs->sp;
Value *start = invokeArgEnd > sp ? invokeArgEnd : sp;
JS_ASSERT(start == firstUnused());
uintN nvals = 2 + argc;
if (!ensureSpace(cx, start, nvals))
return false;
Value *vp = start;
Value *vpend = vp + nvals;
/* Don't need to MakeRangeGCSafe: the VM stack is conservatively marked. */
/* Use invokeArgEnd to root [vp, vpend) until the frame is pushed. */
ag->prevInvokeArgEnd = invokeArgEnd;
invokeArgEnd = vpend;
#ifdef DEBUG
ag->prevInvokeSegment = invokeSegment;
invokeSegment = currentSegment;
ag->prevInvokeFrame = invokeFrame;
invokeFrame = cx->maybefp();
#endif
ag->cx = cx;
ImplicitCast<CallArgs>(*ag) = CallArgsFromVp(argc, vp);
return true;
}
JS_REQUIRES_STACK JS_ALWAYS_INLINE void
StackSpace::popInvokeArgs(const InvokeArgsGuard &ag)
{
if (JS_UNLIKELY(ag.seg != NULL)) {
popSegmentForInvoke(ag);
return;
}
JS_ASSERT(isCurrentAndActive(ag.cx));
JS_ASSERT(invokeSegment == currentSegment);
JS_ASSERT(invokeFrame == ag.cx->maybefp());
JS_ASSERT(invokeArgEnd == ag.argv() + ag.argc());
#ifdef DEBUG
invokeSegment = ag.prevInvokeSegment;
invokeFrame = ag.prevInvokeFrame;
#endif
invokeArgEnd = ag.prevInvokeArgEnd;
}
JS_ALWAYS_INLINE
InvokeArgsGuard::~InvokeArgsGuard()
{
if (JS_UNLIKELY(!pushed()))
return;
cx->stack().popInvokeArgs(*this);
}
template <class Check>
JS_REQUIRES_STACK JS_ALWAYS_INLINE JSStackFrame *
StackSpace::getCallFrame(JSContext *cx, Value *firstUnused, uintN nactual,
JSFunction *fun, JSScript *script, uint32 *flags,
Check check) const
{
JS_ASSERT(fun->script() == script);
/* Include an extra sizeof(JSStackFrame) for the method-jit. */
uintN nvals = VALUES_PER_STACK_FRAME + script->nslots;
uintN nformal = fun->nargs;
/* Maintain layout invariant: &formalArgs[0] == ((Value *)fp) - nformal. */
if (nactual == nformal) {
if (JS_UNLIKELY(!check(*this, cx, firstUnused, nvals)))
return NULL;
return reinterpret_cast<JSStackFrame *>(firstUnused);
}
if (nactual < nformal) {
*flags |= JSFRAME_UNDERFLOW_ARGS;
uintN nmissing = nformal - nactual;
if (JS_UNLIKELY(!check(*this, cx, firstUnused, nmissing + nvals)))
return NULL;
SetValueRangeToUndefined(firstUnused, nmissing);
return reinterpret_cast<JSStackFrame *>(firstUnused + nmissing);
}
*flags |= JSFRAME_OVERFLOW_ARGS;
uintN ncopy = 2 + nformal;
if (JS_UNLIKELY(!check(*this, cx, firstUnused, ncopy + nvals)))
return NULL;
Value *dst = firstUnused;
Value *src = firstUnused - (2 + nactual);
PodCopy(dst, src, ncopy);
Debug_SetValueRangeToCrashOnTouch(src, ncopy);
return reinterpret_cast<JSStackFrame *>(firstUnused + ncopy);
}
JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
StackSpace::getInvokeFrame(JSContext *cx, const CallArgs &args,
JSFunction *fun, JSScript *script,
uint32 *flags, InvokeFrameGuard *fg) const
{
JS_ASSERT(firstUnused() == args.argv() + args.argc());
Value *firstUnused = args.argv() + args.argc();
fg->regs_.fp = getCallFrame(cx, firstUnused, args.argc(), fun, script, flags,
EnsureSpaceCheck());
fg->regs_.sp = fg->regs_.fp->slots() + script->nfixed;
fg->regs_.pc = script->code;
return fg->regs_.fp != NULL;
}
JS_REQUIRES_STACK JS_ALWAYS_INLINE void
StackSpace::pushInvokeFrame(JSContext *cx, const CallArgs &args,
InvokeFrameGuard *fg)
{
JS_ASSERT(firstUnused() == args.argv() + args.argc());
if (JS_UNLIKELY(!currentSegment->inContext())) {
cx->pushSegmentAndFrame(currentSegment, fg->regs_);
} else {
fg->prevRegs_ = cx->regs;
cx->setCurrentRegs(&fg->regs_);
}
fg->cx_ = cx;
JS_ASSERT(isCurrentAndActive(cx));
}
JS_REQUIRES_STACK JS_ALWAYS_INLINE void
StackSpace::popInvokeFrame(const InvokeFrameGuard &fg)
{
JSContext *cx = fg.cx_;
JSStackFrame *fp = fg.regs_.fp;
PutActivationObjects(cx, fp);
JS_ASSERT(isCurrentAndActive(cx));
if (JS_UNLIKELY(currentSegment->getInitialFrame() == fp)) {
cx->popSegmentAndFrame();
} else {
JS_ASSERT(&fg.regs_ == cx->regs);
JS_ASSERT(fp->prev_ == fg.prevRegs_->fp);
JS_ASSERT(fp->prevpc() == fg.prevRegs_->pc);
cx->setCurrentRegs(fg.prevRegs_);
}
}
JS_ALWAYS_INLINE void
InvokeFrameGuard::pop()
{
JS_ASSERT(pushed());
cx_->stack().popInvokeFrame(*this);
cx_ = NULL;
}
JS_REQUIRES_STACK JS_ALWAYS_INLINE JSStackFrame *
StackSpace::getInlineFrame(JSContext *cx, Value *sp, uintN nactual,
JSFunction *fun, JSScript *script, uint32 *flags) const
{
JS_ASSERT(isCurrentAndActive(cx));
JS_ASSERT(cx->hasActiveSegment());
JS_ASSERT(cx->regs->sp == sp);
return getCallFrame(cx, sp, nactual, fun, script, flags, EnsureSpaceCheck());
}
JS_REQUIRES_STACK JS_ALWAYS_INLINE JSStackFrame *
StackSpace::getInlineFrameWithinLimit(JSContext *cx, Value *sp, uintN nactual,
JSFunction *fun, JSScript *script, uint32 *flags,
JSStackFrame *base, Value **limit) const
{
JS_ASSERT(isCurrentAndActive(cx));
JS_ASSERT(cx->hasActiveSegment());
JS_ASSERT(cx->regs->sp == sp);
return getCallFrame(cx, sp, nactual, fun, script, flags, LimitCheck(base, limit));
}
JS_REQUIRES_STACK JS_ALWAYS_INLINE void
StackSpace::pushInlineFrame(JSContext *cx, JSScript *script, JSStackFrame *fp,
JSFrameRegs *regs)
{
JS_ASSERT(isCurrentAndActive(cx));
JS_ASSERT(cx->regs == regs && script == fp->script());
regs->fp = fp;
regs->pc = script->code;
regs->sp = fp->slots() + script->nfixed;
}
JS_REQUIRES_STACK JS_ALWAYS_INLINE void
StackSpace::popInlineFrame(JSContext *cx, JSStackFrame *prev, Value *newsp)
{
JSFrameRegs *regs = cx->regs;
JSStackFrame *fp = regs->fp;
JS_ASSERT(isCurrentAndActive(cx));
JS_ASSERT(cx->hasActiveSegment());
JS_ASSERT(fp->prev_ == prev);
JS_ASSERT(!fp->hasImacropc());
JS_ASSERT(prev->base() <= newsp && newsp <= fp->formalArgsEnd());
PutActivationObjects(cx, fp);
regs->pc = prev->pc(cx, fp);
regs->fp = prev;
regs->sp = newsp;
}
JS_ALWAYS_INLINE Value *
StackSpace::getStackLimit(JSContext *cx)
{
Value *sp = cx->regs->sp;
JS_ASSERT(sp == firstUnused());
Value *limit = sp + STACK_QUOTA;
/*
* Try to reserve the whole STACK_QUOTA. If that fails, though, just
* reserve the minimum required space: enough for the nslots + an
* additional stack frame.
*/
#ifdef XP_WIN
if (JS_LIKELY(limit <= commitEnd))
return limit;
if (ensureSpace(NULL /* don't report error */, sp, STACK_QUOTA))
return limit;
uintN minimum = cx->fp()->numSlots() + VALUES_PER_STACK_FRAME;
return ensureSpace(cx, sp, minimum) ? sp + minimum : NULL;
#else
if (JS_LIKELY(limit <= end))
return limit;
uintN minimum = cx->fp()->numSlots() + VALUES_PER_STACK_FRAME;
return ensureSpace(cx, sp, minimum) ? sp + minimum : NULL;
#endif
}
JS_REQUIRES_STACK inline
FrameRegsIter::FrameRegsIter(JSContext *cx)
: cx(cx)
{
curseg = cx->getCurrentSegment();
if (JS_UNLIKELY(!curseg || !curseg->isActive())) {
initSlow();
return;
}
JS_ASSERT(cx->regs->fp);
curfp = cx->regs->fp;
cursp = cx->regs->sp;
curpc = cx->regs->pc;
return;
}
inline FrameRegsIter &
FrameRegsIter::operator++()
{
JSStackFrame *fp = curfp;
JSStackFrame *prev = curfp = curfp->prev();
if (!prev)
return *this;
curpc = curfp->pc(cx, fp);
if (JS_UNLIKELY(fp == curseg->getInitialFrame())) {
incSlow(fp, prev);
return *this;
}
cursp = fp->formalArgsEnd();
return *this;
}
inline GSNCache *
GetGSNCache(JSContext *cx)
{
@ -528,7 +108,7 @@ class CompartmentChecker
public:
explicit CompartmentChecker(JSContext *cx) : context(cx), compartment(cx->compartment) {
check(cx->hasfp() ? JS_GetGlobalForScopeChain(cx) : cx->globalObject);
check(cx->running() ? JS_GetGlobalForScopeChain(cx) : cx->globalObject);
VOUCH_DOES_NOT_REQUIRE_STACK();
}
@ -613,7 +193,7 @@ class CompartmentChecker
}
}
void check(JSStackFrame *fp) {
void check(StackFrame *fp) {
check(&fp->scopeChain());
}
};
@ -811,6 +391,28 @@ CanLeaveTrace(JSContext *cx)
} /* namespace js */
#ifdef JS_METHODJIT
inline js::mjit::JaegerCompartment *JSContext::jaegerCompartment()
{
return compartment->jaegerCompartment;
}
#endif
inline bool
JSContext::ensureGeneratorStackSpace()
{
bool ok = genStack.reserve(genStack.length() + 1);
if (!ok)
js_ReportOutOfMemory(this);
return ok;
}
inline js::RegExpStatics *
JSContext::regExpStatics()
{
return js::RegExpStatics::extractFrom(js::GetGlobalForScopeChain(this));
}
inline void
JSContext::setPendingException(js::Value v) {
this->throwing = true;

View file

@ -222,7 +222,7 @@ JSCompartment::wrap(JSContext *cx, Value *vp)
* This loses us some transparency, and is generally very cheesy.
*/
JSObject *global;
if (cx->hasfp()) {
if (cx->running()) {
global = cx->fp()->scopeChain().getGlobal();
} else {
global = cx->globalObject;

View file

@ -136,6 +136,10 @@ struct TracerState
*/
struct TraceNativeStorage
{
/* Max number of stack slots/frame that may need to be restored in LeaveTree. */
static const size_t MAX_NATIVE_STACK_SLOTS = 4096;
static const size_t MAX_CALL_STACK_ENTRIES = 500;
double stack_global_buf[MAX_NATIVE_STACK_SLOTS + GLOBAL_SLOTS_BUFFER_SIZE];
FrameInfo *callstack_buf[MAX_CALL_STACK_ENTRIES];

View file

@ -73,6 +73,8 @@
#include "jsobjinlines.h"
#include "vm/Stack-inl.h"
using namespace js;
/*
@ -2106,13 +2108,13 @@ date_toJSON(JSContext *cx, uintN argc, Value *vp)
/* Step 6. */
LeaveTrace(cx);
InvokeArgsGuard args;
if (!cx->stack().pushInvokeArgs(cx, 0, &args))
if (!cx->stack.pushInvokeArgs(cx, 0, &args))
return false;
args.calleev() = toISO;
args.thisv().setObject(*obj);
if (!Invoke(cx, args, 0))
if (!Invoke(cx, args))
return false;
*vp = args.rval();
return true;

View file

@ -69,11 +69,13 @@
#include "jsatominlines.h"
#include "jsdbgapiinlines.h"
#include "jsinterpinlines.h"
#include "jsobjinlines.h"
#include "jsinterpinlines.h"
#include "jsscopeinlines.h"
#include "jsscriptinlines.h"
#include "vm/Stack-inl.h"
#include "jsautooplen.h"
#include "methodjit/MethodJIT.h"
@ -116,21 +118,21 @@ JS_SetRuntimeDebugMode(JSRuntime *rt, JSBool debug)
namespace js {
void
ScriptDebugPrologue(JSContext *cx, JSStackFrame *fp)
ScriptDebugPrologue(JSContext *cx, StackFrame *fp)
{
if (fp->isFramePushedByExecute()) {
if (JSInterpreterHook hook = cx->debugHooks->executeHook)
fp->setHookData(hook(cx, fp, true, 0, cx->debugHooks->executeHookData));
fp->setHookData(hook(cx, Jsvalify(fp), true, 0, cx->debugHooks->executeHookData));
} else {
if (JSInterpreterHook hook = cx->debugHooks->callHook)
fp->setHookData(hook(cx, fp, true, 0, cx->debugHooks->callHookData));
fp->setHookData(hook(cx, Jsvalify(fp), true, 0, cx->debugHooks->callHookData));
}
Probes::enterJSFun(cx, fp->maybeFun(), fp->script());
}
bool
ScriptDebugEpilogue(JSContext *cx, JSStackFrame *fp, bool okArg)
ScriptDebugEpilogue(JSContext *cx, StackFrame *fp, bool okArg)
{
JSBool ok = okArg;
@ -139,10 +141,10 @@ ScriptDebugEpilogue(JSContext *cx, JSStackFrame *fp, bool okArg)
if (void *hookData = fp->maybeHookData()) {
if (fp->isFramePushedByExecute()) {
if (JSInterpreterHook hook = cx->debugHooks->executeHook)
hook(cx, fp, false, &ok, hookData);
hook(cx, Jsvalify(fp), false, &ok, hookData);
} else {
if (JSInterpreterHook hook = cx->debugHooks->callHook)
hook(cx, fp, false, &ok, hookData);
hook(cx, Jsvalify(fp), false, &ok, hookData);
}
}
@ -1361,33 +1363,35 @@ JS_GetScriptPrincipals(JSContext *cx, JSScript *script)
JS_PUBLIC_API(JSStackFrame *)
JS_FrameIterator(JSContext *cx, JSStackFrame **iteratorp)
{
*iteratorp = (*iteratorp == NULL) ? js_GetTopStackFrame(cx) : (*iteratorp)->prev();
StackFrame *fp = Valueify(*iteratorp);
*iteratorp = Jsvalify((fp == NULL) ? js_GetTopStackFrame(cx) : fp->prev());
return *iteratorp;
}
JS_PUBLIC_API(JSScript *)
JS_GetFrameScript(JSContext *cx, JSStackFrame *fp)
{
return fp->maybeScript();
return Valueify(fp)->maybeScript();
}
JS_PUBLIC_API(jsbytecode *)
JS_GetFramePC(JSContext *cx, JSStackFrame *fp)
{
return fp->pc(cx);
return Valueify(fp)->pc(cx);
}
JS_PUBLIC_API(JSStackFrame *)
JS_GetScriptedCaller(JSContext *cx, JSStackFrame *fp)
{
return js_GetScriptedCaller(cx, fp);
return Jsvalify(js_GetScriptedCaller(cx, Valueify(fp)));
}
JS_PUBLIC_API(void *)
JS_GetFrameAnnotation(JSContext *cx, JSStackFrame *fp)
JS_GetFrameAnnotation(JSContext *cx, JSStackFrame *fpArg)
{
StackFrame *fp = Valueify(fpArg);
if (fp->annotation() && fp->isScriptFrame()) {
JSPrincipals *principals = fp->principals(cx);
JSPrincipals *principals = fp->scopeChain().principals(cx);
if (principals && principals->globalPrivilegesEnabled(cx, principals)) {
/*
@ -1404,7 +1408,7 @@ JS_GetFrameAnnotation(JSContext *cx, JSStackFrame *fp)
JS_PUBLIC_API(void)
JS_SetFrameAnnotation(JSContext *cx, JSStackFrame *fp, void *annotation)
{
fp->setAnnotation(annotation);
Valueify(fp)->setAnnotation(annotation);
}
JS_PUBLIC_API(void *)
@ -1412,7 +1416,7 @@ JS_GetFramePrincipalArray(JSContext *cx, JSStackFrame *fp)
{
JSPrincipals *principals;
principals = fp->principals(cx);
principals = Valueify(fp)->scopeChain().principals(cx);
if (!principals)
return NULL;
return principals->getPrincipalArray(cx, principals);
@ -1421,34 +1425,36 @@ JS_GetFramePrincipalArray(JSContext *cx, JSStackFrame *fp)
JS_PUBLIC_API(JSBool)
JS_IsScriptFrame(JSContext *cx, JSStackFrame *fp)
{
return !fp->isDummyFrame();
return !Valueify(fp)->isDummyFrame();
}
/* this is deprecated, use JS_GetFrameScopeChain instead */
JS_PUBLIC_API(JSObject *)
JS_GetFrameObject(JSContext *cx, JSStackFrame *fp)
{
return &fp->scopeChain();
return &Valueify(fp)->scopeChain();
}
JS_PUBLIC_API(JSObject *)
JS_GetFrameScopeChain(JSContext *cx, JSStackFrame *fp)
JS_GetFrameScopeChain(JSContext *cx, JSStackFrame *fpArg)
{
JS_ASSERT(cx->stack().contains(fp));
StackFrame *fp = Valueify(fpArg);
JS_ASSERT(cx->stack.contains(fp));
js::AutoCompartment ac(cx, &fp->scopeChain());
if (!ac.enter())
return NULL;
/* Force creation of argument and call objects if not yet created */
(void) JS_GetFrameCallObject(cx, fp);
(void) JS_GetFrameCallObject(cx, Jsvalify(fp));
return GetScopeChain(cx, fp);
}
JS_PUBLIC_API(JSObject *)
JS_GetFrameCallObject(JSContext *cx, JSStackFrame *fp)
JS_GetFrameCallObject(JSContext *cx, JSStackFrame *fpArg)
{
JS_ASSERT(cx->stack().contains(fp));
StackFrame *fp = Valueify(fpArg);
JS_ASSERT(cx->stack.contains(fp));
if (!fp->isFunctionFrame())
return NULL;
@ -1467,8 +1473,9 @@ JS_GetFrameCallObject(JSContext *cx, JSStackFrame *fp)
}
JS_PUBLIC_API(JSBool)
JS_GetFrameThis(JSContext *cx, JSStackFrame *fp, jsval *thisv)
JS_GetFrameThis(JSContext *cx, JSStackFrame *fpArg, jsval *thisv)
{
StackFrame *fp = Valueify(fpArg);
if (fp->isDummyFrame())
return false;
@ -1485,12 +1492,13 @@ JS_GetFrameThis(JSContext *cx, JSStackFrame *fp, jsval *thisv)
JS_PUBLIC_API(JSFunction *)
JS_GetFrameFunction(JSContext *cx, JSStackFrame *fp)
{
return fp->maybeFun();
return Valueify(fp)->maybeFun();
}
JS_PUBLIC_API(JSObject *)
JS_GetFrameFunctionObject(JSContext *cx, JSStackFrame *fp)
JS_GetFrameFunctionObject(JSContext *cx, JSStackFrame *fpArg)
{
StackFrame *fp = Valueify(fpArg);
if (!fp->isFunctionFrame())
return NULL;
@ -1502,13 +1510,13 @@ JS_GetFrameFunctionObject(JSContext *cx, JSStackFrame *fp)
JS_PUBLIC_API(JSBool)
JS_IsConstructorFrame(JSContext *cx, JSStackFrame *fp)
{
return fp->isConstructing();
return Valueify(fp)->isConstructing();
}
JS_PUBLIC_API(JSObject *)
JS_GetFrameCalleeObject(JSContext *cx, JSStackFrame *fp)
{
return fp->maybeCallee();
return Valueify(fp)->maybeCallee();
}
JS_PUBLIC_API(JSBool)
@ -1516,7 +1524,7 @@ JS_GetValidFrameCalleeObject(JSContext *cx, JSStackFrame *fp, jsval *vp)
{
Value v;
if (!fp->getValidCalleeObject(cx, &v))
if (!Valueify(fp)->getValidCalleeObject(cx, &v))
return false;
*vp = Jsvalify(v);
return true;
@ -1525,18 +1533,19 @@ JS_GetValidFrameCalleeObject(JSContext *cx, JSStackFrame *fp, jsval *vp)
JS_PUBLIC_API(JSBool)
JS_IsDebuggerFrame(JSContext *cx, JSStackFrame *fp)
{
return fp->isDebuggerFrame();
return Valueify(fp)->isDebuggerFrame();
}
JS_PUBLIC_API(jsval)
JS_GetFrameReturnValue(JSContext *cx, JSStackFrame *fp)
{
return Jsvalify(fp->returnValue());
return Jsvalify(Valueify(fp)->returnValue());
}
JS_PUBLIC_API(void)
JS_SetFrameReturnValue(JSContext *cx, JSStackFrame *fp, jsval rval)
JS_SetFrameReturnValue(JSContext *cx, JSStackFrame *fpArg, jsval rval)
{
StackFrame *fp = Valueify(fpArg);
#ifdef JS_METHODJIT
JS_ASSERT_IF(fp->isScriptFrame(), fp->script()->debugMode);
#endif
@ -1590,7 +1599,7 @@ JS_SetDestroyScriptHook(JSRuntime *rt, JSDestroyScriptHook hook,
/***************************************************************************/
JS_PUBLIC_API(JSBool)
JS_EvaluateUCInStackFrame(JSContext *cx, JSStackFrame *fp,
JS_EvaluateUCInStackFrame(JSContext *cx, JSStackFrame *fpArg,
const jschar *chars, uintN length,
const char *filename, uintN lineno,
jsval *rval)
@ -1600,7 +1609,7 @@ JS_EvaluateUCInStackFrame(JSContext *cx, JSStackFrame *fp,
if (!CheckDebugMode(cx))
return false;
JSObject *scobj = JS_GetFrameScopeChain(cx, fp);
JSObject *scobj = JS_GetFrameScopeChain(cx, fpArg);
if (!scobj)
return false;
@ -1614,7 +1623,8 @@ JS_EvaluateUCInStackFrame(JSContext *cx, JSStackFrame *fp,
* we use a static level that will cause us not to attempt to optimize
* variable references made by this frame.
*/
JSScript *script = Compiler::compileScript(cx, scobj, fp, fp->principals(cx),
StackFrame *fp = Valueify(fpArg);
JSScript *script = Compiler::compileScript(cx, scobj, fp, fp->scopeChain().principals(cx),
TCF_COMPILE_N_GO, chars, length,
filename, lineno, cx->findVersion(),
NULL, UpvarCookie::UPVAR_LEVEL_LIMIT);
@ -1622,7 +1632,8 @@ JS_EvaluateUCInStackFrame(JSContext *cx, JSStackFrame *fp,
if (!script)
return false;
bool ok = Execute(cx, *scobj, script, fp, JSFRAME_DEBUGGER | JSFRAME_EVAL, Valueify(rval));
uintN evalFlags = StackFrame::DEBUGGER | StackFrame::EVAL;
bool ok = Execute(cx, *scobj, script, fp, evalFlags, Valueify(rval));
js_DestroyScript(cx, script);
return ok;
@ -2423,7 +2434,7 @@ jstv_Lineno(JSContext *cx, JSStackFrame *fp)
JS_FRIEND_API(void)
js::StoreTraceVisState(JSContext *cx, TraceVisState s, TraceVisExitReason r)
{
JSStackFrame *fp = cx->fp();
StackFrame *fp = cx->fp();
char *script_file = jstv_Filename(fp);
JSHashNumber hash = JS_HashString(script_file);
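
Every public entry point in the jsdbgapi.cpp hunks above now converts between the opaque JSStackFrame of the public API and the engine-internal js::StackFrame. A minimal sketch of what the Valueify/Jsvalify helpers amount to; the names and direction come from the diff, while the reinterpret_cast implementation is an assumption:

    // JSStackFrame stays the opaque API type; js::StackFrame is the real
    // frame class, now declared in vm/Stack.h.
    static inline js::StackFrame *
    Valueify(JSStackFrame *fp)      /* public -> internal */
    {
        return reinterpret_cast<js::StackFrame *>(fp);
    }

    static inline JSStackFrame *
    Jsvalify(js::StackFrame *fp)    /* internal -> public */
    {
        return reinterpret_cast<JSStackFrame *>(fp);
    }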

View file

@ -2211,7 +2211,7 @@ BindNameToSlot(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
}
if (cookie.isFree()) {
JSStackFrame *caller = cg->parser->callerFrame;
StackFrame *caller = cg->parser->callerFrame;
if (caller) {
JS_ASSERT(cg->compileAndGo());

View file

@ -64,10 +64,10 @@
#include "jsstaticcheck.h"
#include "jswrapper.h"
#include "jscntxtinlines.h"
#include "jsinterpinlines.h"
#include "jsobjinlines.h"
#include "vm/Stack-inl.h"
using namespace js;
using namespace js::gc;
@ -267,7 +267,7 @@ InitExnPrivate(JSContext *cx, JSObject *exnObject, JSString *message,
JSErrorReporter older;
JSExceptionState *state;
jsid callerid;
JSStackFrame *fp, *fpstop;
StackFrame *fp, *fpstop;
size_t stackDepth, valueCount, size;
JSBool overflow;
JSExnPrivate *priv;
@ -693,7 +693,7 @@ static JSBool
Exception(JSContext *cx, uintN argc, Value *vp)
{
JSString *message, *filename;
JSStackFrame *fp;
StackFrame *fp;
/*
* ECMA ed. 3, 15.11.1 requires Error, etc., to construct even when

View file

@ -74,3 +74,9 @@ JS_UnwrapObject(JSObject *obj)
{
return obj->unwrap();
}
JS_FRIEND_API(JSObject *)
JS_GetFrameScopeChainRaw(JSStackFrame *fp)
{
return &Valueify(fp)->scopeChain();
}

View file

@ -54,6 +54,9 @@ JS_FindCompilationScope(JSContext *cx, JSObject *obj);
extern JS_FRIEND_API(JSObject *)
JS_UnwrapObject(JSObject *obj);
extern JS_FRIEND_API(JSObject *)
JS_GetFrameScopeChainRaw(JSStackFrame *fp);
JS_END_EXTERN_C
#endif /* jsfriendapi_h___ */
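
The "Raw" variant declared here is what the nsGlobalWindow hunk at the top of this diff switches to. Unlike JS_GetFrameScopeChain in jsdbgapi.cpp, which enters the frame's compartment and forces creation of the call object first, the raw helper simply returns the frame's current scope chain. A sketch of the intended embedder-side use, assuming fp was obtained from JS_FrameIterator:

    /* Read the scope chain without entering the compartment or reifying
       call/arguments objects (contrast JS_GetFrameScopeChain). */
    JSObject *scope = NULL;
    if (fp && JS_IsScriptFrame(cx, fp))
        scope = JS_GetFrameScopeChainRaw(fp);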

View file

@ -86,12 +86,12 @@
#endif
#include "jsatominlines.h"
#include "jscntxtinlines.h"
#include "jsfuninlines.h"
#include "jsinterpinlines.h"
#include "jsobjinlines.h"
#include "jsscriptinlines.h"
#include "vm/Stack-inl.h"
using namespace js;
using namespace js::gc;
@ -102,7 +102,7 @@ JSObject::getThrowTypeError() const
}
JSBool
js_GetArgsValue(JSContext *cx, JSStackFrame *fp, Value *vp)
js_GetArgsValue(JSContext *cx, StackFrame *fp, Value *vp)
{
JSObject *argsobj;
@ -119,7 +119,7 @@ js_GetArgsValue(JSContext *cx, JSStackFrame *fp, Value *vp)
}
JSBool
js_GetArgsProperty(JSContext *cx, JSStackFrame *fp, jsid id, Value *vp)
js_GetArgsProperty(JSContext *cx, StackFrame *fp, jsid id, Value *vp)
{
JS_ASSERT(fp->isFunctionFrame());
@ -233,7 +233,7 @@ struct STATIC_SKIP_INFERENCE PutArg
};
JSObject *
js_GetArgsObject(JSContext *cx, JSStackFrame *fp)
js_GetArgsObject(JSContext *cx, StackFrame *fp)
{
/*
* We must be in a function activation; the function must be lightweight
@ -273,7 +273,7 @@ js_GetArgsObject(JSContext *cx, JSStackFrame *fp)
}
void
js_PutArgsObject(JSContext *cx, JSStackFrame *fp)
js_PutArgsObject(StackFrame *fp)
{
JSObject &argsobj = fp->argsObj();
if (argsobj.isNormalArguments()) {
@ -357,7 +357,7 @@ args_delProperty(JSContext *cx, JSObject *obj, jsid id, Value *vp)
}
static JS_REQUIRES_STACK JSObject *
WrapEscapingClosure(JSContext *cx, JSStackFrame *fp, JSFunction *fun)
WrapEscapingClosure(JSContext *cx, StackFrame *fp, JSFunction *fun)
{
JS_ASSERT(fun->optimizedClosure());
JS_ASSERT(!fun->u.i.wrapper);
@ -526,7 +526,7 @@ ArgGetter(JSContext *cx, JSObject *obj, jsid id, Value *vp)
uintN arg = uintN(JSID_TO_INT(id));
if (arg < obj->getArgsInitialLength()) {
JS_ASSERT(!obj->getArgsElement(arg).isMagic(JS_ARGS_HOLE));
if (JSStackFrame *fp = (JSStackFrame *) obj->getPrivate())
if (StackFrame *fp = (StackFrame *) obj->getPrivate())
*vp = fp->canonicalActualArg(arg);
else
*vp = obj->getArgsElement(arg);
@ -575,7 +575,7 @@ ArgSetter(JSContext *cx, JSObject *obj, jsid id, JSBool strict, Value *vp)
if (JSID_IS_INT(id)) {
uintN arg = uintN(JSID_TO_INT(id));
if (arg < obj->getArgsInitialLength()) {
JSStackFrame *fp = (JSStackFrame *) obj->getPrivate();
StackFrame *fp = (StackFrame *) obj->getPrivate();
if (fp) {
JSScript *script = fp->functionScript();
if (script->usesArguments)
@ -798,13 +798,13 @@ args_finalize(JSContext *cx, JSObject *obj)
* otherwise reachable. An executing generator is rooted by its invocation. To
* distinguish the two cases (which imply different access paths to the
* generator object), we use the JSFRAME_FLOATING_GENERATOR flag, which is only
* set on the JSStackFrame kept in the generator object's JSGenerator.
* set on the StackFrame kept in the generator object's JSGenerator.
*/
static inline void
MaybeMarkGenerator(JSTracer *trc, JSObject *obj)
{
#if JS_HAS_GENERATORS
JSStackFrame *fp = (JSStackFrame *) obj->getPrivate();
StackFrame *fp = (StackFrame *) obj->getPrivate();
if (fp && fp->isFloatingGenerator()) {
JSObject *genobj = js_FloatingFrameToGenerator(fp)->obj;
MarkObject(trc, *genobj, "generator object");
@ -838,7 +838,7 @@ args_trace(JSTracer *trc, JSObject *obj)
*
* The JSClass functions below collaborate to lazily reflect and synchronize
* actual argument values, argument count, and callee function object stored
* in a JSStackFrame with their corresponding property values in the frame's
* in a StackFrame with their corresponding property values in the frame's
* arguments object.
*/
Class js_ArgumentsClass = {
@ -895,7 +895,7 @@ Class StrictArgumentsClass = {
}
/*
* A Declarative Environment object stores its active JSStackFrame pointer in
* A Declarative Environment object stores its active StackFrame pointer in
* its private slot, just as Call and Arguments objects do.
*/
Class js_DeclEnvClass = {
@ -930,7 +930,7 @@ CheckForEscapingClosure(JSContext *cx, JSObject *obj, Value *vp)
if (fun->needsWrapper()) {
LeaveTrace(cx);
JSStackFrame *fp = (JSStackFrame *) obj->getPrivate();
StackFrame *fp = (StackFrame *) obj->getPrivate();
if (fp) {
JSObject *wrapper = WrapEscapingClosure(cx, fp, fun);
if (!wrapper)
@ -993,7 +993,7 @@ NewCallObject(JSContext *cx, JSScript *script, JSObject &scopeChain, JSObject *c
}
static inline JSObject *
NewDeclEnvObject(JSContext *cx, JSStackFrame *fp)
NewDeclEnvObject(JSContext *cx, StackFrame *fp)
{
JSObject *envobj = js_NewGCObject(cx, FINALIZE_OBJECT2);
if (!envobj)
@ -1011,7 +1011,7 @@ NewDeclEnvObject(JSContext *cx, JSStackFrame *fp)
namespace js {
JSObject *
CreateFunCallObject(JSContext *cx, JSStackFrame *fp)
CreateFunCallObject(JSContext *cx, StackFrame *fp)
{
JS_ASSERT(fp->isNonEvalFunctionFrame());
JS_ASSERT(!fp->hasCallObj());
@ -1048,7 +1048,7 @@ CreateFunCallObject(JSContext *cx, JSStackFrame *fp)
}
JSObject *
CreateEvalCallObject(JSContext *cx, JSStackFrame *fp)
CreateEvalCallObject(JSContext *cx, StackFrame *fp)
{
JSObject *callobj = NewCallObject(cx, fp->script(), fp->scopeChain(), NULL);
if (!callobj)
@ -1083,7 +1083,7 @@ CopyValuesToCallObject(JSObject &callobj, uintN nargs, Value *argv, uintN nvars,
}
void
js_PutCallObject(JSContext *cx, JSStackFrame *fp)
js_PutCallObject(StackFrame *fp)
{
JSObject &callobj = fp->callObj();
JS_ASSERT(callobj.getPrivate() == fp);
@ -1094,7 +1094,7 @@ js_PutCallObject(JSContext *cx, JSStackFrame *fp)
if (fp->hasArgsObj()) {
if (!fp->hasOverriddenArgs())
callobj.setCallObjArguments(ObjectValue(fp->argsObj()));
js_PutArgsObject(cx, fp);
js_PutArgsObject(fp);
}
JSScript *script = fp->script();
@ -1160,7 +1160,7 @@ js_PutCallObject(JSContext *cx, JSStackFrame *fp)
}
JSBool JS_FASTCALL
js_PutCallObjectOnTrace(JSContext *cx, JSObject *callobj, uint32 nargs, Value *argv,
js_PutCallObjectOnTrace(JSObject *callobj, uint32 nargs, Value *argv,
uint32 nvars, Value *slots)
{
JS_ASSERT(callobj->isCall());
@ -1173,7 +1173,7 @@ js_PutCallObjectOnTrace(JSContext *cx, JSObject *callobj, uint32 nargs, Value *a
return true;
}
JS_DEFINE_CALLINFO_6(extern, BOOL, js_PutCallObjectOnTrace, CONTEXT, OBJECT, UINT32, VALUEPTR,
JS_DEFINE_CALLINFO_5(extern, BOOL, js_PutCallObjectOnTrace, OBJECT, UINT32, VALUEPTR,
UINT32, VALUEPTR, 0, nanojit::ACCSET_STORE_ANY)
namespace js {
@ -1181,7 +1181,7 @@ namespace js {
static JSBool
GetCallArguments(JSContext *cx, JSObject *obj, jsid id, Value *vp)
{
JSStackFrame *fp = obj->maybeCallObjStackFrame();
StackFrame *fp = obj->maybeCallObjStackFrame();
if (fp && !fp->hasOverriddenArgs()) {
JSObject *argsobj = js_GetArgsObject(cx, fp);
if (!argsobj)
@ -1196,7 +1196,7 @@ GetCallArguments(JSContext *cx, JSObject *obj, jsid id, Value *vp)
static JSBool
SetCallArguments(JSContext *cx, JSObject *obj, jsid id, JSBool strict, Value *vp)
{
if (JSStackFrame *fp = obj->maybeCallObjStackFrame())
if (StackFrame *fp = obj->maybeCallObjStackFrame())
fp->setOverriddenArgs();
obj->setCallObjArguments(*vp);
return true;
@ -1208,7 +1208,7 @@ GetCallArg(JSContext *cx, JSObject *obj, jsid id, Value *vp)
JS_ASSERT((int16) JSID_TO_INT(id) == JSID_TO_INT(id));
uintN i = (uint16) JSID_TO_INT(id);
if (JSStackFrame *fp = obj->maybeCallObjStackFrame())
if (StackFrame *fp = obj->maybeCallObjStackFrame())
*vp = fp->formalArg(i);
else
*vp = obj->callObjArg(i);
@ -1222,7 +1222,7 @@ SetCallArg(JSContext *cx, JSObject *obj, jsid id, JSBool strict, Value *vp)
uintN i = (uint16) JSID_TO_INT(id);
Value *argp;
if (JSStackFrame *fp = obj->maybeCallObjStackFrame())
if (StackFrame *fp = obj->maybeCallObjStackFrame())
argp = &fp->formalArg(i);
else
argp = &obj->callObjArg(i);
@ -1261,7 +1261,7 @@ GetCallVar(JSContext *cx, JSObject *obj, jsid id, Value *vp)
JS_ASSERT((int16) JSID_TO_INT(id) == JSID_TO_INT(id));
uintN i = (uint16) JSID_TO_INT(id);
if (JSStackFrame *fp = obj->maybeCallObjStackFrame())
if (StackFrame *fp = obj->maybeCallObjStackFrame())
*vp = fp->varSlot(i);
else
*vp = obj->callObjVar(i);
@ -1302,7 +1302,7 @@ SetCallVar(JSContext *cx, JSObject *obj, jsid id, JSBool strict, Value *vp)
#endif
Value *varp;
if (JSStackFrame *fp = obj->maybeCallObjStackFrame())
if (StackFrame *fp = obj->maybeCallObjStackFrame())
varp = &fp->varSlot(i);
else
varp = &obj->callObjVar(i);
@ -1379,7 +1379,7 @@ static void
call_trace(JSTracer *trc, JSObject *obj)
{
JS_ASSERT(obj->isCall());
if (JSStackFrame *fp = obj->maybeCallObjStackFrame()) {
if (StackFrame *fp = obj->maybeCallObjStackFrame()) {
/*
* FIXME: Hide copies of stack values rooted by fp from the Cycle
* Collector, which currently lacks a non-stub Unlink implementation
@ -1420,7 +1420,7 @@ JS_PUBLIC_DATA(Class) js_CallClass = {
};
bool
JSStackFrame::getValidCalleeObject(JSContext *cx, Value *vp)
StackFrame::getValidCalleeObject(JSContext *cx, Value *vp)
{
if (!isFunctionFrame()) {
vp->setUndefined();
@ -1584,7 +1584,7 @@ fun_getProperty(JSContext *cx, JSObject *obj, jsid id, Value *vp)
JSFunction *fun = obj->getFunctionPrivate();
/* Find fun's top-most activation record. */
JSStackFrame *fp;
StackFrame *fp;
for (fp = js_GetTopStackFrame(cx);
fp && (fp->maybeFun() != fun || fp->isEvalOrDebuggerFrame());
fp = fp->prev()) {
@ -2137,7 +2137,7 @@ js_fun_call(JSContext *cx, uintN argc, Value *vp)
/* Allocate stack space for fval, obj, and the args. */
InvokeArgsGuard args;
if (!cx->stack().pushInvokeArgs(cx, argc, &args))
if (!cx->stack.pushInvokeArgs(cx, argc, &args))
return JS_FALSE;
/* Push fval, thisv, and the args. */
@ -2145,7 +2145,7 @@ js_fun_call(JSContext *cx, uintN argc, Value *vp)
args.thisv() = thisv;
memcpy(args.argv(), argv, argc * sizeof *argv);
bool ok = Invoke(cx, args, 0);
bool ok = Invoke(cx, args);
*vp = args.rval();
return ok;
}
@ -2188,7 +2188,7 @@ js_fun_apply(JSContext *cx, uintN argc, Value *vp)
uintN n = uintN(JS_MIN(length, JS_ARGS_LENGTH_MAX));
InvokeArgsGuard args;
if (!cx->stack().pushInvokeArgs(cx, n, &args))
if (!cx->stack.pushInvokeArgs(cx, n, &args))
return false;
/* Push fval, obj, and aobj's elements as args. */
@ -2200,7 +2200,7 @@ js_fun_apply(JSContext *cx, uintN argc, Value *vp)
return false;
/* Step 9. */
if (!Invoke(cx, args, 0))
if (!Invoke(cx, args))
return false;
*vp = args.rval();
return true;
@ -2301,7 +2301,7 @@ CallOrConstructBoundFunction(JSContext *cx, uintN argc, Value *vp)
const Value &boundThis = obj->getBoundFunctionThis();
InvokeArgsGuard args;
if (!cx->stack().pushInvokeArgs(cx, argc + argslen, &args))
if (!cx->stack.pushInvokeArgs(cx, argc + argslen, &args))
return false;
/* 15.3.4.5.1, 15.3.4.5.2 step 4. */
@ -2314,7 +2314,7 @@ CallOrConstructBoundFunction(JSContext *cx, uintN argc, Value *vp)
if (!constructing)
args.thisv() = boundThis;
if (constructing ? !InvokeConstructor(cx, args) : !Invoke(cx, args, 0))
if (constructing ? !InvokeConstructor(cx, args) : !Invoke(cx, args))
return false;
*vp = args.rval();
@ -2961,9 +2961,7 @@ js_DefineFunction(JSContext *cx, JSObject *obj, jsid id, Native native,
return fun;
}
#if (JSV2F_CONSTRUCT & JSV2F_SEARCH_STACK)
# error "JSINVOKE_CONSTRUCT and JSV2F_SEARCH_STACK are not disjoint!"
#endif
JS_STATIC_ASSERT((JSV2F_CONSTRUCT & JSV2F_SEARCH_STACK) == 0);
JSFunction *
js_ValueToFunction(JSContext *cx, const Value *vp, uintN flags)

View file

@ -489,7 +489,7 @@ js_DefineFunction(JSContext *cx, JSObject *obj, jsid id, js::Native native,
* fact that JSINVOKE_CONSTRUCT (aka JSFRAME_CONSTRUCTING) is 1, and test that
* with #if/#error in jsfun.c.
*/
#define JSV2F_CONSTRUCT JSINVOKE_CONSTRUCT
#define JSV2F_CONSTRUCT ((uintN)js::INVOKE_CONSTRUCTOR)
#define JSV2F_SEARCH_STACK 0x10000
extern JSFunction *
@ -508,19 +508,19 @@ extern JSObject * JS_FASTCALL
js_CreateCallObjectOnTrace(JSContext *cx, JSFunction *fun, JSObject *callee, JSObject *scopeChain);
extern void
js_PutCallObject(JSContext *cx, JSStackFrame *fp);
js_PutCallObject(js::StackFrame *fp);
extern JSBool JS_FASTCALL
js_PutCallObjectOnTrace(JSContext *cx, JSObject *scopeChain, uint32 nargs,
js::Value *argv, uint32 nvars, js::Value *slots);
js_PutCallObjectOnTrace(JSObject *scopeChain, uint32 nargs, js::Value *argv,
uint32 nvars, js::Value *slots);
namespace js {
JSObject *
CreateFunCallObject(JSContext *cx, JSStackFrame *fp);
CreateFunCallObject(JSContext *cx, StackFrame *fp);
JSObject *
CreateEvalCallObject(JSContext *cx, JSStackFrame *fp);
CreateEvalCallObject(JSContext *cx, StackFrame *fp);
extern JSBool
GetCallArg(JSContext *cx, JSObject *obj, jsid id, js::Value *vp);
@ -550,10 +550,10 @@ SetCallUpvar(JSContext *cx, JSObject *obj, jsid id, JSBool strict, js::Value *vp
} // namespace js
extern JSBool
js_GetArgsValue(JSContext *cx, JSStackFrame *fp, js::Value *vp);
js_GetArgsValue(JSContext *cx, js::StackFrame *fp, js::Value *vp);
extern JSBool
js_GetArgsProperty(JSContext *cx, JSStackFrame *fp, jsid id, js::Value *vp);
js_GetArgsProperty(JSContext *cx, js::StackFrame *fp, jsid id, js::Value *vp);
/*
* Get the arguments object for the given frame. If the frame is strict mode
@ -566,10 +566,10 @@ js_GetArgsProperty(JSContext *cx, JSStackFrame *fp, jsid id, js::Value *vp);
* function.
*/
extern JSObject *
js_GetArgsObject(JSContext *cx, JSStackFrame *fp);
js_GetArgsObject(JSContext *cx, js::StackFrame *fp);
extern void
js_PutArgsObject(JSContext *cx, JSStackFrame *fp);
js_PutArgsObject(js::StackFrame *fp);
inline bool
js_IsNamedLambda(JSFunction *fun) { return (fun->flags & JSFUN_LAMBDA) && fun->atom; }

View file

@ -85,8 +85,6 @@
#endif
#include "jsprobes.h"
#include "jscntxtinlines.h"
#include "jsinterpinlines.h"
#include "jsobjinlines.h"
#include "jshashtable.h"
#include "jsweakmap.h"
@ -968,8 +966,8 @@ RecordNativeStackTopForGC(JSContext *cx)
#ifdef JS_THREADSAFE
/* Record the stack top here only if we are called from a request. */
JS_ASSERT(cx->thread->data.requestDepth >= ctd->requestThreshold);
if (cx->thread->data.requestDepth == ctd->requestThreshold)
JS_ASSERT(cx->thread()->data.requestDepth >= ctd->requestThreshold);
if (cx->thread()->data.requestDepth == ctd->requestThreshold)
return;
#endif
ctd->recordStackTop();
@ -1235,7 +1233,7 @@ bool
CheckAllocation(JSContext *cx)
{
#ifdef JS_THREADSAFE
JS_ASSERT(cx->thread);
JS_ASSERT(cx->thread());
#endif
JS_ASSERT(!cx->runtime->gcRunning);
return true;
@ -1642,7 +1640,7 @@ gc_lock_traversal(const GCLocks::Entry &entry, JSTracer *trc)
}
void
js_TraceStackFrame(JSTracer *trc, JSStackFrame *fp)
js_TraceStackFrame(JSTracer *trc, StackFrame *fp)
{
MarkObject(trc, fp->scopeChain(), "scope chain");
if (fp->isDummyFrame())
@ -2622,9 +2620,9 @@ LetOtherGCFinish(JSContext *cx)
{
JSRuntime *rt = cx->runtime;
JS_ASSERT(rt->gcThread);
JS_ASSERT(cx->thread != rt->gcThread);
JS_ASSERT(cx->thread() != rt->gcThread);
size_t requestDebit = cx->thread->data.requestDepth ? 1 : 0;
size_t requestDebit = cx->thread()->data.requestDepth ? 1 : 0;
JS_ASSERT(requestDebit <= rt->requestCount);
#ifdef JS_TRACER
JS_ASSERT_IF(requestDebit == 0, !JS_ON_TRACE(cx));
@ -2693,7 +2691,7 @@ AutoGCSession::AutoGCSession(JSContext *cx)
JSRuntime *rt = cx->runtime;
#ifdef JS_THREADSAFE
if (rt->gcThread && rt->gcThread != cx->thread)
if (rt->gcThread && rt->gcThread != cx->thread())
LetOtherGCFinish(cx);
#endif
@ -2702,7 +2700,7 @@ AutoGCSession::AutoGCSession(JSContext *cx)
#ifdef JS_THREADSAFE
/* No other thread is in GC, so indicate that we're now in GC. */
JS_ASSERT(!rt->gcThread);
rt->gcThread = cx->thread;
rt->gcThread = cx->thread();
/*
* Notify operation callbacks on other threads, which will give them a
@ -2712,7 +2710,7 @@ AutoGCSession::AutoGCSession(JSContext *cx)
*/
for (JSThread::Map::Range r = rt->threads.all(); !r.empty(); r.popFront()) {
JSThread *thread = r.front().value;
if (thread != cx->thread)
if (thread != cx->thread())
thread->data.triggerOperationCallback(rt);
}
@ -2722,7 +2720,7 @@ AutoGCSession::AutoGCSession(JSContext *cx)
* JS_NOTIFY_REQUEST_DONE, which will wake us up, is only called on
* rt->requestCount transitions to 0.
*/
size_t requestDebit = cx->thread->data.requestDepth ? 1 : 0;
size_t requestDebit = cx->thread()->data.requestDepth ? 1 : 0;
JS_ASSERT(requestDebit <= rt->requestCount);
if (requestDebit != rt->requestCount) {
rt->requestCount -= requestDebit;
@ -2750,7 +2748,7 @@ AutoGCSession::~AutoGCSession()
JSRuntime *rt = context->runtime;
rt->gcRunning = false;
#ifdef JS_THREADSAFE
JS_ASSERT(rt->gcThread == context->thread);
JS_ASSERT(rt->gcThread == context->thread());
rt->gcThread = NULL;
JS_NOTIFY_GC_DONE(rt);
#endif
@ -2773,7 +2771,7 @@ GCUntilDone(JSContext *cx, JSCompartment *comp, JSGCInvocationKind gckind GCTIM
rt->gcPoke = true;
#ifdef JS_THREADSAFE
JS_ASSERT(rt->gcThread);
if (rt->gcThread != cx->thread) {
if (rt->gcThread != cx->thread()) {
/* We do not return until another GC finishes. */
LetOtherGCFinish(cx);
}
@ -2919,7 +2917,7 @@ SetProtoCheckingForCycles(JSContext *cx, JSObject *obj, JSObject *proto)
* request.
*/
#ifdef JS_THREADSAFE
JS_ASSERT(cx->thread->data.requestDepth);
JS_ASSERT(cx->thread()->data.requestDepth);
/*
* This is only necessary if AutoGCSession below would wait for GC to
@ -3000,7 +2998,7 @@ TraceRuntime(JSTracer *trc)
JSRuntime *rt = cx->runtime;
AutoLockGC lock(rt);
if (rt->gcThread != cx->thread) {
if (rt->gcThread != cx->thread()) {
AutoGCSession gcsession(cx);
AutoUnlockGC unlock(rt);
RecordNativeStackTopForGC(trc->context);

View file

@ -821,7 +821,7 @@ extern JS_FRIEND_API(bool)
js_GCThingIsMarked(void *thing, uintN color);
extern void
js_TraceStackFrame(JSTracer *trc, JSStackFrame *fp);
js_TraceStackFrame(JSTracer *trc, js::StackFrame *fp);
namespace js {

Diff not shown because of its large size.

View file

@ -45,807 +45,19 @@
*/
#include "jsprvtd.h"
#include "jspubtd.h"
#include "jsfun.h"
#include "jsopcode.h"
#include "jsscript.h"
#include "jsvalue.h"
struct JSFrameRegs
{
STATIC_SKIP_INFERENCE
js::Value *sp; /* stack pointer */
jsbytecode *pc; /* program counter */
JSStackFrame *fp; /* active frame */
};
/* Flags to toggle js::Interpret() execution. */
enum JSInterpMode
{
JSINTERP_NORMAL = 0, /* interpreter is running normally */
JSINTERP_RECORD = 1, /* interpreter has been started to record/run traces */
JSINTERP_SAFEPOINT = 2, /* interpreter should leave on a method JIT safe point */
JSINTERP_PROFILE = 3 /* interpreter should profile a loop */
};
/* Flags used in JSStackFrame::flags_ */
enum JSFrameFlags
{
/* Primary frame type */
JSFRAME_GLOBAL = 0x1, /* frame pushed for a global script */
JSFRAME_FUNCTION = 0x2, /* frame pushed for a scripted call */
JSFRAME_DUMMY = 0x4, /* frame pushed for bookkeeping */
/* Frame subtypes */
JSFRAME_EVAL = 0x8, /* frame pushed for eval() or debugger eval */
JSFRAME_DEBUGGER = 0x10, /* frame pushed for debugger eval */
JSFRAME_GENERATOR = 0x20, /* frame is associated with a generator */
JSFRAME_FLOATING_GENERATOR = 0x40, /* frame is in generator obj, not on stack */
JSFRAME_CONSTRUCTING = 0x80, /* frame is for a constructor invocation */
/* Temporary frame states */
JSFRAME_YIELDING = 0x200, /* js::Interpret dispatched JSOP_YIELD */
JSFRAME_FINISHED_IN_INTERP = 0x400, /* set if frame finished in Interpret() */
/* Concerning function arguments */
JSFRAME_OVERRIDE_ARGS = 0x1000, /* overridden arguments local variable */
JSFRAME_OVERFLOW_ARGS = 0x2000, /* numActualArgs > numFormalArgs */
JSFRAME_UNDERFLOW_ARGS = 0x4000, /* numActualArgs < numFormalArgs */
/* Lazy frame initialization */
JSFRAME_HAS_IMACRO_PC = 0x8000, /* frame has imacpc value available */
JSFRAME_HAS_CALL_OBJ = 0x10000, /* frame has a callobj reachable from scopeChain_ */
JSFRAME_HAS_ARGS_OBJ = 0x20000, /* frame has an argsobj in JSStackFrame::args */
JSFRAME_HAS_HOOK_DATA = 0x40000, /* frame has hookData_ set */
JSFRAME_HAS_ANNOTATION = 0x80000, /* frame has annotation_ set */
JSFRAME_HAS_RVAL = 0x100000, /* frame has rval_ set */
JSFRAME_HAS_SCOPECHAIN = 0x200000, /* frame has scopeChain_ set */
JSFRAME_HAS_PREVPC = 0x400000 /* frame has prevpc_ set */
};
namespace js { namespace mjit { struct JITScript; } }
/*
* A stack frame is a part of a stack segment (see js::StackSegment) which is
* on the per-thread VM stack (see js::StackSpace).
*/
struct JSStackFrame
{
private:
mutable uint32 flags_; /* bits described by JSFrameFlags */
union { /* describes what code is executing in a */
JSScript *script; /* global frame */
JSFunction *fun; /* function frame, pre GetScopeChain */
} exec;
union { /* describes the arguments of a function */
uintN nactual; /* pre GetArgumentsObject */
JSObject *obj; /* post GetArgumentsObject */
JSScript *script; /* eval has no args, but needs a script */
} args;
mutable JSObject *scopeChain_; /* current scope chain */
JSStackFrame *prev_; /* previous cx->regs->fp */
void *ncode_; /* return address for method JIT */
/* Lazily initialized */
js::Value rval_; /* return value of the frame */
jsbytecode *prevpc_; /* pc of previous frame */
jsbytecode *imacropc_; /* pc of macro caller */
void *hookData_; /* closure returned by call hook */
void *annotation_; /* perhaps remove with bug 546848 */
friend class js::StackSpace;
friend class js::FrameRegsIter;
friend struct JSContext;
inline void initPrev(JSContext *cx);
public:
/*
* Stack frame sort (see JSStackFrame comment above)
*
* A stack frame may have one of three types, which determines which
* members of the frame may be accessed and other invariants:
*
* global frame: execution of global code or an eval in global code
* function frame: execution of function code or an eval in a function
* dummy frame: bookkeeping frame (read: hack)
*
*/
bool isFunctionFrame() const {
return !!(flags_ & JSFRAME_FUNCTION);
}
bool isGlobalFrame() const {
return !!(flags_ & JSFRAME_GLOBAL);
}
bool isDummyFrame() const {
return !!(flags_ & JSFRAME_DUMMY);
}
bool isScriptFrame() const {
bool retval = !!(flags_ & (JSFRAME_FUNCTION | JSFRAME_GLOBAL));
JS_ASSERT(retval == !isDummyFrame());
return retval;
}
/*
* Eval frames
*
* As noted above, global and function frames may optionally be 'eval
* frames'. Eval code shares its parent's arguments which means that the
* arg-access members of JSStackFrame may not be used for eval frames.
* Search for 'hasArgs' below for more details.
*
* A further sub-classification of eval frames is whether the frame was
* pushed for an ES5 strict-mode eval().
*/
bool isEvalFrame() const {
JS_ASSERT_IF(flags_ & JSFRAME_EVAL, isScriptFrame());
return flags_ & JSFRAME_EVAL;
}
bool isNonEvalFunctionFrame() const {
return (flags_ & (JSFRAME_FUNCTION | JSFRAME_EVAL)) == JSFRAME_FUNCTION;
}
bool isStrictEvalFrame() const {
return isEvalFrame() && script()->strictModeCode;
}
bool isNonStrictEvalFrame() const {
return isEvalFrame() && !script()->strictModeCode;
}
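    /*
     * For example, eval called from within a function pushes a frame with
     * isFunctionFrame() && isEvalFrame(). Such a frame shares the enclosing
     * function's arguments (so maybeFormalArgs() below returns NULL), and the
     * eval's script lives in args.script rather than being derived from a
     * callee function.
     */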
/*
* Frame initialization
*
* After acquiring a pointer to an uninitialized stack frame on the VM
* stack from js::StackSpace, these members are used to initialize the
* stack frame before officially pushing the frame into the context.
* Collecting frame initialization into a set of inline helpers allows
* simpler reasoning and makes call-optimization easier.
*/
/* Used for Invoke, Interpret, trace-jit LeaveTree, and method-jit stubs. */
inline void initCallFrame(JSContext *cx, JSObject &callee, JSFunction *fun,
uint32 nactual, uint32 flags);
/* Used for SessionInvoke. */
inline void resetInvokeCallFrame();
/* Called by method-jit stubs and serve as a specification for jit-code. */
inline void initCallFrameCallerHalf(JSContext *cx, uint32 flags, void *ncode);
inline void initCallFrameEarlyPrologue(JSFunction *fun, uint32 nactual);
inline void initCallFrameLatePrologue();
/* Used for eval. */
inline void initEvalFrame(JSContext *cx, JSScript *script, JSStackFrame *prev,
uint32 flags);
inline void initGlobalFrame(JSScript *script, JSObject &chain, uint32 flags);
/* Used when activating generators. */
inline void stealFrameAndSlots(js::Value *vp, JSStackFrame *otherfp,
js::Value *othervp, js::Value *othersp);
/* Perhaps one fine day we will remove dummy frames. */
inline void initDummyFrame(JSContext *cx, JSObject &chain);
/*
* Previous frame
*
* A frame's 'prev' frame is either null or the previous frame pointed to
* by cx->regs->fp when this frame was pushed. Often, given two prev-linked
* frames, the next-frame is a function or eval that was called by the
* prev-frame, but not always: the prev-frame may have called a native that
* reentered the VM through JS_CallFunctionValue on the same context
* (without calling JS_SaveFrameChain) which pushed the next-frame. Thus,
* 'prev' has little semantic meaning and basically just tells the VM what
* to set cx->regs->fp to when this frame is popped.
*/
JSStackFrame *prev() const {
return prev_;
}
inline void resetGeneratorPrev(JSContext *cx);
/*
* Frame slots
*
* A frame's 'slots' are the fixed slots associated with the frame (like
* local variables) followed by an expression stack holding temporary
* values. A frame's 'base' is the base of the expression stack.
*/
js::Value *slots() const {
return (js::Value *)(this + 1);
}
js::Value *base() const {
return slots() + script()->nfixed;
}
js::Value &varSlot(uintN i) {
JS_ASSERT(i < script()->nfixed);
JS_ASSERT_IF(maybeFun(), i < script()->bindings.countVars());
return slots()[i];
}
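    /*
     * For example, local variable i lives at slots()[i], the expression stack
     * starts at base() == slots() + script()->nfixed, and the operand stack
     * depth of the active frame is regs->sp - base().
     */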
/*
* Script
*
* All function and global frames have an associated JSScript which holds
* the bytecode being executed for the frame.
*/
/*
* Get the frame's current bytecode, assuming |this| is in |cx|.
* |next| is the frame whose prev == this; NULL if not known or if this == cx->fp().
*/
jsbytecode *pc(JSContext *cx, JSStackFrame *next = NULL);
jsbytecode *prevpc() {
JS_ASSERT((prev_ != NULL) && (flags_ & JSFRAME_HAS_PREVPC));
return prevpc_;
}
JSScript *script() const {
JS_ASSERT(isScriptFrame());
return isFunctionFrame()
? isEvalFrame() ? args.script : fun()->script()
: exec.script;
}
JSScript *functionScript() const {
JS_ASSERT(isFunctionFrame());
return isEvalFrame() ? args.script : fun()->script();
}
JSScript *globalScript() const {
JS_ASSERT(isGlobalFrame());
return exec.script;
}
JSScript *maybeScript() const {
return isScriptFrame() ? script() : NULL;
}
size_t numFixed() const {
return script()->nfixed;
}
size_t numSlots() const {
return script()->nslots;
}
size_t numGlobalVars() const {
JS_ASSERT(isGlobalFrame());
return exec.script->nfixed;
}
/*
* Function
*
* All function frames have an associated interpreted JSFunction.
*/
JSFunction* fun() const {
JS_ASSERT(isFunctionFrame());
return exec.fun;
}
JSFunction* maybeFun() const {
return isFunctionFrame() ? fun() : NULL;
}
/*
* Arguments
*
* Only non-eval function frames have arguments. A frame follows its
* arguments contiguously in memory. The arguments pushed by the caller are
* the 'actual' arguments. The declared arguments of the callee are the
* 'formal' arguments. When the caller passes no more actual arguments than
* formals, the actual and formal arguments are the same array (but with
* different extents). When the caller passes too many arguments, the
* formal subset of the actual arguments is copied onto the top of the
* stack. This allows the engine to maintain a jit-time constant offset of
* arguments from the frame pointer. Since the formal subset of the actual
* arguments is potentially on the stack twice, it is important for all
* reads/writes to refer to the same canonical memory location.
*
* An arguments object (the object returned by the 'arguments' keyword) is
* lazily created, so a given function frame may or may not have one.
*/
/* True if this frame has arguments. Contrast with hasArgsObj. */
bool hasArgs() const {
return isNonEvalFunctionFrame();
}
uintN numFormalArgs() const {
JS_ASSERT(hasArgs());
return fun()->nargs;
}
js::Value &formalArg(uintN i) const {
JS_ASSERT(i < numFormalArgs());
return formalArgs()[i];
}
js::Value *formalArgs() const {
JS_ASSERT(hasArgs());
return (js::Value *)this - numFormalArgs();
}
js::Value *formalArgsEnd() const {
JS_ASSERT(hasArgs());
return (js::Value *)this;
}
js::Value *maybeFormalArgs() const {
return (flags_ & (JSFRAME_FUNCTION | JSFRAME_EVAL)) == JSFRAME_FUNCTION
? formalArgs()
: NULL;
}
inline uintN numActualArgs() const;
inline js::Value *actualArgs() const;
inline js::Value *actualArgsEnd() const;
inline js::Value &canonicalActualArg(uintN i) const;
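    /*
     * For example, if fun()->nargs == 2 and the caller supplies 4 actuals,
     * JSFRAME_OVERFLOW_ARGS is set: actualArgs() returns the 4 caller-pushed
     * values lower on the stack, while formalArgs() returns the 2-element
     * copy immediately below the frame. Reads and writes of arguments 0 and 1
     * must go through the formal copy (the canonical location, as
     * canonicalActualArg does), while arguments 2 and 3 are reachable only
     * via actualArgs().
     */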
/*
* Apply 'op' to each arg of the specified type. Stop if 'op' returns
* false. Return 'true' iff all 'op' calls returned true.
*/
template <class Op> inline bool forEachCanonicalActualArg(Op op);
template <class Op> inline bool forEachFormalArg(Op op);
inline void clearMissingArgs();
bool hasArgsObj() const {
return !!(flags_ & JSFRAME_HAS_ARGS_OBJ);
}
JSObject &argsObj() const {
JS_ASSERT(hasArgsObj());
JS_ASSERT(!isEvalFrame());
return *args.obj;
}
JSObject *maybeArgsObj() const {
return hasArgsObj() ? &argsObj() : NULL;
}
inline void setArgsObj(JSObject &obj);
/*
* This value
*
* Every frame has a this value although, until 'this' is computed, the
* value may not be the semantically-correct 'this' value.
*
* The 'this' value is stored before the formal arguments for function
* frames and directly before the frame for global frames. The *Args
* members assert !isEvalFrame(), so we implement specialized inline
* methods for accessing 'this'. When the caller has static knowledge that
* a frame is a function or global frame, 'functionThis' and 'globalThis',
* respectively, allow more efficient access.
*/
js::Value &functionThis() const {
JS_ASSERT(isFunctionFrame());
if (isEvalFrame())
return ((js::Value *)this)[-1];
return formalArgs()[-1];
}
JSObject &constructorThis() const {
JS_ASSERT(hasArgs());
return formalArgs()[-1].toObject();
}
js::Value &globalThis() const {
JS_ASSERT(isGlobalFrame());
return ((js::Value *)this)[-1];
}
js::Value &thisValue() const {
if (flags_ & (JSFRAME_EVAL | JSFRAME_GLOBAL))
return ((js::Value *)this)[-1];
return formalArgs()[-1];
}
/*
* Callee
*
* Only function frames have a callee. An eval frame in a function has the
* same caller as its containing function frame.
*/
js::Value &calleev() const {
JS_ASSERT(isFunctionFrame());
if (isEvalFrame())
return ((js::Value *)this)[-2];
return formalArgs()[-2];
}
JSObject &callee() const {
JS_ASSERT(isFunctionFrame());
return calleev().toObject();
}
JSObject *maybeCallee() const {
return isFunctionFrame() ? &callee() : NULL;
}
js::CallReceiver callReceiver() const {
return js::CallReceiverFromArgv(formalArgs());
}
/*
* getValidCalleeObject is a fallible getter to compute the correct callee
* function object, which may require deferred cloning due to the JSObject
* methodReadBarrier. For a non-function frame, return true with *vp set
* from calleev, which may not be an object (it could be undefined).
*/
bool getValidCalleeObject(JSContext *cx, js::Value *vp);
/*
* Scope chain
*
* Every frame has a scopeChain which, when traversed via the 'parent' link
* to the root, indicates the current global object. A 'call object' is a
* node on a scope chain representing a function's activation record. A
* call object is used for dynamically-scoped name lookup and lexically-
* scoped upvar access. The call object holds the values of locals and
* arguments when a function returns (and its stack frame is popped). For
* performance reasons, call objects are created lazily for 'lightweight'
* functions, i.e., functions which are not statically known to require a
* call object. Thus, a given function frame may or may not have a call
* object. When a function does have a call object, it is found by walking
* up the scope chain until the first call object. Thus, it is important,
* when setting the scope chain, to indicate whether the new scope chain
* contains a new call object and thus changes the 'hasCallObj' state.
*
* NB: 'fp->hasCallObj()' implies that fp->callObj() needs to be 'put' when
* the frame is popped. Since the scope chain of a non-strict eval frame
* contains the call object of the parent (function) frame, it is possible
* to have:
* !fp->hasCallObj() && fp->scopeChain().isCall()
*/
JSObject &scopeChain() const {
JS_ASSERT_IF(!(flags_ & JSFRAME_HAS_SCOPECHAIN), isFunctionFrame());
if (!(flags_ & JSFRAME_HAS_SCOPECHAIN)) {
scopeChain_ = callee().getParent();
flags_ |= JSFRAME_HAS_SCOPECHAIN;
}
return *scopeChain_;
}
bool hasCallObj() const {
bool ret = !!(flags_ & JSFRAME_HAS_CALL_OBJ);
JS_ASSERT_IF(ret, !isNonStrictEvalFrame());
return ret;
}
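    /*
     * For example, a non-strict eval frame nested in a heavyweight function
     * starts its scope chain at the enclosing function's Call object, so
     * scopeChain().isCall() can hold even though hasCallObj() is false for
     * the eval frame itself; putting that Call object remains the enclosing
     * function frame's responsibility.
     */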
inline JSObject &callObj() const;
inline void setScopeChainNoCallObj(JSObject &obj);
inline void setScopeChainWithOwnCallObj(JSObject &obj);
inline void markActivationObjectsAsPut();
/*
* Frame compartment
*
* A stack frame's compartment is the frame's containing context's
* compartment when the frame was pushed.
*/
JSCompartment *compartment() const {
JS_ASSERT_IF(isScriptFrame(), scopeChain().compartment() == script()->compartment);
return scopeChain().compartment();
}
inline JSPrincipals *principals(JSContext *cx) const;
/*
* Imacropc
*
* A frame's IMacro pc is the bytecode address when an imacro started
* executing (guaranteed non-null). An imacro does not push a frame, so
* when the imacro finishes, the frame's IMacro pc becomes the current pc.
*/
bool hasImacropc() const {
return flags_ & JSFRAME_HAS_IMACRO_PC;
}
jsbytecode *imacropc() const {
JS_ASSERT(hasImacropc());
return imacropc_;
}
jsbytecode *maybeImacropc() const {
return hasImacropc() ? imacropc() : NULL;
}
void clearImacropc() {
flags_ &= ~JSFRAME_HAS_IMACRO_PC;
}
void setImacropc(jsbytecode *pc) {
JS_ASSERT(pc);
JS_ASSERT(!(flags_ & JSFRAME_HAS_IMACRO_PC));
imacropc_ = pc;
flags_ |= JSFRAME_HAS_IMACRO_PC;
}
/* Annotation (will be removed after bug 546848) */
void* annotation() const {
return (flags_ & JSFRAME_HAS_ANNOTATION) ? annotation_ : NULL;
}
void setAnnotation(void *annot) {
flags_ |= JSFRAME_HAS_ANNOTATION;
annotation_ = annot;
}
/* Debugger hook data */
bool hasHookData() const {
return !!(flags_ & JSFRAME_HAS_HOOK_DATA);
}
void* hookData() const {
JS_ASSERT(hasHookData());
return hookData_;
}
void* maybeHookData() const {
return hasHookData() ? hookData_ : NULL;
}
void setHookData(void *v) {
hookData_ = v;
flags_ |= JSFRAME_HAS_HOOK_DATA;
}
/* Return value */
const js::Value &returnValue() {
if (!(flags_ & JSFRAME_HAS_RVAL))
rval_.setUndefined();
return rval_;
}
void markReturnValue() {
flags_ |= JSFRAME_HAS_RVAL;
}
void setReturnValue(const js::Value &v) {
rval_ = v;
markReturnValue();
}
void clearReturnValue() {
rval_.setUndefined();
markReturnValue();
}
/* Native-code return address */
void *nativeReturnAddress() const {
return ncode_;
}
void setNativeReturnAddress(void *addr) {
ncode_ = addr;
}
void **addressOfNativeReturnAddress() {
return &ncode_;
}
/*
* Generator-specific members
*
* A non-eval function frame may optionally be the activation of a
* generator. For the most part, generator frames act like ordinary frames.
* For exceptions, see js_FloatingFrameIfGenerator.
*/
bool isGeneratorFrame() const {
return !!(flags_ & JSFRAME_GENERATOR);
}
bool isFloatingGenerator() const {
JS_ASSERT_IF(flags_ & JSFRAME_FLOATING_GENERATOR, isGeneratorFrame());
return !!(flags_ & JSFRAME_FLOATING_GENERATOR);
}
void initFloatingGenerator() {
JS_ASSERT(!(flags_ & JSFRAME_GENERATOR));
flags_ |= (JSFRAME_GENERATOR | JSFRAME_FLOATING_GENERATOR);
}
void unsetFloatingGenerator() {
flags_ &= ~JSFRAME_FLOATING_GENERATOR;
}
void setFloatingGenerator() {
flags_ |= JSFRAME_FLOATING_GENERATOR;
}
/*
* js::Execute pushes both global and function frames (since eval() in a
* function pushes a frame with isFunctionFrame() && isEvalFrame()). Most
* code should not care where a frame was pushed, but if it is necessary to
* pick out frames pushed by js::Execute, this is the right query:
*/
bool isFramePushedByExecute() const {
return !!(flags_ & (JSFRAME_GLOBAL | JSFRAME_EVAL));
}
/*
* Other flags
*/
bool isConstructing() const {
return !!(flags_ & JSFRAME_CONSTRUCTING);
}
uint32 isConstructingFlag() const {
JS_ASSERT(isFunctionFrame());
JS_ASSERT((flags_ & ~(JSFRAME_CONSTRUCTING | JSFRAME_FUNCTION)) == 0);
return flags_;
}
bool isDebuggerFrame() const {
return !!(flags_ & JSFRAME_DEBUGGER);
}
bool isEvalOrDebuggerFrame() const {
return !!(flags_ & (JSFRAME_EVAL | JSFRAME_DEBUGGER));
}
bool hasOverriddenArgs() const {
return !!(flags_ & JSFRAME_OVERRIDE_ARGS);
}
bool hasOverflowArgs() const {
return !!(flags_ & JSFRAME_OVERFLOW_ARGS);
}
void setOverriddenArgs() {
flags_ |= JSFRAME_OVERRIDE_ARGS;
}
bool isYielding() {
return !!(flags_ & JSFRAME_YIELDING);
}
void setYielding() {
flags_ |= JSFRAME_YIELDING;
}
void clearYielding() {
flags_ &= ~JSFRAME_YIELDING;
}
void setFinishedInInterpreter() {
flags_ |= JSFRAME_FINISHED_IN_INTERP;
}
bool finishedInInterpreter() const {
return !!(flags_ & JSFRAME_FINISHED_IN_INTERP);
}
/*
* Variables object accessors
*
* A stack frame's 'varobj' refers to the 'variables object' (ES3 term)
* associated with the Execution Context's VariableEnvironment (ES5 10.3).
*
* To compute the frame's varobj, the caller must supply the segment
* containing the frame (see js::StackSegment comment). As an abbreviation,
* the caller may pass the context if the frame is contained in that
* context's active segment.
*/
inline JSObject &varobj(js::StackSegment *seg) const;
inline JSObject &varobj(JSContext *cx) const;
/* Access to privates from the jits. */
static size_t offsetOfFlags() {
return offsetof(JSStackFrame, flags_);
}
static size_t offsetOfExec() {
return offsetof(JSStackFrame, exec);
}
void *addressOfArgs() {
return &args;
}
static size_t offsetOfScopeChain() {
return offsetof(JSStackFrame, scopeChain_);
}
JSObject **addressOfScopeChain() {
JS_ASSERT(flags_ & JSFRAME_HAS_SCOPECHAIN);
return &scopeChain_;
}
static size_t offsetOfPrev() {
return offsetof(JSStackFrame, prev_);
}
static size_t offsetOfReturnValue() {
return offsetof(JSStackFrame, rval_);
}
static ptrdiff_t offsetOfncode() {
return offsetof(JSStackFrame, ncode_);
}
static ptrdiff_t offsetOfCallee(JSFunction *fun) {
JS_ASSERT(fun != NULL);
return -(fun->nargs + 2) * sizeof(js::Value);
}
static ptrdiff_t offsetOfThis(JSFunction *fun) {
return fun == NULL
? -1 * ptrdiff_t(sizeof(js::Value))
: -(fun->nargs + 1) * ptrdiff_t(sizeof(js::Value));
}
static ptrdiff_t offsetOfFormalArg(JSFunction *fun, uintN i) {
JS_ASSERT(i < fun->nargs);
return (-(int)fun->nargs + i) * sizeof(js::Value);
}
static size_t offsetOfFixed(uintN i) {
return sizeof(JSStackFrame) + i * sizeof(js::Value);
}
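    /*
     * For example, with fun->nargs == 2: offsetOfCallee(fun) is
     * -4 * sizeof(js::Value), offsetOfThis(fun) is -3 * sizeof(js::Value),
     * the formals sit at offsets -2 and -1 Values, and offsetOfFixed(0) is
     * sizeof(JSStackFrame), matching the layout used by the jits.
     */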
/* Workaround for static asserts on private members. */
void staticAsserts() {
JS_STATIC_ASSERT(offsetof(JSStackFrame, rval_) % sizeof(js::Value) == 0);
JS_STATIC_ASSERT(sizeof(JSStackFrame) % sizeof(js::Value) == 0);
}
#ifdef JS_METHODJIT
js::mjit::JITScript *jit() {
return script()->getJIT(isConstructing());
}
#endif
void methodjitStaticAsserts();
#ifdef DEBUG
/* Poison scopeChain value set before a frame is flushed. */
static JSObject *const sInvalidScopeChain;
#endif
};
#include "vm/Stack.h"
namespace js {
static const size_t VALUES_PER_STACK_FRAME = sizeof(JSStackFrame) / sizeof(Value);
extern JSObject *
GetBlockChain(JSContext *cx, StackFrame *fp);
extern JSObject *
GetBlockChain(JSContext *cx, JSStackFrame *fp);
extern JSObject *
GetBlockChainFast(JSContext *cx, JSStackFrame *fp, JSOp op, size_t oplen);
GetBlockChainFast(JSContext *cx, StackFrame *fp, JSOp op, size_t oplen);
extern JSObject *
GetScopeChain(JSContext *cx);
@ -858,10 +70,10 @@ GetScopeChain(JSContext *cx);
* must reflect at runtime.
*/
extern JSObject *
GetScopeChain(JSContext *cx, JSStackFrame *fp);
GetScopeChain(JSContext *cx, StackFrame *fp);
extern JSObject *
GetScopeChainFast(JSContext *cx, JSStackFrame *fp, JSOp op, size_t oplen);
GetScopeChainFast(JSContext *cx, StackFrame *fp, JSOp op, size_t oplen);
/*
* Report an error that the this value passed as |this| in the given arguments
@ -881,16 +93,6 @@ ReportIncompatibleMethod(JSContext *cx, Value *vp, Class *clasp);
template <typename T>
bool GetPrimitiveThis(JSContext *cx, Value *vp, T *v);
inline void
PutActivationObjects(JSContext *cx, JSStackFrame *fp)
{
/* The order is important since js_PutCallObject does js_PutArgsObject. */
if (fp->hasCallObj())
js_PutCallObject(cx, fp);
else if (fp->hasArgsObj())
js_PutArgsObject(cx, fp);
}
/*
* ScriptPrologue/ScriptEpilogue must be called in pairs. ScriptPrologue
* must be called before the script executes. ScriptEpilogue must be called
@ -898,10 +100,10 @@ PutActivationObjects(JSContext *cx, JSStackFrame *fp)
*/
inline bool
ScriptPrologue(JSContext *cx, JSStackFrame *fp, JSScript *script);
ScriptPrologue(JSContext *cx, StackFrame *fp, JSScript *script);
inline bool
ScriptEpilogue(JSContext *cx, JSStackFrame *fp, bool ok);
ScriptEpilogue(JSContext *cx, StackFrame *fp, bool ok);
/*
* It is not valid to call ScriptPrologue when a generator is resumed or to
@ -911,18 +113,18 @@ ScriptEpilogue(JSContext *cx, JSStackFrame *fp, bool ok);
*/
inline bool
ScriptPrologueOrGeneratorResume(JSContext *cx, JSStackFrame *fp);
ScriptPrologueOrGeneratorResume(JSContext *cx, StackFrame *fp);
inline bool
ScriptEpilogueOrGeneratorYield(JSContext *cx, JSStackFrame *fp, bool ok);
ScriptEpilogueOrGeneratorYield(JSContext *cx, StackFrame *fp, bool ok);
/* Implemented in jsdbgapi: */
extern void
ScriptDebugPrologue(JSContext *cx, JSStackFrame *fp);
ScriptDebugPrologue(JSContext *cx, StackFrame *fp);
extern bool
ScriptDebugEpilogue(JSContext *cx, JSStackFrame *fp, bool ok);
ScriptDebugEpilogue(JSContext *cx, StackFrame *fp, bool ok);
/*
* For a given |call|, convert null/undefined |this| into the global object for
@ -940,18 +142,40 @@ BoxNonStrictThis(JSContext *cx, const CallReceiver &call);
* an optimization to avoid global-this computation).
*/
inline bool
ComputeThis(JSContext *cx, JSStackFrame *fp);
ComputeThis(JSContext *cx, StackFrame *fp);
/*
* Choose enumerator values so that the enum can be used directly as the
* stack frame flags.
*/
enum ConstructOption {
INVOKE_NORMAL = 0,
INVOKE_CONSTRUCTOR = StackFrame::CONSTRUCTING
};
JS_STATIC_ASSERT(INVOKE_CONSTRUCTOR != INVOKE_NORMAL);
static inline uintN
ToReportFlags(ConstructOption option)
{
return (uintN)option;
}
static inline uint32
ToFrameFlags(ConstructOption option)
{
return (uintN)option;
}
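/*
 * Illustrative sketch, not part of the patch (the Example* name below is
 * hypothetical): because INVOKE_CONSTRUCTOR is defined as
 * StackFrame::CONSTRUCTING, the option can be OR'd into or tested against a
 * frame's flags directly, with no translation table.
 */
static inline bool
ExampleIsConstructorCall(ConstructOption option)
{
    return (ToFrameFlags(option) & StackFrame::CONSTRUCTING) != 0;
}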
/*
* The js::InvokeArgumentsGuard passed to js_Invoke must come from an
* immediately-enclosing successful call to js::StackSpace::pushInvokeArgs,
* i.e., there must have been no un-popped pushes to cx->stack(). Furthermore,
* i.e., there must have been no un-popped pushes to cx->stack. Furthermore,
* |args.getvp()[0]| should be the callee, |args.getvp()[1]| should be |this|,
* and the range [args.getvp() + 2, args.getvp() + 2 + args.getArgc()) should
* be initialized actual arguments.
*/
extern JS_REQUIRES_STACK bool
Invoke(JSContext *cx, const CallArgs &args, uint32 flags);
Invoke(JSContext *cx, const CallArgs &args, ConstructOption option = INVOKE_NORMAL);
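/*
 * Illustrative usage sketch, not part of the patch (ExampleCallFunctionValue
 * is a hypothetical helper mirroring js_fun_call): reserve space for the
 * call on the VM stack with cx->stack.pushInvokeArgs, fill in the callee,
 * |this| and the actual arguments, then run Invoke and read the result back
 * out of the guard.
 */
static inline bool
ExampleCallFunctionValue(JSContext *cx, const Value &fval, const Value &thisv,
                         uintN argc, const Value *argv, Value *rval)
{
    InvokeArgsGuard args;
    if (!cx->stack.pushInvokeArgs(cx, argc, &args))
        return false;

    args.calleev() = fval;
    args.thisv() = thisv;
    for (uintN i = 0; i < argc; i++)
        args.argv()[i] = argv[i];

    if (!Invoke(cx, args))   /* option defaults to INVOKE_NORMAL */
        return false;
    *rval = args.rval();
    return true;
}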
/*
* Natives like sort/forEach/replace call Invoke repeatedly with the same
@ -979,29 +203,9 @@ Invoke(JSContext *cx, const CallArgs &args, uint32 flags);
*/
class InvokeSessionGuard;
/*
* Consolidated js_Invoke flags simply rename certain JSFRAME_* flags, so that
* we can share bits stored in JSStackFrame.flags and passed to:
*
* js_Invoke
* js_InternalInvoke
* js_ValueToFunction
* js_ValueToFunctionObject
* js_ValueToCallableObject
* js_ReportIsNotFunction
*
* See jsfun.h for the latter four and flag renaming macros.
*/
#define JSINVOKE_CONSTRUCT JSFRAME_CONSTRUCTING
/*
* Mask to isolate construct and iterator flags for use with jsfun.h functions.
*/
#define JSINVOKE_FUNFLAGS JSINVOKE_CONSTRUCT
/*
* "External" calls may come from C or C++ code using a JSContext on which no
* JS is running (!cx->fp), so they may need to push a dummy JSStackFrame.
* JS is running (!cx->fp), so they may need to push a dummy StackFrame.
*/
extern bool
@ -1037,17 +241,26 @@ ExternalInvokeConstructor(JSContext *cx, const Value &fval, uintN argc, Value *a
*/
extern JS_FORCES_STACK bool
Execute(JSContext *cx, JSObject &chain, JSScript *script,
JSStackFrame *prev, uintN flags, Value *result);
StackFrame *prev, uintN flags, Value *result);
/* Flags to toggle js::Interpret() execution. */
enum InterpMode
{
JSINTERP_NORMAL = 0, /* interpreter is running normally */
JSINTERP_RECORD = 1, /* interpreter has been started to record/run traces */
JSINTERP_SAFEPOINT = 2, /* interpreter should leave on a method JIT safe point */
JSINTERP_PROFILE = 3 /* interpreter should profile a loop */
};
/*
* Execute the caller-initialized frame for a user-defined script or function
* pointed to by cx->fp until completion or error.
*/
extern JS_REQUIRES_STACK JS_NEVER_INLINE bool
Interpret(JSContext *cx, JSStackFrame *stopFp, uintN inlineCallCount = 0, JSInterpMode mode = JSINTERP_NORMAL);
Interpret(JSContext *cx, StackFrame *stopFp, uintN inlineCallCount = 0, InterpMode mode = JSINTERP_NORMAL);
extern JS_REQUIRES_STACK bool
RunScript(JSContext *cx, JSScript *script, JSStackFrame *fp);
RunScript(JSContext *cx, JSScript *script, StackFrame *fp);
extern bool
CheckRedeclaration(JSContext *cx, JSObject *obj, jsid id, uintN attrs);
@ -1102,8 +315,6 @@ GetUpvar(JSContext *cx, uintN level, js::UpvarCookie cookie);
# endif
#endif
#define JS_MAX_INLINE_CALL_COUNT 3000
#if !JS_LONE_INTERPRET
# define JS_STATIC_INTERPRET static
#else

View file

@ -52,425 +52,7 @@
#include "jsfuninlines.h"
inline void
JSStackFrame::initPrev(JSContext *cx)
{
JS_ASSERT(flags_ & JSFRAME_HAS_PREVPC);
if (JSFrameRegs *regs = cx->regs) {
prev_ = regs->fp;
prevpc_ = regs->pc;
JS_ASSERT_IF(!prev_->isDummyFrame() && !prev_->hasImacropc(),
uint32(prevpc_ - prev_->script()->code) < prev_->script()->length);
} else {
prev_ = NULL;
#ifdef DEBUG
prevpc_ = (jsbytecode *)0xbadc;
#endif
}
}
inline void
JSStackFrame::resetGeneratorPrev(JSContext *cx)
{
flags_ |= JSFRAME_HAS_PREVPC;
initPrev(cx);
}
inline void
JSStackFrame::initCallFrame(JSContext *cx, JSObject &callee, JSFunction *fun,
uint32 nactual, uint32 flagsArg)
{
JS_ASSERT((flagsArg & ~(JSFRAME_CONSTRUCTING |
JSFRAME_OVERFLOW_ARGS |
JSFRAME_UNDERFLOW_ARGS)) == 0);
JS_ASSERT(fun == callee.getFunctionPrivate());
/* Initialize stack frame members. */
flags_ = JSFRAME_FUNCTION | JSFRAME_HAS_PREVPC | JSFRAME_HAS_SCOPECHAIN | flagsArg;
exec.fun = fun;
args.nactual = nactual; /* only need to write if over/under-flow */
scopeChain_ = callee.getParent();
initPrev(cx);
JS_ASSERT(!hasImacropc());
JS_ASSERT(!hasHookData());
JS_ASSERT(annotation() == NULL);
JS_ASSERT(!hasCallObj());
}
inline void
JSStackFrame::resetInvokeCallFrame()
{
/* Undo changes to frame made during execution; see initCallFrame */
JS_ASSERT(!(flags_ & ~(JSFRAME_FUNCTION |
JSFRAME_OVERFLOW_ARGS |
JSFRAME_UNDERFLOW_ARGS |
JSFRAME_OVERRIDE_ARGS |
JSFRAME_HAS_PREVPC |
JSFRAME_HAS_RVAL |
JSFRAME_HAS_SCOPECHAIN |
JSFRAME_HAS_ANNOTATION |
JSFRAME_HAS_HOOK_DATA |
JSFRAME_HAS_CALL_OBJ |
JSFRAME_HAS_ARGS_OBJ |
JSFRAME_FINISHED_IN_INTERP)));
/*
* Since the stack frame is usually popped after PutActivationObjects,
* these bits aren't cleared. The activation objects must have actually
* been put, though.
*/
JS_ASSERT_IF(flags_ & JSFRAME_HAS_CALL_OBJ, callObj().getPrivate() == NULL);
JS_ASSERT_IF(flags_ & JSFRAME_HAS_ARGS_OBJ, argsObj().getPrivate() == NULL);
flags_ &= JSFRAME_FUNCTION |
JSFRAME_OVERFLOW_ARGS |
JSFRAME_HAS_PREVPC |
JSFRAME_UNDERFLOW_ARGS;
JS_ASSERT(exec.fun == callee().getFunctionPrivate());
scopeChain_ = callee().getParent();
}
inline void
JSStackFrame::initCallFrameCallerHalf(JSContext *cx, uint32 flagsArg,
void *ncode)
{
JS_ASSERT((flagsArg & ~(JSFRAME_CONSTRUCTING |
JSFRAME_FUNCTION |
JSFRAME_OVERFLOW_ARGS |
JSFRAME_UNDERFLOW_ARGS)) == 0);
flags_ = JSFRAME_FUNCTION | flagsArg;
prev_ = cx->regs->fp;
ncode_ = ncode;
}
/*
* The "early prologue" refers to the members that are stored for the benefit
* of slow paths before initializing the rest of the members.
*/
inline void
JSStackFrame::initCallFrameEarlyPrologue(JSFunction *fun, uint32 nactual)
{
exec.fun = fun;
if (flags_ & (JSFRAME_OVERFLOW_ARGS | JSFRAME_UNDERFLOW_ARGS))
args.nactual = nactual;
}
/*
* The "late prologue" refers to the members that are stored after having
* checked for stack overflow and formal/actual arg mismatch.
*/
inline void
JSStackFrame::initCallFrameLatePrologue()
{
SetValueRangeToUndefined(slots(), script()->nfixed);
}
inline void
JSStackFrame::initEvalFrame(JSContext *cx, JSScript *script, JSStackFrame *prev, uint32 flagsArg)
{
JS_ASSERT(flagsArg & JSFRAME_EVAL);
JS_ASSERT((flagsArg & ~(JSFRAME_EVAL | JSFRAME_DEBUGGER)) == 0);
JS_ASSERT(prev->isScriptFrame());
/* Copy (callee, thisv). */
js::Value *dstvp = (js::Value *)this - 2;
js::Value *srcvp = prev->hasArgs()
? prev->formalArgs() - 2
: (js::Value *)prev - 2;
dstvp[0] = srcvp[0];
dstvp[1] = srcvp[1];
JS_ASSERT_IF(prev->isFunctionFrame(),
dstvp[0].toObject().isFunction());
/* Initialize stack frame members. */
flags_ = flagsArg | JSFRAME_HAS_PREVPC | JSFRAME_HAS_SCOPECHAIN |
(prev->flags_ & (JSFRAME_FUNCTION | JSFRAME_GLOBAL));
if (isFunctionFrame()) {
exec = prev->exec;
args.script = script;
} else {
exec.script = script;
}
scopeChain_ = &prev->scopeChain();
prev_ = prev;
prevpc_ = prev->pc(cx);
JS_ASSERT(!hasImacropc());
JS_ASSERT(!hasHookData());
setAnnotation(prev->annotation());
}
inline void
JSStackFrame::initGlobalFrame(JSScript *script, JSObject &chain, uint32 flagsArg)
{
JS_ASSERT((flagsArg & ~(JSFRAME_EVAL | JSFRAME_DEBUGGER)) == 0);
/* Initialize (callee, thisv). */
js::Value *vp = (js::Value *)this - 2;
vp[0].setUndefined();
vp[1].setUndefined(); /* Set after frame pushed using thisObject */
/* Initialize stack frame members. */
flags_ = flagsArg | JSFRAME_GLOBAL | JSFRAME_HAS_PREVPC | JSFRAME_HAS_SCOPECHAIN;
exec.script = script;
args.script = (JSScript *)0xbad;
scopeChain_ = &chain;
prev_ = NULL;
JS_ASSERT(!hasImacropc());
JS_ASSERT(!hasHookData());
JS_ASSERT(annotation() == NULL);
}
inline void
JSStackFrame::initDummyFrame(JSContext *cx, JSObject &chain)
{
js::PodZero(this);
flags_ = JSFRAME_DUMMY | JSFRAME_HAS_PREVPC | JSFRAME_HAS_SCOPECHAIN;
initPrev(cx);
chain.isGlobal();
setScopeChainNoCallObj(chain);
}
inline void
JSStackFrame::stealFrameAndSlots(js::Value *vp, JSStackFrame *otherfp,
js::Value *othervp, js::Value *othersp)
{
JS_ASSERT(vp == (js::Value *)this - (otherfp->formalArgsEnd() - othervp));
JS_ASSERT(othervp == otherfp->actualArgs() - 2);
JS_ASSERT(othersp >= otherfp->slots());
JS_ASSERT(othersp <= otherfp->base() + otherfp->numSlots());
PodCopy(vp, othervp, othersp - othervp);
JS_ASSERT(vp == this->actualArgs() - 2);
/* Catch bad-touching of non-canonical args (e.g., generator_trace). */
if (otherfp->hasOverflowArgs())
Debug_SetValueRangeToCrashOnTouch(othervp, othervp + 2 + otherfp->numFormalArgs());
/*
* Repoint Call, Arguments, Block and With objects to the new live frame.
* Call and Arguments are done directly because we have pointers to them.
* Block and With objects are done indirectly through 'liveFrame'. See
* js_LiveFrameToFloating comment in jsiter.h.
*/
if (hasCallObj()) {
JSObject &obj = callObj();
obj.setPrivate(this);
otherfp->flags_ &= ~JSFRAME_HAS_CALL_OBJ;
if (js_IsNamedLambda(fun())) {
JSObject *env = obj.getParent();
JS_ASSERT(env->getClass() == &js_DeclEnvClass);
env->setPrivate(this);
}
}
if (hasArgsObj()) {
JSObject &args = argsObj();
JS_ASSERT(args.isArguments());
if (args.isNormalArguments())
args.setPrivate(this);
else
JS_ASSERT(!args.getPrivate());
otherfp->flags_ &= ~JSFRAME_HAS_ARGS_OBJ;
}
}
inline js::Value &
JSStackFrame::canonicalActualArg(uintN i) const
{
if (i < numFormalArgs())
return formalArg(i);
JS_ASSERT(i < numActualArgs());
return actualArgs()[i];
}
template <class Op>
inline bool
JSStackFrame::forEachCanonicalActualArg(Op op)
{
uintN nformal = fun()->nargs;
js::Value *formals = formalArgsEnd() - nformal;
uintN nactual = numActualArgs();
if (nactual <= nformal) {
uintN i = 0;
js::Value *actualsEnd = formals + nactual;
for (js::Value *p = formals; p != actualsEnd; ++p, ++i) {
if (!op(i, p))
return false;
}
} else {
uintN i = 0;
js::Value *formalsEnd = formalArgsEnd();
for (js::Value *p = formals; p != formalsEnd; ++p, ++i) {
if (!op(i, p))
return false;
}
js::Value *actuals = formalsEnd - (nactual + 2);
js::Value *actualsEnd = formals - 2;
for (js::Value *p = actuals; p != actualsEnd; ++p, ++i) {
if (!op(i, p))
return false;
}
}
return true;
}
template <class Op>
inline bool
JSStackFrame::forEachFormalArg(Op op)
{
js::Value *formals = formalArgsEnd() - fun()->nargs;
js::Value *formalsEnd = formalArgsEnd();
uintN i = 0;
for (js::Value *p = formals; p != formalsEnd; ++p, ++i) {
if (!op(i, p))
return false;
}
return true;
}
namespace js {
struct CopyTo
{
Value *dst;
CopyTo(Value *dst) : dst(dst) {}
bool operator()(uintN, Value *src) {
*dst++ = *src;
return true;
}
};
}
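/*
 * Illustrative usage sketch, not part of the patch (ExampleCopyActualArgs is
 * a hypothetical helper): copy a non-eval function frame's actual arguments,
 * in canonical order, into a caller-provided buffer of at least
 * fp->numActualArgs() values using the CopyTo functor above.
 */
static inline bool
ExampleCopyActualArgs(JSStackFrame *fp, js::Value *dst)
{
    return fp->forEachCanonicalActualArg(js::CopyTo(dst));
}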
JS_ALWAYS_INLINE void
JSStackFrame::clearMissingArgs()
{
if (flags_ & JSFRAME_UNDERFLOW_ARGS)
SetValueRangeToUndefined(formalArgs() + numActualArgs(), formalArgsEnd());
}
inline JSObject &
JSStackFrame::varobj(js::StackSegment *seg) const
{
JS_ASSERT(seg->contains(this));
return isFunctionFrame() ? callObj() : seg->getInitialVarObj();
}
inline JSObject &
JSStackFrame::varobj(JSContext *cx) const
{
JS_ASSERT(cx->activeSegment()->contains(this));
return isFunctionFrame() ? callObj() : cx->activeSegment()->getInitialVarObj();
}
inline uintN
JSStackFrame::numActualArgs() const
{
JS_ASSERT(hasArgs());
if (JS_UNLIKELY(flags_ & (JSFRAME_OVERFLOW_ARGS | JSFRAME_UNDERFLOW_ARGS)))
return hasArgsObj() ? argsObj().getArgsInitialLength() : args.nactual;
return numFormalArgs();
}
inline js::Value *
JSStackFrame::actualArgs() const
{
JS_ASSERT(hasArgs());
js::Value *argv = formalArgs();
if (JS_UNLIKELY(flags_ & JSFRAME_OVERFLOW_ARGS)) {
uintN nactual = hasArgsObj() ? argsObj().getArgsInitialLength() : args.nactual;
return argv - (2 + nactual);
}
return argv;
}
inline js::Value *
JSStackFrame::actualArgsEnd() const
{
JS_ASSERT(hasArgs());
if (JS_UNLIKELY(flags_ & JSFRAME_OVERFLOW_ARGS))
return formalArgs() - 2;
return formalArgs() + numActualArgs();
}
inline void
JSStackFrame::setArgsObj(JSObject &obj)
{
JS_ASSERT_IF(hasArgsObj(), &obj == args.obj);
JS_ASSERT_IF(!hasArgsObj(), numActualArgs() == obj.getArgsInitialLength());
args.obj = &obj;
flags_ |= JSFRAME_HAS_ARGS_OBJ;
}
inline void
JSStackFrame::setScopeChainNoCallObj(JSObject &obj)
{
#ifdef DEBUG
JS_ASSERT(&obj != NULL);
if (&obj != sInvalidScopeChain) {
if (hasCallObj()) {
JSObject *pobj = &obj;
while (pobj && pobj->getPrivate() != this)
pobj = pobj->getParent();
JS_ASSERT(pobj);
} else {
for (JSObject *pobj = &obj; pobj; pobj = pobj->getParent())
JS_ASSERT_IF(pobj->isCall(), pobj->getPrivate() != this);
}
}
#endif
scopeChain_ = &obj;
flags_ |= JSFRAME_HAS_SCOPECHAIN;
}
inline void
JSStackFrame::setScopeChainWithOwnCallObj(JSObject &obj)
{
JS_ASSERT(&obj != NULL);
JS_ASSERT(!hasCallObj() && obj.isCall() && obj.getPrivate() == this);
scopeChain_ = &obj;
flags_ |= JSFRAME_HAS_SCOPECHAIN | JSFRAME_HAS_CALL_OBJ;
}
inline JSObject &
JSStackFrame::callObj() const
{
JS_ASSERT_IF(isNonEvalFunctionFrame() || isStrictEvalFrame(), hasCallObj());
JSObject *pobj = &scopeChain();
while (JS_UNLIKELY(pobj->getClass() != &js_CallClass)) {
JS_ASSERT(js::IsCacheableNonGlobalScope(pobj) || pobj->isWith());
pobj = pobj->getParent();
}
return *pobj;
}
inline void
JSStackFrame::markActivationObjectsAsPut()
{
if (flags_ & (JSFRAME_HAS_ARGS_OBJ | JSFRAME_HAS_CALL_OBJ)) {
if (hasArgsObj() && !argsObj().getPrivate()) {
args.nactual = args.obj->getArgsInitialLength();
flags_ &= ~JSFRAME_HAS_ARGS_OBJ;
}
if (hasCallObj() && !callObj().getPrivate()) {
/*
* For function frames, the call object may or may not have an
* enclosing DeclEnv object, so we use the callee's parent, since
* it was the initial scope chain. For global (strict) eval frames,
* there is no callee, but the call object's parent is the initial
* scope chain.
*/
scopeChain_ = isFunctionFrame()
? callee().getParent()
: scopeChain_->getParent();
flags_ &= ~JSFRAME_HAS_CALL_OBJ;
}
}
}
#include "vm/Stack-inl.h"
namespace js {
@ -548,12 +130,11 @@ InvokeSessionGuard::invoke(JSContext *cx) const
#else
if (!optimized())
#endif
return Invoke(cx, args_, 0);
return Invoke(cx, args_);
/* Clear any garbage left from the last Invoke. */
JSStackFrame *fp = frame_.fp();
StackFrame *fp = frame_.fp();
fp->clearMissingArgs();
PutActivationObjects(cx, frame_.fp());
fp->resetInvokeCallFrame();
SetValueRangeToUndefined(fp->slots(), script_->nfixed);
@ -563,7 +144,7 @@ InvokeSessionGuard::invoke(JSContext *cx) const
Probes::enterJSFun(cx, fp->fun(), script_);
#ifdef JS_METHODJIT
ok = mjit::EnterMethodJIT(cx, fp, code, stackLimit_);
cx->regs->pc = stop_;
cx->regs().pc = stop_;
#else
cx->regs->pc = script_->code;
ok = Interpret(cx, cx->fp());
@ -606,7 +187,7 @@ class PrimitiveBehavior<double> {
} // namespace detail
template <typename T>
bool
inline bool
GetPrimitiveThis(JSContext *cx, Value *vp, T *v)
{
typedef detail::PrimitiveBehavior<T> Behavior;
@ -704,7 +285,7 @@ ComputeImplicitThis(JSContext *cx, JSObject *obj, const Value &funval, Value *vp
}
inline bool
ComputeThis(JSContext *cx, JSStackFrame *fp)
ComputeThis(JSContext *cx, StackFrame *fp)
{
Value &thisv = fp->thisValue();
if (thisv.isObject())
@ -761,7 +342,7 @@ ValuePropertyBearer(JSContext *cx, const Value &v, int spindex)
}
inline bool
ScriptPrologue(JSContext *cx, JSStackFrame *fp)
ScriptPrologue(JSContext *cx, StackFrame *fp)
{
JS_ASSERT_IF(fp->isNonEvalFunctionFrame() && fp->fun()->isHeavyweight(), fp->hasCallObj());
@ -778,7 +359,7 @@ ScriptPrologue(JSContext *cx, JSStackFrame *fp)
}
inline bool
ScriptEpilogue(JSContext *cx, JSStackFrame *fp, bool ok)
ScriptEpilogue(JSContext *cx, StackFrame *fp, bool ok)
{
if (cx->compartment->debugMode)
ok = ScriptDebugEpilogue(cx, fp, ok);
@ -797,7 +378,7 @@ ScriptEpilogue(JSContext *cx, JSStackFrame *fp, bool ok)
}
inline bool
ScriptPrologueOrGeneratorResume(JSContext *cx, JSStackFrame *fp)
ScriptPrologueOrGeneratorResume(JSContext *cx, StackFrame *fp)
{
if (!fp->isGeneratorFrame())
return ScriptPrologue(cx, fp);
@ -807,7 +388,7 @@ ScriptPrologueOrGeneratorResume(JSContext *cx, JSStackFrame *fp)
}
inline bool
ScriptEpilogueOrGeneratorYield(JSContext *cx, JSStackFrame *fp, bool ok)
ScriptEpilogueOrGeneratorYield(JSContext *cx, StackFrame *fp, bool ok)
{
if (!fp->isYielding())
return ScriptEpilogue(cx, fp, ok);
@ -816,6 +397,6 @@ ScriptEpilogueOrGeneratorYield(JSContext *cx, JSStackFrame *fp, bool ok)
return ok;
}
}
} /* namespace js */
#endif /* jsinterpinlines_h__ */

View file

@ -65,6 +65,7 @@
#include "jsobj.h"
#include "jsopcode.h"
#include "jsproxy.h"
#include "jsscan.h"
#include "jsscope.h"
#include "jsscript.h"
#include "jsstaticcheck.h"
@ -74,11 +75,11 @@
#include "jsxml.h"
#endif
#include "jscntxtinlines.h"
#include "jsinterpinlines.h"
#include "jsobjinlines.h"
#include "jsstrinlines.h"
#include "vm/Stack-inl.h"
using namespace js;
using namespace js::gc;
@ -1091,7 +1092,7 @@ generator_trace(JSTracer *trc, JSObject *obj)
if (gen->state == JSGEN_RUNNING || gen->state == JSGEN_CLOSING)
return;
JSStackFrame *fp = gen->floatingFrame();
StackFrame *fp = gen->floatingFrame();
JS_ASSERT(gen->liveFrame() == fp);
/*
@ -1134,17 +1135,10 @@ Class js_GeneratorClass = {
}
};
static inline void
RebaseRegsFromTo(JSFrameRegs *regs, JSStackFrame *from, JSStackFrame *to)
{
regs->fp = to;
regs->sp = to->slots() + (regs->sp - from->slots());
}
/*
* Called from the JSOP_GENERATOR case in the interpreter, with fp referring
* to the frame by which the generator function was activated. Create a new
* JSGenerator object, which contains its own JSStackFrame that we populate
* JSGenerator object, which contains its own StackFrame that we populate
* from *fp. We know that upon return, the JSOP_GENERATOR opcode will return
* from the activation in fp, so we can steal away fp->callobj and fp->argsobj
* if they are non-null.
@ -1156,8 +1150,8 @@ js_NewGenerator(JSContext *cx)
if (!obj)
return NULL;
JSStackFrame *stackfp = cx->fp();
JS_ASSERT(stackfp->base() == cx->regs->sp);
StackFrame *stackfp = cx->fp();
JS_ASSERT(stackfp->base() == cx->regs().sp);
JS_ASSERT(stackfp->actualArgs() <= stackfp->formalArgs());
/* Load and compute stack slot counts. */
@ -1177,7 +1171,7 @@ js_NewGenerator(JSContext *cx)
/* Cut up floatingStack space. */
Value *genvp = gen->floatingStack;
JSStackFrame *genfp = reinterpret_cast<JSStackFrame *>(genvp + vplen);
StackFrame *genfp = reinterpret_cast<StackFrame *>(genvp + vplen);
/* Initialize JSGenerator. */
gen->obj = obj;
@ -1186,11 +1180,11 @@ js_NewGenerator(JSContext *cx)
gen->floating = genfp;
/* Initialize regs stored in generator. */
gen->regs = *cx->regs;
RebaseRegsFromTo(&gen->regs, stackfp, genfp);
gen->regs = cx->regs();
gen->regs.rebaseFromTo(stackfp, genfp);
/* Copy frame off the stack. */
genfp->stealFrameAndSlots(genvp, stackfp, stackvp, cx->regs->sp);
genfp->stealFrameAndSlots(genvp, stackfp, stackvp, cx->regs().sp);
genfp->initFloatingGenerator();
obj->setPrivate(gen);
@ -1198,7 +1192,7 @@ js_NewGenerator(JSContext *cx)
}
JSGenerator *
js_FloatingFrameToGenerator(JSStackFrame *fp)
js_FloatingFrameToGenerator(StackFrame *fp)
{
JS_ASSERT(fp->isGeneratorFrame() && fp->isFloatingGenerator());
char *floatingStackp = (char *)(fp->actualArgs() - 2);
@ -1258,11 +1252,11 @@ SendToGenerator(JSContext *cx, JSGeneratorOp op, JSObject *obj,
break;
}
JSStackFrame *genfp = gen->floatingFrame();
StackFrame *genfp = gen->floatingFrame();
Value *genvp = gen->floatingStack;
uintN vplen = genfp->formalArgsEnd() - genvp;
JSStackFrame *stackfp;
StackFrame *stackfp;
Value *stackvp;
JSBool ok;
{
@ -1271,7 +1265,7 @@ SendToGenerator(JSContext *cx, JSGeneratorOp op, JSObject *obj,
* the code before pushExecuteFrame must not reenter the interpreter.
*/
GeneratorFrameGuard frame;
if (!cx->stack().getGeneratorFrame(cx, vplen, genfp->numSlots(), &frame)) {
if (!cx->stack.getGeneratorFrame(cx, vplen, genfp->numSlots(), &frame)) {
gen->state = JSGEN_CLOSED;
return JS_FALSE;
}
@ -1282,11 +1276,11 @@ SendToGenerator(JSContext *cx, JSGeneratorOp op, JSObject *obj,
stackfp->stealFrameAndSlots(stackvp, genfp, genvp, gen->regs.sp);
stackfp->resetGeneratorPrev(cx);
stackfp->unsetFloatingGenerator();
RebaseRegsFromTo(&gen->regs, genfp, stackfp);
gen->regs.rebaseFromTo(genfp, stackfp);
MUST_FLOW_THROUGH("restore");
/* Officially push frame. frame's destructor pops. */
cx->stack().pushGeneratorFrame(cx, &gen->regs, &frame);
cx->stack.pushGeneratorFrame(gen->regs, &frame);
cx->enterGenerator(gen); /* OOM check above. */
JSObject *enumerators = cx->enumerators;
@ -1306,7 +1300,7 @@ SendToGenerator(JSContext *cx, JSGeneratorOp op, JSObject *obj,
genfp->setFloatingGenerator();
}
MUST_FLOW_LABEL(restore)
RebaseRegsFromTo(&gen->regs, stackfp, genfp);
gen->regs.rebaseFromTo(stackfp, genfp);
if (gen->floatingFrame()->isYielding()) {
/* Yield cannot fail, throw or be called on closing. */

View file

@ -181,19 +181,19 @@ typedef enum JSGeneratorState {
struct JSGenerator {
JSObject *obj;
JSGeneratorState state;
JSFrameRegs regs;
js::FrameRegs regs;
JSObject *enumerators;
JSStackFrame *floating;
js::StackFrame *floating;
js::Value floatingStack[1];
JSStackFrame *floatingFrame() {
js::StackFrame *floatingFrame() {
return floating;
}
JSStackFrame *liveFrame() {
js::StackFrame *liveFrame() {
JS_ASSERT((state == JSGEN_RUNNING || state == JSGEN_CLOSING) ==
(regs.fp != floatingFrame()));
return regs.fp;
(regs.fp() != floatingFrame()));
return regs.fp();
}
};
@ -211,10 +211,10 @@ js_NewGenerator(JSContext *cx);
* Block and With objects must "normalize" to and from the floating/live frames
* in the case of generators using the following functions.
*/
inline JSStackFrame *
js_FloatingFrameIfGenerator(JSContext *cx, JSStackFrame *fp)
inline js::StackFrame *
js_FloatingFrameIfGenerator(JSContext *cx, js::StackFrame *fp)
{
JS_ASSERT(cx->stack().contains(fp));
JS_ASSERT(cx->stack.contains(fp));
if (JS_UNLIKELY(fp->isGeneratorFrame()))
return cx->generatorFor(fp)->floatingFrame();
return fp;
@ -222,10 +222,10 @@ js_FloatingFrameIfGenerator(JSContext *cx, JSStackFrame *fp)
/* Given a floating frame, return the JSGenerator containing it. */
extern JSGenerator *
js_FloatingFrameToGenerator(JSStackFrame *fp);
js_FloatingFrameToGenerator(js::StackFrame *fp);
inline JSStackFrame *
js_LiveFrameIfGenerator(JSStackFrame *fp)
inline js::StackFrame *
js_LiveFrameIfGenerator(js::StackFrame *fp)
{
return fp->isGeneratorFrame() ? js_FloatingFrameToGenerator(fp)->liveFrame() : fp;
}


@ -81,7 +81,7 @@ typedef struct JSThinLock {
JSFatLock *fat;
} JSThinLock;
#define CX_THINLOCK_ID(cx) ((jsword)(cx)->thread)
#define CX_THINLOCK_ID(cx) ((jsword)(cx)->thread())
#define CURRENT_THREAD_IS_ME(me) (((JSThread *)me)->id == js_CurrentThreadId())
typedef PRLock JSLock;


@ -959,7 +959,7 @@ EvalCacheHash(JSContext *cx, JSLinearString *str)
}
static JS_ALWAYS_INLINE JSScript *
EvalCacheLookup(JSContext *cx, JSLinearString *str, JSStackFrame *caller, uintN staticLevel,
EvalCacheLookup(JSContext *cx, JSLinearString *str, StackFrame *caller, uintN staticLevel,
JSPrincipals *principals, JSObject &scopeobj, JSScript **bucket)
{
/*
@ -1082,7 +1082,7 @@ class EvalScriptGuard
}
}
void lookupInEvalCache(JSStackFrame *caller, uintN staticLevel,
void lookupInEvalCache(StackFrame *caller, uintN staticLevel,
JSPrincipals *principals, JSObject &scopeobj) {
if (JSScript *found = EvalCacheLookup(cx_, str_, caller, staticLevel,
principals, scopeobj, bucket_)) {
@ -1120,7 +1120,7 @@ class EvalScriptGuard
enum EvalType { DIRECT_EVAL, INDIRECT_EVAL };
static bool
EvalKernel(JSContext *cx, const CallArgs &call, EvalType evalType, JSStackFrame *caller,
EvalKernel(JSContext *cx, const CallArgs &call, EvalType evalType, StackFrame *caller,
JSObject &scopeobj)
{
JS_ASSERT((evalType == INDIRECT_EVAL) == (caller == NULL));
@ -1235,7 +1235,7 @@ EvalKernel(JSContext *cx, const CallArgs &call, EvalType evalType, JSStackFrame
esg.setNewScript(compiled);
}
return Execute(cx, scopeobj, esg.script(), caller, JSFRAME_EVAL, &call.rval());
return Execute(cx, scopeobj, esg.script(), caller, StackFrame::EVAL, &call.rval());
}
/*
@ -1247,7 +1247,7 @@ static inline bool
WarnOnTooManyArgs(JSContext *cx, const CallArgs &call)
{
if (call.argc() > 1) {
if (JSStackFrame *caller = js_GetScriptedCaller(cx, NULL)) {
if (StackFrame *caller = js_GetScriptedCaller(cx, NULL)) {
if (!caller->script()->warnedAboutTwoArgumentEval) {
static const char TWO_ARGUMENT_WARNING[] =
"Support for eval(code, scopeObject) has been removed. "
@ -1286,10 +1286,10 @@ bool
DirectEval(JSContext *cx, const CallArgs &call)
{
/* Direct eval can assume it was called from an interpreted frame. */
JSStackFrame *caller = cx->fp();
StackFrame *caller = cx->fp();
JS_ASSERT(caller->isScriptFrame());
JS_ASSERT(IsBuiltinEvalForScope(&caller->scopeChain(), call.calleev()));
JS_ASSERT(*cx->regs->pc == JSOP_EVAL);
JS_ASSERT(*cx->regs().pc == JSOP_EVAL);
AutoFunctionCallProbe callProbe(cx, call.callee().getFunctionPrivate(), caller->script());
@ -1339,8 +1339,8 @@ PrincipalsForCompiledCode(const CallArgs &call, JSContext *cx)
#ifdef DEBUG
if (calleePrincipals) {
if (JSStackFrame *caller = js_GetScriptedCaller(cx, NULL)) {
if (JSPrincipals *callerPrincipals = caller->principals(cx)) {
if (StackFrame *caller = js_GetScriptedCaller(cx, NULL)) {
if (JSPrincipals *callerPrincipals = caller->scopeChain().principals(cx)) {
JS_ASSERT(callerPrincipals->subsume(callerPrincipals, calleePrincipals));
}
}
@ -1360,8 +1360,8 @@ obj_watch_handler(JSContext *cx, JSObject *obj, jsid id, jsval old,
{
JSObject *callable = (JSObject *) closure;
if (JSPrincipals *watcher = callable->principals(cx)) {
if (JSStackFrame *caller = js_GetScriptedCaller(cx, NULL)) {
if (JSPrincipals *subject = caller->principals(cx)) {
if (StackFrame *caller = js_GetScriptedCaller(cx, NULL)) {
if (JSPrincipals *subject = caller->scopeChain().principals(cx)) {
if (!watcher->subsume(watcher, subject)) {
/* Silently don't call the watch handler. */
return JS_TRUE;
@ -3110,8 +3110,8 @@ js_InferFlags(JSContext *cx, uintN defaultFlags)
uint32 format;
uintN flags = 0;
JSStackFrame *const fp = js_GetTopStackFrame(cx);
if (!fp || !(pc = cx->regs->pc))
StackFrame *const fp = js_GetTopStackFrame(cx);
if (!fp || !(pc = cx->regs().pc))
return defaultFlags;
cs = &js_CodeSpec[js_GetOpcode(cx, fp->script(), pc)];
format = cs->format;
@ -3239,7 +3239,7 @@ js_NewWithObject(JSContext *cx, JSObject *proto, JSObject *parent, jsint depth)
if (!obj)
return NULL;
JSStackFrame *priv = js_FloatingFrameIfGenerator(cx, cx->fp());
StackFrame *priv = js_FloatingFrameIfGenerator(cx, cx->fp());
EmptyShape *emptyWithShape = EmptyShape::getEmptyWithShape(cx);
if (!emptyWithShape)
@ -3281,7 +3281,7 @@ js_NewBlockObject(JSContext *cx)
}
JSObject *
js_CloneBlockObject(JSContext *cx, JSObject *proto, JSStackFrame *fp)
js_CloneBlockObject(JSContext *cx, JSObject *proto, StackFrame *fp)
{
JS_ASSERT(proto->isStaticBlock());
@ -3292,7 +3292,7 @@ js_CloneBlockObject(JSContext *cx, JSObject *proto, JSStackFrame *fp)
if (!clone)
return NULL;
JSStackFrame *priv = js_FloatingFrameIfGenerator(cx, fp);
StackFrame *priv = js_FloatingFrameIfGenerator(cx, fp);
/* The caller sets parent on its own. */
clone->initClonedBlock(cx, proto, priv);
@ -3309,7 +3309,7 @@ js_CloneBlockObject(JSContext *cx, JSObject *proto, JSStackFrame *fp)
JS_REQUIRES_STACK JSBool
js_PutBlockObject(JSContext *cx, JSBool normalUnwind)
{
JSStackFrame *const fp = cx->fp();
StackFrame *const fp = cx->fp();
JSObject *obj = &fp->scopeChain();
JS_ASSERT(obj->isClonedBlock());
JS_ASSERT(obj->getPrivate() == js_FloatingFrameIfGenerator(cx, cx->fp()));
@ -3320,8 +3320,8 @@ js_PutBlockObject(JSContext *cx, JSBool normalUnwind)
/* The block and its locals must be on the current stack for GC safety. */
uintN depth = OBJ_BLOCK_DEPTH(cx, obj);
JS_ASSERT(depth <= size_t(cx->regs->sp - fp->base()));
JS_ASSERT(count <= size_t(cx->regs->sp - fp->base() - depth));
JS_ASSERT(depth <= size_t(cx->regs().sp - fp->base()));
JS_ASSERT(count <= size_t(cx->regs().sp - fp->base() - depth));
/* See comments in CheckDestructuring from jsparse.cpp. */
JS_ASSERT(count >= 1);
@ -3350,7 +3350,7 @@ block_getProperty(JSContext *cx, JSObject *obj, jsid id, Value *vp)
uintN index = (uintN) JSID_TO_INT(id);
JS_ASSERT(index < OBJ_BLOCK_COUNT(cx, obj));
JSStackFrame *fp = (JSStackFrame *) obj->getPrivate();
StackFrame *fp = (StackFrame *) obj->getPrivate();
if (fp) {
fp = js_LiveFrameIfGenerator(fp);
index += fp->numFixed() + OBJ_BLOCK_DEPTH(cx, obj);
@ -3371,7 +3371,7 @@ block_setProperty(JSContext *cx, JSObject *obj, jsid id, JSBool strict, Value *v
uintN index = (uintN) JSID_TO_INT(id);
JS_ASSERT(index < OBJ_BLOCK_COUNT(cx, obj));
JSStackFrame *fp = (JSStackFrame *) obj->getPrivate();
StackFrame *fp = (StackFrame *) obj->getPrivate();
if (fp) {
fp = js_LiveFrameIfGenerator(fp);
index += fp->numFixed() + OBJ_BLOCK_DEPTH(cx, obj);
@ -4227,7 +4227,7 @@ JSBool
js_FindClassObject(JSContext *cx, JSObject *start, JSProtoKey protoKey,
Value *vp, Class *clasp)
{
JSStackFrame *fp;
StackFrame *fp;
JSObject *obj, *cobj, *pobj;
jsid id;
JSProperty *prop;
@ -4945,7 +4945,7 @@ js_LookupPropertyWithFlagsInline(JSContext *cx, JSObject *obj, jsid id, uintN fl
* Non-native objects must have either non-native lookup results,
* or else native results from the non-native's prototype chain.
*
* See JSStackFrame::getValidCalleeObject, where we depend on this
* See StackFrame::getValidCalleeObject, where we depend on this
* fact to force a prototype-delegated joined method accessed via
* arguments.callee through the delegating |this| object's method
* read barrier.
@ -5459,7 +5459,7 @@ js_GetMethod(JSContext *cx, JSObject *obj, jsid id, uintN getHow, Value *vp)
JS_FRIEND_API(bool)
js_CheckUndeclaredVarAssignment(JSContext *cx, JSString *propname)
{
JSStackFrame *const fp = js_GetTopStackFrame(cx);
StackFrame *const fp = js_GetTopStackFrame(cx);
if (!fp)
return true;
@ -5873,7 +5873,7 @@ js_DeleteProperty(JSContext *cx, JSObject *obj, jsid id, Value *rval, JSBool str
JSFunction *fun = GET_FUNCTION_PRIVATE(cx, funobj);
if (fun != funobj) {
for (JSStackFrame *fp = cx->maybefp(); fp; fp = fp->prev()) {
for (StackFrame *fp = cx->maybefp(); fp; fp = fp->prev()) {
if (fp->isFunctionFrame() &&
fp->callee() == fun->compiledFunObj() &&
fp->thisValue().isObject())
@ -6157,7 +6157,7 @@ js_GetClassPrototype(JSContext *cx, JSObject *scopeobj, JSProtoKey protoKey,
if (protoKey != JSProto_Null) {
if (!scopeobj) {
if (cx->hasfp())
if (cx->running())
scopeobj = &cx->fp()->scopeChain();
if (!scopeobj) {
scopeobj = cx->globalObject;
@ -6858,7 +6858,7 @@ MaybeDumpValue(const char *name, const Value &v)
}
JS_FRIEND_API(void)
js_DumpStackFrame(JSContext *cx, JSStackFrame *start)
js_DumpStackFrame(JSContext *cx, StackFrame *start)
{
/* This should only be called during live debugging. */
VOUCH_DOES_NOT_REQUIRE_STACK();
@ -6875,9 +6875,9 @@ js_DumpStackFrame(JSContext *cx, JSStackFrame *start)
}
for (; !i.done(); ++i) {
JSStackFrame *const fp = i.fp();
StackFrame *const fp = i.fp();
fprintf(stderr, "JSStackFrame at %p\n", (void *) fp);
fprintf(stderr, "StackFrame at %p\n", (void *) fp);
if (fp->isFunctionFrame()) {
fprintf(stderr, "callee fun: ");
dumpValue(ObjectValue(fp->callee()));


@ -477,7 +477,7 @@ struct JSObject : js::gc::Cell {
/* Functions for setting up scope chain object maps and shapes. */
void initCall(JSContext *cx, const js::Bindings &bindings, JSObject *parent);
void initClonedBlock(JSContext *cx, JSObject *proto, JSStackFrame *priv);
void initClonedBlock(JSContext *cx, JSObject *proto, js::StackFrame *priv);
void setBlockOwnShape(JSContext *cx);
void deletingShapeChange(JSContext *cx, const js::Shape &shape);
@ -916,7 +916,7 @@ struct JSObject : js::gc::Cell {
inline bool callIsForEval() const;
/* The stack frame for this Call object, if the frame is still active. */
inline JSStackFrame *maybeCallObjStackFrame() const;
inline js::StackFrame *maybeCallObjStackFrame() const;
/*
* The callee function if this Call object was created for a function
@ -1416,7 +1416,7 @@ inline bool JSObject::isBlock() const { return getClass() == &js_BlockClass; }
/*
* Block scope object macros. The slots reserved by js_BlockClass are:
*
* private JSStackFrame * active frame pointer or null
* private StackFrame * active frame pointer or null
* JSSLOT_BLOCK_DEPTH int depth of block slots in frame
*
* After JSSLOT_BLOCK_DEPTH come one or more slots for the block locals.
@ -1424,7 +1424,7 @@ inline bool JSObject::isBlock() const { return getClass() == &js_BlockClass; }
* A With object is like a Block object, in that both have one reserved slot
* telling the stack depth of the relevant slots (the slot whose value is the
* object named in the with statement, the slots containing the block's local
* variables); and both have a private slot referring to the JSStackFrame in
* variables); and both have a private slot referring to the StackFrame in
* whose activation they were created (or null if the with or block object
* outlives the frame).
*/
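
Given that layout, the index computation in block_getProperty/block_setProperty earlier in this patch (index += fp->numFixed() + OBJ_BLOCK_DEPTH(cx, obj)) is plain arithmetic: block local i of a live frame sits that many slots into the frame's slot array. A throwaway worked example with made-up numbers:

    // Made-up numbers; stand-ins for fp->numFixed(), OBJ_BLOCK_DEPTH(cx, obj)
    // and JSID_TO_INT(id) as used by block_getProperty above.
    #include <cassert>

    int main() {
        unsigned numFixed   = 3;   // fixed slots owned by the script
        unsigned blockDepth = 2;   // depth at which the block's locals start
        unsigned localIndex = 1;   // which block local the property maps to

        assert(numFixed + blockDepth + localIndex == 6);   // i.e. the live frame's slot 6
        return 0;
    }
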
@ -1480,7 +1480,7 @@ extern JSObject *
js_NewBlockObject(JSContext *cx);
extern JSObject *
js_CloneBlockObject(JSContext *cx, JSObject *proto, JSStackFrame *fp);
js_CloneBlockObject(JSContext *cx, JSObject *proto, js::StackFrame *fp);
extern JS_REQUIRES_STACK JSBool
js_PutBlockObject(JSContext *cx, JSBool normalUnwind);
@ -1912,7 +1912,7 @@ JS_FRIEND_API(void) js_DumpAtom(JSAtom *atom);
JS_FRIEND_API(void) js_DumpObject(JSObject *obj);
JS_FRIEND_API(void) js_DumpValue(const js::Value &val);
JS_FRIEND_API(void) js_DumpId(jsid id);
JS_FRIEND_API(void) js_DumpStackFrame(JSContext *cx, JSStackFrame *start = NULL);
JS_FRIEND_API(void) js_DumpStackFrame(JSContext *cx, js::StackFrame *start = NULL);
#endif
extern uintN
@ -1922,7 +1922,6 @@ js_InferFlags(JSContext *cx, uintN defaultFlags);
JSBool
js_Object(JSContext *cx, uintN argc, js::Value *vp);
namespace js {
extern bool


@ -169,7 +169,7 @@ JSObject::initCall(JSContext *cx, const js::Bindings &bindings, JSObject *parent
* shape.
*/
inline void
JSObject::initClonedBlock(JSContext *cx, JSObject *proto, JSStackFrame *frame)
JSObject::initClonedBlock(JSContext *cx, JSObject *proto, js::StackFrame *frame)
{
init(cx, &js_BlockClass, proto, NULL, frame, false);
@ -509,11 +509,11 @@ JSObject::callIsForEval() const
return getSlot(JSSLOT_CALL_CALLEE).isNull();
}
inline JSStackFrame *
inline js::StackFrame *
JSObject::maybeCallObjStackFrame() const
{
JS_ASSERT(isCall());
return reinterpret_cast<JSStackFrame *>(getPrivate());
return reinterpret_cast<js::StackFrame *>(getPrivate());
}
inline void
@ -881,12 +881,6 @@ JSObject::principals(JSContext *cx)
return compPrincipals;
}
inline JSPrincipals *
JSStackFrame::principals(JSContext *cx) const
{
return scopeChain().principals(cx);
}
inline uint32
JSObject::slotSpan() const
{
@ -1222,7 +1216,7 @@ NewBuiltinClassInstance(JSContext *cx, Class *clasp, gc::FinalizeKind kind)
/* NB: inline-expanded and specialized version of js_GetClassPrototype. */
JSObject *global;
if (!cx->hasfp()) {
if (!cx->running()) {
global = cx->globalObject;
OBJ_TO_INNER_OBJECT(cx, global);
if (!global)


@ -67,6 +67,8 @@
#include "jsobjinlines.h"
#include "jsstrinlines.h"
#include "vm/Stack-inl.h"
using namespace js;
using namespace js::gc;
@ -336,14 +338,14 @@ PreprocessValue(JSContext *cx, JSObject *holder, jsid key, Value *vp, StringifyC
LeaveTrace(cx);
InvokeArgsGuard args;
if (!cx->stack().pushInvokeArgs(cx, 1, &args))
if (!cx->stack.pushInvokeArgs(cx, 1, &args))
return false;
args.calleev() = toJSON;
args.thisv() = *vp;
args[0] = StringValue(keyStr);
if (!Invoke(cx, args, 0))
if (!Invoke(cx, args))
return false;
*vp = args.rval();
}
@ -359,7 +361,7 @@ PreprocessValue(JSContext *cx, JSObject *holder, jsid key, Value *vp, StringifyC
LeaveTrace(cx);
InvokeArgsGuard args;
if (!cx->stack().pushInvokeArgs(cx, 2, &args))
if (!cx->stack.pushInvokeArgs(cx, 2, &args))
return false;
args.calleev() = ObjectValue(*scx->replacer);
@ -367,7 +369,7 @@ PreprocessValue(JSContext *cx, JSObject *holder, jsid key, Value *vp, StringifyC
args[0] = StringValue(keyStr);
args[1] = *vp;
if (!Invoke(cx, args, 0))
if (!Invoke(cx, args))
return false;
*vp = args.rval();
}


@ -72,7 +72,6 @@
#include "jsstaticcheck.h"
#include "jsvector.h"
#include "jsinterpinlines.h"
#include "jsobjinlines.h"
#include "jsscriptinlines.h"
#include "jscntxtinlines.h"
@ -311,7 +310,7 @@ js_DumpPC(JSContext *cx)
void *mark = JS_ARENA_MARK(&cx->tempPool);
Sprinter sprinter;
INIT_SPRINTER(cx, &sprinter, &cx->tempPool, 0);
JSBool ok = js_DisassembleAtPC(cx, cx->fp()->script(), true, cx->regs->pc, &sprinter);
JSBool ok = js_DisassembleAtPC(cx, cx->fp()->script(), true, cx->regs().pc, &sprinter);
fprintf(stdout, "%s", sprinter.base);
JS_ARENA_RELEASE(&cx->tempPool, mark);
return ok;
@ -2053,7 +2052,7 @@ Decompile(SprintStack *ss, jsbytecode *pc, intN nb, JSOp nextop)
token = CodeToken[op];
if (pc + oplen == jp->dvgfence) {
JSStackFrame *fp;
StackFrame *fp;
uint32 format, mode, type;
/*
@ -2915,7 +2914,7 @@ Decompile(SprintStack *ss, jsbytecode *pc, intN nb, JSOp nextop)
* object that's not a constructor, causing us to be
* called with an intervening frame on the stack.
*/
JSStackFrame *fp = js_GetTopStackFrame(cx);
StackFrame *fp = js_GetTopStackFrame(cx);
if (fp) {
while (!fp->isEvalFrame())
fp = fp->prev();
@ -5088,7 +5087,7 @@ char *
js_DecompileValueGenerator(JSContext *cx, intN spindex, jsval v_in,
JSString *fallback)
{
JSStackFrame *fp;
StackFrame *fp;
JSScript *script;
jsbytecode *pc;
@ -5100,12 +5099,12 @@ js_DecompileValueGenerator(JSContext *cx, intN spindex, jsval v_in,
LeaveTrace(cx);
if (!cx->regs || !cx->regs->fp || !cx->regs->fp->isScriptFrame())
if (!cx->running() || !cx->fp()->isScriptFrame())
goto do_fallback;
fp = cx->regs->fp;
fp = cx->fp();
script = fp->script();
pc = fp->hasImacropc() ? fp->imacropc() : cx->regs->pc;
pc = fp->hasImacropc() ? fp->imacropc() : cx->regs().pc;
JS_ASSERT(script->code <= pc && pc < script->code + script->length);
if (pc < script->main)
@ -5139,7 +5138,7 @@ js_DecompileValueGenerator(JSContext *cx, intN spindex, jsval v_in,
* it that caused exception, see bug 328664.
*/
Value *stackBase = fp->base();
Value *sp = cx->regs->sp;
Value *sp = cx->regs().sp;
do {
if (sp == stackBase) {
pcdepth = -1;
@ -5167,11 +5166,10 @@ js_DecompileValueGenerator(JSContext *cx, intN spindex, jsval v_in,
}
{
jsbytecode* basepc = cx->regs->pc;
jsbytecode* basepc = cx->regs().pc;
jsbytecode* savedImacropc = fp->maybeImacropc();
if (savedImacropc) {
JS_ASSERT(cx->hasfp());
cx->regs->pc = savedImacropc;
cx->regs().pc = savedImacropc;
fp->clearImacropc();
}
@ -5186,8 +5184,7 @@ js_DecompileValueGenerator(JSContext *cx, intN spindex, jsval v_in,
name = DecompileExpression(cx, script, fp->maybeFun(), pc);
if (savedImacropc) {
JS_ASSERT(cx->hasfp());
cx->regs->pc = basepc;
cx->regs().pc = basepc;
fp->setImacropc(savedImacropc);
}
@ -5467,7 +5464,7 @@ ReconstructImacroPCStack(JSContext *cx, JSScript *script,
* Begin with a recursive call back to ReconstructPCStack to pick up
* the state-of-the-world at the *start* of the imacro.
*/
JSStackFrame *fp = js_GetScriptedCaller(cx, NULL);
StackFrame *fp = js_GetScriptedCaller(cx, NULL);
JS_ASSERT(fp->hasImacropc());
intN pcdepth = ReconstructPCStack(cx, script, fp->imacropc(), pcstack);
if (pcdepth < 0)

Просмотреть файл

@ -91,7 +91,6 @@
#endif
#include "jsatominlines.h"
#include "jsinterpinlines.h"
#include "jsobjinlines.h"
#include "jsregexpinlines.h"
#include "jsscriptinlines.h"
@ -185,14 +184,14 @@ JSParseNode::clear()
pn_parens = false;
}
Parser::Parser(JSContext *cx, JSPrincipals *prin, JSStackFrame *cfp)
Parser::Parser(JSContext *cx, JSPrincipals *prin, StackFrame *cfp)
: js::AutoGCRooter(cx, PARSER),
context(cx),
aleFreeList(NULL),
tokenStream(cx),
principals(NULL),
callerFrame(cfp),
callerVarObj(cfp ? &cfp->varobj(cx->stack().containingSegment(cfp)) : NULL),
callerVarObj(cfp ? &cx->stack.space().varObjForFrame(cfp) : NULL),
nodeList(NULL),
functionCount(0),
traceListHead(NULL),
@ -891,13 +890,13 @@ SetStaticLevel(JSTreeContext *tc, uintN staticLevel)
/*
* Compile a top-level script.
*/
Compiler::Compiler(JSContext *cx, JSPrincipals *prin, JSStackFrame *cfp)
Compiler::Compiler(JSContext *cx, JSPrincipals *prin, StackFrame *cfp)
: parser(cx, prin, cfp)
{
}
JSScript *
Compiler::compileScript(JSContext *cx, JSObject *scopeChain, JSStackFrame *callerFrame,
Compiler::compileScript(JSContext *cx, JSObject *scopeChain, StackFrame *callerFrame,
JSPrincipals *principals, uint32 tcflags,
const jschar *chars, size_t length,
const char *filename, uintN lineno, JSVersion version,
@ -8762,7 +8761,7 @@ Parser::primaryExpr(TokenKind tt, JSBool afterDot)
return NULL;
JSObject *obj;
if (context->hasfp()) {
if (context->running()) {
obj = RegExp::createObject(context, context->regExpStatics(),
tokenStream.getTokenbuf().begin(),
tokenStream.getTokenbuf().length(),


@ -1048,14 +1048,14 @@ namespace js {
struct Parser : private js::AutoGCRooter
{
JSContext * const context; /* FIXME Bug 551291: use AutoGCRooter::context? */
JSContext *const context; /* FIXME Bug 551291: use AutoGCRooter::context? */
JSAtomListElement *aleFreeList;
void *tempFreeList[NUM_TEMP_FREELISTS];
TokenStream tokenStream;
void *tempPoolMark; /* initial JSContext.tempPool mark */
JSPrincipals *principals; /* principals associated with source */
JSStackFrame *const callerFrame; /* scripted caller frame for eval and dbgapi */
JSObject *const callerVarObj; /* callerFrame's varObj */
StackFrame *const callerFrame; /* scripted caller frame for eval and dbgapi */
JSObject *const callerVarObj; /* callerFrame's varObj */
JSParseNode *nodeList; /* list of recyclable parse-node structs */
uint32 functionCount; /* number of functions in current unit */
JSObjectBox *traceListHead; /* list of parsed object for GC tracing */
@ -1065,7 +1065,7 @@ struct Parser : private js::AutoGCRooter
/* Root atoms and objects allocated for the parsed tree. */
js::AutoKeepAtoms keepAtoms;
Parser(JSContext *cx, JSPrincipals *prin = NULL, JSStackFrame *cfp = NULL);
Parser(JSContext *cx, JSPrincipals *prin = NULL, StackFrame *cfp = NULL);
~Parser();
friend void js::AutoGCRooter::trace(JSTracer *trc);
@ -1238,7 +1238,7 @@ struct Compiler
Parser parser;
GlobalScope *globalScope;
Compiler(JSContext *cx, JSPrincipals *prin = NULL, JSStackFrame *cfp = NULL);
Compiler(JSContext *cx, JSPrincipals *prin = NULL, StackFrame *cfp = NULL);
/*
* Initialize a compiler. Parameters are passed on to init parser.
@ -1255,7 +1255,7 @@ struct Compiler
const char *filename, uintN lineno, JSVersion version);
static JSScript *
compileScript(JSContext *cx, JSObject *scopeChain, JSStackFrame *callerFrame,
compileScript(JSContext *cx, JSObject *scopeChain, StackFrame *callerFrame,
JSPrincipals *principals, uint32 tcflags,
const jschar *chars, size_t length,
const char *filename, uintN lineno, JSVersion version,


@ -135,7 +135,7 @@ PropertyCache::fill(JSContext *cx, JSObject *obj, uintN scopeIndex, uintN protoI
* Optimize the cached vword based on our parameters and the current pc's
* opcode format flags.
*/
pc = cx->regs->pc;
pc = cx->regs().pc;
op = js_GetOpcode(cx, cx->fp()->script(), pc);
cs = &js_CodeSpec[op];
kshape = 0;
@ -324,7 +324,7 @@ PropertyCache::fullTest(JSContext *cx, jsbytecode *pc, JSObject **objp, JSObject
JSObject *obj, *pobj, *tmp;
uint32 vcap;
JSStackFrame *fp = cx->fp();
StackFrame *fp = cx->fp();
JS_ASSERT(this == &JS_PROPERTY_CACHE(cx));
JS_ASSERT(uintN((fp->hasImacropc() ? fp->imacropc() : pc) - fp->script()->code)


@ -138,12 +138,18 @@ class ExecuteArgsGuard;
class InvokeFrameGuard;
class InvokeArgsGuard;
class InvokeSessionGuard;
class StringBuffer;
class TraceRecorder;
struct TraceMonitor;
class StackSpace;
class FrameRegs;
class StackFrame;
class StackSegment;
class StackSpace;
class ContextStack;
class FrameRegsIter;
class StringBuffer;
class CallReceiver;
class CallArgs;
struct Compiler;
struct Parser;


@ -49,8 +49,8 @@
#include "jsscope.h"
#include "jsgc.h"
#include "jsgcinlines.h"
#include "jscntxtinlines.h"
#include "jsgcinlines.h"
#include "jsobjinlines.h"
inline void


@ -68,7 +68,6 @@
#endif
#include "methodjit/MethodJIT.h"
#include "jsinterpinlines.h"
#include "jsobjinlines.h"
#include "jsscriptinlines.h"
@ -1225,7 +1224,7 @@ JSScript::NewScript(JSContext *cx, uint32 length, uint32 nsrcnotes, uint32 natom
script->compartment = cx->compartment;
#ifdef CHECK_SCRIPT_OWNER
script->owner = cx->thread;
script->owner = cx->thread();
#endif
JS_APPEND_LINK(&script->links, &cx->compartment->scripts);
@ -1470,7 +1469,7 @@ DestroyScript(JSContext *cx, JSScript *script)
JS_PROPERTY_CACHE(cx).purgeForScript(cx, script);
#ifdef CHECK_SCRIPT_OWNER
JS_ASSERT(script->owner == cx->thread);
JS_ASSERT(script->owner == cx->thread());
#endif
}
@ -1646,7 +1645,7 @@ js_GetSrcNoteCached(JSContext *cx, JSScript *script, jsbytecode *pc)
}
uintN
js_FramePCToLineNumber(JSContext *cx, JSStackFrame *fp)
js_FramePCToLineNumber(JSContext *cx, StackFrame *fp)
{
return js_PCToLineNumber(cx, fp->script(),
fp->hasImacropc() ? fp->imacropc() : fp->pc(cx));
@ -1662,7 +1661,7 @@ js_PCToLineNumber(JSContext *cx, JSScript *script, jsbytecode *pc)
jssrcnote *sn;
JSSrcNoteType type;
/* Cope with JSStackFrame.pc value prior to entering js_Interpret. */
/* Cope with StackFrame.pc value prior to entering js_Interpret. */
if (!pc)
return 0;
@ -1767,12 +1766,12 @@ js_GetScriptLineExtent(JSScript *script)
const char *
js::CurrentScriptFileAndLineSlow(JSContext *cx, uintN *linenop)
{
if (!cx->hasfp()) {
if (!cx->running()) {
*linenop = 0;
return NULL;
}
JSStackFrame *fp = cx->fp();
StackFrame *fp = cx->fp();
while (fp->isDummyFrame())
fp = fp->prev();


@ -738,7 +738,7 @@ js_GetSrcNoteCached(JSContext *cx, JSScript *script, jsbytecode *pc);
* fp->imacpc may be non-null, indicating an active imacro.
*/
extern uintN
js_FramePCToLineNumber(JSContext *cx, JSStackFrame *fp);
js_FramePCToLineNumber(JSContext *cx, js::StackFrame *fp);
extern uintN
js_PCToLineNumber(JSContext *cx, JSScript *script, jsbytecode *pc);


@ -102,9 +102,9 @@ inline const char *
CurrentScriptFileAndLine(JSContext *cx, uintN *linenop, LineOption opt)
{
if (opt == CALLED_FROM_JSOP_EVAL) {
JS_ASSERT(*cx->regs->pc == JSOP_EVAL);
JS_ASSERT(*(cx->regs->pc + JSOP_EVAL_LENGTH) == JSOP_LINENO);
*linenop = GET_UINT16(cx->regs->pc + JSOP_EVAL_LENGTH);
JS_ASSERT(*cx->regs().pc == JSOP_EVAL);
JS_ASSERT(*(cx->regs().pc + JSOP_EVAL_LENGTH) == JSOP_LINENO);
*linenop = GET_UINT16(cx->regs().pc + JSOP_EVAL_LENGTH);
return cx->fp()->script()->filename;
}


@ -76,7 +76,6 @@
#include "jsvector.h"
#include "jsversion.h"
#include "jscntxtinlines.h"
#include "jsinterpinlines.h"
#include "jsobjinlines.h"
#include "jsregexpinlines.h"
@ -2452,7 +2451,7 @@ str_replace_flat_lambda(JSContext *cx, uintN argc, Value *vp, ReplaceData &rdata
/* lambda(matchStr, matchStart, textstr) */
static const uint32 lambdaArgc = 3;
if (!cx->stack().pushInvokeArgs(cx, lambdaArgc, &rdata.singleShot))
if (!cx->stack.pushInvokeArgs(cx, lambdaArgc, &rdata.singleShot))
return false;
CallArgs &args = rdata.singleShot;
@ -2464,7 +2463,7 @@ str_replace_flat_lambda(JSContext *cx, uintN argc, Value *vp, ReplaceData &rdata
sp[1].setInt32(fm.match());
sp[2].setString(rdata.str);
if (!Invoke(cx, rdata.singleShot, 0))
if (!Invoke(cx, rdata.singleShot))
return false;
JSString *repstr = js_ValueToString(cx, args.rval());


@ -43,8 +43,8 @@
#include "jsatom.h"
#include "jsstr.h"
#include "jscntxtinlines.h"
#include "jsgcinlines.h"
#include "jscntxtinlines.h"
namespace js {

The diff for this file is not shown because of its large size.


@ -653,7 +653,7 @@ public:
JSScript *entryScript;
/* The stack frame where we started profiling. Only valid while profiling! */
JSStackFrame *entryfp;
StackFrame *entryfp;
/* The bytecode locations of the loop header and the back edge. */
jsbytecode *top, *bottom;
@ -716,12 +716,12 @@ public:
* and how many iterations we execute it.
*/
struct InnerLoop {
JSStackFrame *entryfp;
StackFrame *entryfp;
jsbytecode *top, *bottom;
uintN iters;
InnerLoop() {}
InnerLoop(JSStackFrame *entryfp, jsbytecode *top, jsbytecode *bottom)
InnerLoop(StackFrame *entryfp, jsbytecode *top, jsbytecode *bottom)
: entryfp(entryfp), top(top), bottom(bottom), iters(0) {}
};
@ -772,7 +772,7 @@ public:
return StackValue(false);
}
LoopProfile(TraceMonitor *tm, JSStackFrame *entryfp, jsbytecode *top, jsbytecode *bottom);
LoopProfile(TraceMonitor *tm, StackFrame *entryfp, jsbytecode *top, jsbytecode *bottom);
void reset();
@ -1188,9 +1188,9 @@ class TraceRecorder
JS_REQUIRES_STACK ptrdiff_t nativespOffsetImpl(const void* p) const;
JS_REQUIRES_STACK ptrdiff_t nativespOffset(const Value* p) const;
JS_REQUIRES_STACK void importImpl(tjit::Address addr, const void* p, JSValueType t,
const char *prefix, uintN index, JSStackFrame *fp);
const char *prefix, uintN index, StackFrame *fp);
JS_REQUIRES_STACK void import(tjit::Address addr, const Value* p, JSValueType t,
const char *prefix, uintN index, JSStackFrame *fp);
const char *prefix, uintN index, StackFrame *fp);
JS_REQUIRES_STACK void import(TreeFragment* tree, nanojit::LIns* sp, unsigned stackSlots,
unsigned callDepth, unsigned ngslots, JSValueType* typeMap);
void trackNativeStackUse(unsigned slots);
@ -1265,7 +1265,7 @@ class TraceRecorder
JS_REQUIRES_STACK nanojit::LIns* scopeChain();
JS_REQUIRES_STACK nanojit::LIns* entryScopeChain() const;
JS_REQUIRES_STACK nanojit::LIns* entryFrameIns() const;
JS_REQUIRES_STACK JSStackFrame* frameIfInRange(JSObject* obj, unsigned* depthp = NULL) const;
JS_REQUIRES_STACK StackFrame* frameIfInRange(JSObject* obj, unsigned* depthp = NULL) const;
JS_REQUIRES_STACK RecordingStatus traverseScopeChain(JSObject *obj, nanojit::LIns *obj_ins, JSObject *obj2, nanojit::LIns *&obj2_ins);
JS_REQUIRES_STACK AbortableRecordingStatus scopeChainProp(JSObject* obj, Value*& vp, nanojit::LIns*& ins, NameResult& nr, JSObject **scopeObjp = NULL);
JS_REQUIRES_STACK RecordingStatus callProp(JSObject* obj, JSProperty* shape, jsid id, Value*& vp, nanojit::LIns*& ins, NameResult& nr);
@ -1482,8 +1482,8 @@ class TraceRecorder
JS_REQUIRES_STACK RecordingStatus createThis(JSObject& ctor, nanojit::LIns* ctor_ins,
nanojit::LIns** thisobj_insp);
JS_REQUIRES_STACK RecordingStatus guardCallee(Value& callee);
JS_REQUIRES_STACK JSStackFrame *guardArguments(JSObject *obj, nanojit::LIns* obj_ins,
unsigned *depthp);
JS_REQUIRES_STACK StackFrame *guardArguments(JSObject *obj, nanojit::LIns* obj_ins,
unsigned *depthp);
JS_REQUIRES_STACK nanojit::LIns* guardArgsLengthNotAssigned(nanojit::LIns* argsobj_ins);
JS_REQUIRES_STACK void guardNotHole(nanojit::LIns* argsobj_ins, nanojit::LIns* ids_ins);
JS_REQUIRES_STACK RecordingStatus getClassPrototype(JSObject* ctor,
@ -1683,7 +1683,7 @@ class TraceRecorder
#define TRACE_2(x,a,b) TRACE_ARGS(x, (a, b))
extern JS_REQUIRES_STACK MonitorResult
MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount, JSInterpMode interpMode);
MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount, InterpMode interpMode);
extern JS_REQUIRES_STACK TracePointAction
RecordTracePoint(JSContext*, uintN& inlineCallCount, bool* blacklist);


@ -1241,87 +1241,5 @@ Debug_SetValueRangeToCrashOnTouch(Value *vec, size_t len)
#endif
}
/*
* Abstracts the layout of the (callee,this) receiver pair that is passed to
* natives and scripted functions.
*/
class CallReceiver
{
#ifdef DEBUG
mutable bool usedRval_;
#endif
protected:
Value *argv_;
CallReceiver() {}
CallReceiver(Value *argv) : argv_(argv) {
#ifdef DEBUG
usedRval_ = false;
#endif
}
public:
friend CallReceiver CallReceiverFromVp(Value *);
friend CallReceiver CallReceiverFromArgv(Value *);
Value *base() const { return argv_ - 2; }
JSObject &callee() const { JS_ASSERT(!usedRval_); return argv_[-2].toObject(); }
Value &calleev() const { JS_ASSERT(!usedRval_); return argv_[-2]; }
Value &thisv() const { return argv_[-1]; }
Value &rval() const {
#ifdef DEBUG
usedRval_ = true;
#endif
return argv_[-2];
}
void calleeHasBeenReset() const {
#ifdef DEBUG
usedRval_ = false;
#endif
}
};
JS_ALWAYS_INLINE CallReceiver
CallReceiverFromVp(Value *vp)
{
return CallReceiver(vp + 2);
}
JS_ALWAYS_INLINE CallReceiver
CallReceiverFromArgv(Value *argv)
{
return CallReceiver(argv);
}
/*
* Abstracts the layout of the stack passed to natives from the engine and from
* natives to js::Invoke.
*/
class CallArgs : public CallReceiver
{
uintN argc_;
protected:
CallArgs() {}
CallArgs(uintN argc, Value *argv) : CallReceiver(argv), argc_(argc) {}
public:
friend CallArgs CallArgsFromVp(uintN, Value *);
friend CallArgs CallArgsFromArgv(uintN, Value *);
Value &operator[](unsigned i) const { JS_ASSERT(i < argc_); return argv_[i]; }
Value *argv() const { return argv_; }
uintN argc() const { return argc_; }
};
JS_ALWAYS_INLINE CallArgs
CallArgsFromVp(uintN argc, Value *vp)
{
return CallArgs(argc, vp + 2);
}
JS_ALWAYS_INLINE CallArgs
CallArgsFromArgv(uintN argc, Value *argv)
{
return CallArgs(argc, argv);
}
} /* namespace js */
#endif /* jsvalue_h__ */
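
The CallReceiver/CallArgs helpers deleted above do not disappear: the "vm/Stack-inl.h" includes added elsewhere in this patch suggest they move into the new vm/Stack headers (an assumption; the new header itself is not shown here). A minimal usage sketch follows. MyEcho and the include path are illustrative, not part of the patch, but every CallArgs member used below is defined in the lines removed above.

    #include "jsapi.h"
    #include "vm/Stack.h"   // assumed new home of CallReceiver/CallArgs

    /* Hypothetical native: returns its first argument, or undefined if absent. */
    static JSBool
    MyEcho(JSContext *cx, uintN argc, js::Value *vp)
    {
        js::CallArgs args = js::CallArgsFromVp(argc, vp);  // wraps callee/this/argv
        if (args.argc() > 0)
            args.rval() = args[0];
        else
            args.rval().setUndefined();
        return JS_TRUE;
    }
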


@ -385,7 +385,7 @@ AutoCompartment::enter()
JS_ASSERT(scopeChain->isNative());
frame.construct();
if (!context->stack().pushDummyFrame(context, *scopeChain, &frame.ref())) {
if (!context->stack.pushDummyFrame(context, *scopeChain, &frame.ref())) {
context->compartment = origin;
return false;
}


@ -165,7 +165,7 @@ class AutoCompartment
JSCompartment * const destination;
private:
LazilyConstructed<DummyFrameGuard> frame;
JSFrameRegs regs;
FrameRegs regs;
AutoStringRooter input;
bool entered;


@ -57,7 +57,6 @@
#include "jsfun.h"
#include "jsgc.h"
#include "jsgcmark.h"
#include "jsinterp.h"
#include "jslock.h"
#include "jsnum.h"
#include "jsobj.h"
@ -72,11 +71,11 @@
#include "jsvector.h"
#include "jsatominlines.h"
#include "jscntxtinlines.h"
#include "jsinterpinlines.h"
#include "jsobjinlines.h"
#include "jsstrinlines.h"
#include "vm/Stack-inl.h"
#ifdef DEBUG
#include <string.h> /* for #ifdef DEBUG memset calls */
#endif
@ -1745,7 +1744,7 @@ ParseXMLSource(JSContext *cx, JSString *src)
filename = NULL;
lineno = 1;
if (!i.done()) {
JSStackFrame *fp = i.fp();
StackFrame *fp = i.fp();
op = (JSOp) *i.pc();
if (op == JSOP_TOXML || op == JSOP_TOXMLLIST) {
filename = fp->script()->filename;
@ -7307,8 +7306,7 @@ js_SetDefaultXMLNamespace(JSContext *cx, const Value &v)
if (!ns)
return JS_FALSE;
JSStackFrame *fp = js_GetTopStackFrame(cx);
JSObject &varobj = fp->varobj(cx);
JSObject &varobj = cx->stack.currentVarObj();
if (!varobj.defineProperty(cx, JS_DEFAULT_XML_NAMESPACE_ID, ObjectValue(*ns),
PropertyStub, StrictPropertyStub, JSPROP_PERMANENT)) {
return JS_FALSE;
@ -7386,7 +7384,7 @@ js_ValueToXMLString(JSContext *cx, const Value &v)
JSBool
js_GetAnyName(JSContext *cx, jsid *idp)
{
JSObject *global = cx->hasfp() ? cx->fp()->scopeChain().getGlobal() : cx->globalObject;
JSObject *global = cx->running() ? cx->fp()->scopeChain().getGlobal() : cx->globalObject;
Value v = global->getReservedSlot(JSProto_AnyName);
if (v.isUndefined()) {
JSObject *obj = NewNonFunction<WithProto::Given>(cx, &js_AnyNameClass, NULL, global);
@ -7627,7 +7625,7 @@ js_StepXMLListFilter(JSContext *cx, JSBool initialized)
JSXMLFilter *filter;
LeaveTrace(cx);
sp = Jsvalify(cx->regs->sp);
sp = Jsvalify(cx->regs().sp);
if (!initialized) {
/*
* We haven't iterated yet, so initialize the filter based on the


@ -589,7 +589,7 @@ static const JSC::MacroAssembler::RegisterID JSParamReg_Argc = JSC::SparcRegist
if (frameDepth >= 0) {
// sp = fp->slots() + frameDepth
// regs->sp = sp
addPtr(Imm32(sizeof(JSStackFrame) + frameDepth * sizeof(jsval)),
addPtr(Imm32(sizeof(StackFrame) + frameDepth * sizeof(jsval)),
JSFrameReg,
ClobberInCall);
storePtr(ClobberInCall, FrameAddress(offsetof(VMFrame, regs.sp)));
@ -605,11 +605,11 @@ static const JSC::MacroAssembler::RegisterID JSParamReg_Argc = JSC::SparcRegist
setupInfallibleVMFrame(frameDepth);
/* regs->fp = fp */
storePtr(JSFrameReg, FrameAddress(offsetof(VMFrame, regs.fp)));
storePtr(JSFrameReg, FrameAddress(VMFrame::offsetOfFp));
/* PC -> regs->pc :( */
storePtr(ImmPtr(pc),
FrameAddress(offsetof(VMFrame, regs) + offsetof(JSFrameRegs, pc)));
FrameAddress(offsetof(VMFrame, regs) + offsetof(FrameRegs, pc)));
}
// An infallible VM call is a stub call (taking a VMFrame & and one
@ -753,7 +753,7 @@ static const JSC::MacroAssembler::RegisterID JSParamReg_Argc = Assembler::JSPar
struct FrameFlagsAddress : JSC::MacroAssembler::Address
{
FrameFlagsAddress()
: Address(JSFrameReg, JSStackFrame::offsetOfFlags())
: Address(JSFrameReg, StackFrame::offsetOfFlags())
{}
};


@ -83,7 +83,7 @@ static const char *OpcodeNames[] = {
};
#endif
mjit::Compiler::Compiler(JSContext *cx, JSStackFrame *fp)
mjit::Compiler::Compiler(JSContext *cx, StackFrame *fp)
: BaseCompiler(cx),
fp(fp),
script(fp->script()),
@ -239,7 +239,7 @@ mjit::Compiler::~Compiler()
}
CompileStatus JS_NEVER_INLINE
mjit::TryCompile(JSContext *cx, JSStackFrame *fp)
mjit::TryCompile(JSContext *cx, StackFrame *fp)
{
JS_ASSERT(cx->fp() == fp);
@ -294,7 +294,7 @@ mjit::Compiler::generatePrologue()
Label fastPath = masm.label();
/* Store this early on so slow paths can access it. */
masm.storePtr(ImmPtr(fun), Address(JSFrameReg, JSStackFrame::offsetOfExec()));
masm.storePtr(ImmPtr(fun), Address(JSFrameReg, StackFrame::offsetOfExec()));
{
/*
@ -312,8 +312,8 @@ mjit::Compiler::generatePrologue()
stubcc.masm.move(JSParamReg_Argc, Registers::ArgReg1);
/* Slow path - call the arity check function. Returns new fp. */
stubcc.masm.storePtr(ImmPtr(fun), Address(JSFrameReg, JSStackFrame::offsetOfExec()));
stubcc.masm.storePtr(JSFrameReg, FrameAddress(offsetof(VMFrame, regs.fp)));
stubcc.masm.storePtr(ImmPtr(fun), Address(JSFrameReg, StackFrame::offsetOfExec()));
stubcc.masm.storePtr(JSFrameReg, FrameAddress(VMFrame::offsetOfFp));
OOL_STUBCALL(stubs::FixupArity);
stubcc.masm.move(Registers::ReturnReg, JSFrameReg);
stubcc.crossJump(stubcc.masm.jump(), fastPath);
@ -343,7 +343,7 @@ mjit::Compiler::generatePrologue()
*/
for (uint32 i = 0; i < script->nfixed; i++) {
if (analysis->localHasUseBeforeDef(i) || addTraceHints) {
Address local(JSFrameReg, sizeof(JSStackFrame) + i * sizeof(Value));
Address local(JSFrameReg, sizeof(StackFrame) + i * sizeof(Value));
masm.storeValue(UndefinedValue(), local);
}
}
@ -364,10 +364,11 @@ mjit::Compiler::generatePrologue()
*/
RegisterID t0 = Registers::ReturnReg;
Jump hasScope = masm.branchTest32(Assembler::NonZero,
FrameFlagsAddress(), Imm32(JSFRAME_HAS_SCOPECHAIN));
masm.loadPayload(Address(JSFrameReg, JSStackFrame::offsetOfCallee(fun)), t0);
FrameFlagsAddress(),
Imm32(StackFrame::HAS_SCOPECHAIN));
masm.loadPayload(Address(JSFrameReg, StackFrame::offsetOfCallee(fun)), t0);
masm.loadPtr(Address(t0, offsetof(JSObject, parent)), t0);
masm.storePtr(t0, Address(JSFrameReg, JSStackFrame::offsetOfScopeChain()));
masm.storePtr(t0, Address(JSFrameReg, StackFrame::offsetOfScopeChain()));
hasScope.linkTo(masm.label(), &masm);
}
}
@ -968,12 +969,12 @@ mjit::Compiler::generateMethod()
{
RegisterID reg = frame.allocReg();
masm.load32(FrameFlagsAddress(), reg);
masm.or32(Imm32(JSFRAME_HAS_RVAL), reg);
masm.or32(Imm32(StackFrame::HAS_RVAL), reg);
masm.store32(reg, FrameFlagsAddress());
frame.freeReg(reg);
FrameEntry *fe = frame.peek(-1);
frame.storeTo(fe, Address(JSFrameReg, JSStackFrame::offsetOfReturnValue()), true);
frame.storeTo(fe, Address(JSFrameReg, StackFrame::offsetOfReturnValue()), true);
frame.pop();
}
END_CASE(JSOP_POPV)
@ -2119,7 +2120,7 @@ mjit::Compiler::jsop_getglobal(uint32 index)
void
mjit::Compiler::emitFinalReturn(Assembler &masm)
{
masm.loadPtr(Address(JSFrameReg, JSStackFrame::offsetOfncode()), Registers::ReturnReg);
masm.loadPtr(Address(JSFrameReg, StackFrame::offsetOfNcode()), Registers::ReturnReg);
masm.jump(Registers::ReturnReg);
}
@ -2166,8 +2167,8 @@ mjit::Compiler::loadReturnValue(Assembler *masm, FrameEntry *fe)
if (analysis->usesReturnValue()) {
Jump rvalClear = masm->branchTest32(Assembler::Zero,
FrameFlagsAddress(),
Imm32(JSFRAME_HAS_RVAL));
Address rvalAddress(JSFrameReg, JSStackFrame::offsetOfReturnValue());
Imm32(StackFrame::HAS_RVAL));
Address rvalAddress(JSFrameReg, StackFrame::offsetOfReturnValue());
masm->loadValueAsComponents(rvalAddress, typeReg, dataReg);
rvalClear.linkTo(masm->label(), masm);
}
@ -2184,7 +2185,7 @@ mjit::Compiler::fixPrimitiveReturn(Assembler *masm, FrameEntry *fe)
JS_ASSERT(isConstructing);
bool ool = (masm != &this->masm);
Address thisv(JSFrameReg, JSStackFrame::offsetOfThis(fun));
Address thisv(JSFrameReg, StackFrame::offsetOfThis(fun));
// We can just load |thisv| if either of the following is true:
// (1) There is no explicit return value, AND fp->rval is not used.
@ -2256,8 +2257,8 @@ mjit::Compiler::emitReturn(FrameEntry *fe)
} else {
/* if (hasCallObj() || hasArgsObj()) */
Jump putObjs = masm.branchTest32(Assembler::NonZero,
Address(JSFrameReg, JSStackFrame::offsetOfFlags()),
Imm32(JSFRAME_HAS_CALL_OBJ | JSFRAME_HAS_ARGS_OBJ));
Address(JSFrameReg, StackFrame::offsetOfFlags()),
Imm32(StackFrame::HAS_CALL_OBJ | StackFrame::HAS_ARGS_OBJ));
stubcc.linkExit(putObjs, Uses(frame.frameSlots()));
stubcc.leave();
@ -2338,7 +2339,7 @@ mjit::Compiler::interruptCheckHelper()
* interrupt is on another thread.
*/
stubcc.masm.loadPtr(FrameAddress(offsetof(VMFrame, cx)), reg);
stubcc.masm.loadPtr(Address(reg, offsetof(JSContext, thread)), reg);
stubcc.masm.loadPtr(Address(reg, JSContext::threadOffset()), reg);
Address flag(reg, offsetof(JSThread, data.interruptFlags));
Jump noInterrupt = stubcc.masm.branchTest32(Assembler::Zero, flag);
#endif
@ -2377,16 +2378,16 @@ mjit::Compiler::emitUncachedCall(uint32 argc, bool callingNew)
Jump notCompiled = masm.branchTestPtr(Assembler::Zero, r0, r0);
masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.fp)), JSFrameReg);
masm.loadPtr(FrameAddress(VMFrame::offsetOfFp), JSFrameReg);
callPatch.hasFastNcode = true;
callPatch.fastNcodePatch =
masm.storePtrWithPatch(ImmPtr(NULL),
Address(JSFrameReg, JSStackFrame::offsetOfncode()));
Address(JSFrameReg, StackFrame::offsetOfNcode()));
masm.jump(r0);
callPatch.joinPoint = masm.label();
addReturnSite(callPatch.joinPoint, __LINE__);
masm.loadPtr(Address(JSFrameReg, JSStackFrame::offsetOfPrev()), JSFrameReg);
masm.loadPtr(Address(JSFrameReg, StackFrame::offsetOfPrev()), JSFrameReg);
frame.popn(argc + 2);
frame.takeReg(JSReturnReg_Type);
@ -2460,8 +2461,8 @@ mjit::Compiler::checkCallApplySpeculation(uint32 callImmArgc, uint32 speculatedA
RegisterID r0 = Registers::ReturnReg;
Jump notCompiled = stubcc.masm.branchTestPtr(Assembler::Zero, r0, r0);
stubcc.masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.fp)), JSFrameReg);
Address ncodeAddr(JSFrameReg, JSStackFrame::offsetOfncode());
stubcc.masm.loadPtr(FrameAddress(VMFrame::offsetOfFp), JSFrameReg);
Address ncodeAddr(JSFrameReg, StackFrame::offsetOfNcode());
uncachedCallPatch->hasSlowNcode = true;
uncachedCallPatch->slowNcodePatch = stubcc.masm.storePtrWithPatch(ImmPtr(NULL), ncodeAddr);
@ -2731,11 +2732,11 @@ mjit::Compiler::inlineCallHelper(uint32 callImmArgc, bool callingNew)
stubcc.masm.move(Imm32(callIC.frameSize.staticArgc()), JSParamReg_Argc);
else
stubcc.masm.load32(FrameAddress(offsetof(VMFrame, u.call.dynamicArgc)), JSParamReg_Argc);
stubcc.masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.fp)), JSFrameReg);
stubcc.masm.loadPtr(FrameAddress(VMFrame::offsetOfFp), JSFrameReg);
callPatch.hasSlowNcode = true;
callPatch.slowNcodePatch =
stubcc.masm.storePtrWithPatch(ImmPtr(NULL),
Address(JSFrameReg, JSStackFrame::offsetOfncode()));
Address(JSFrameReg, StackFrame::offsetOfNcode()));
stubcc.masm.jump(Registers::ReturnReg);
/*
@ -2764,7 +2765,7 @@ mjit::Compiler::inlineCallHelper(uint32 callImmArgc, bool callingNew)
uint32 flags = 0;
if (callingNew)
flags |= JSFRAME_CONSTRUCTING;
flags |= StackFrame::CONSTRUCTING;
InlineFrameAssembler inlFrame(masm, callIC, flags);
callPatch.hasFastNcode = true;
@ -2775,7 +2776,7 @@ mjit::Compiler::inlineCallHelper(uint32 callImmArgc, bool callingNew)
addReturnSite(callPatch.joinPoint, __LINE__);
if (lowerFunCallOrApply)
uncachedCallPatch.joinPoint = callIC.joinPoint;
masm.loadPtr(Address(JSFrameReg, JSStackFrame::offsetOfPrev()), JSFrameReg);
masm.loadPtr(Address(JSFrameReg, StackFrame::offsetOfPrev()), JSFrameReg);
/*
* We've placed hotJump, joinPoint and hotPathLabel, and no other labels are located by offset
@ -2818,7 +2819,7 @@ mjit::Compiler::inlineCallHelper(uint32 callImmArgc, bool callingNew)
/*
* This function must be called immediately after any instruction which could
* cause a new JSStackFrame to be pushed and could lead to a new debug trap
* cause a new StackFrame to be pushed and could lead to a new debug trap
* being set. This includes any API callbacks and any scripted or native call.
*/
void
@ -2830,7 +2831,7 @@ mjit::Compiler::addCallSite(const InternalCallSite &site)
void
mjit::Compiler::restoreFrameRegs(Assembler &masm)
{
masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.fp)), JSFrameReg);
masm.loadPtr(FrameAddress(VMFrame::offsetOfFp), JSFrameReg);
}
bool
@ -3163,7 +3164,7 @@ mjit::Compiler::jsop_callprop_generic(JSAtom *atom)
* since a sync will be needed for the upcoming call.
*/
uint32 thisvSlot = frame.localSlots();
Address thisv = Address(JSFrameReg, sizeof(JSStackFrame) + thisvSlot * sizeof(Value));
Address thisv = Address(JSFrameReg, sizeof(StackFrame) + thisvSlot * sizeof(Value));
#if defined JS_NUNBOX32
masm.storeValueFromComponents(pic.typeReg, pic.objReg, thisv);
@ -3622,7 +3623,7 @@ mjit::Compiler::jsop_bindname(JSAtom *atom, bool usePropCache)
pic.fastPathStart = masm.label();
Address parent(pic.objReg, offsetof(JSObject, parent));
masm.loadPtr(Address(JSFrameReg, JSStackFrame::offsetOfScopeChain()), pic.objReg);
masm.loadPtr(Address(JSFrameReg, StackFrame::offsetOfScopeChain()), pic.objReg);
pic.shapeGuard = masm.label();
Jump inlineJump = masm.branchPtr(Assembler::NotEqual, parent, ImmPtr(0));
@ -3689,7 +3690,7 @@ void
mjit::Compiler::jsop_bindname(JSAtom *atom, bool usePropCache)
{
RegisterID reg = frame.allocReg();
Address scopeChain(JSFrameReg, JSStackFrame::offsetOfScopeChain());
Address scopeChain(JSFrameReg, StackFrame::offsetOfScopeChain());
masm.loadPtr(scopeChain, reg);
Address address(reg, offsetof(JSObject, parent));


@ -326,7 +326,7 @@ class Compiler : public BaseCompiler
size_t offsetIndex;
};
JSStackFrame *fp;
StackFrame *fp;
JSScript *script;
JSObject *scopeChain;
JSObject *globalObj;
@ -372,7 +372,7 @@ class Compiler : public BaseCompiler
// follows interpreter usage in JSOP_LENGTH.
enum { LengthAtomIndex = uint32(-2) };
Compiler(JSContext *cx, JSStackFrame *fp);
Compiler(JSContext *cx, StackFrame *fp);
~Compiler();
CompileStatus compile();


@ -733,13 +733,13 @@ FrameState::addressOf(const FrameEntry *fe) const
{
int32 frameOffset = 0;
if (fe >= locals)
frameOffset = JSStackFrame::offsetOfFixed(uint32(fe - locals));
frameOffset = StackFrame::offsetOfFixed(uint32(fe - locals));
else if (fe >= args)
frameOffset = JSStackFrame::offsetOfFormalArg(fun, uint32(fe - args));
frameOffset = StackFrame::offsetOfFormalArg(fun, uint32(fe - args));
else if (fe == this_)
frameOffset = JSStackFrame::offsetOfThis(fun);
frameOffset = StackFrame::offsetOfThis(fun);
else if (fe == callee_)
frameOffset = JSStackFrame::offsetOfCallee(fun);
frameOffset = StackFrame::offsetOfCallee(fun);
JS_ASSERT(frameOffset);
return Address(JSFrameReg, frameOffset);
}


@ -741,13 +741,13 @@ class FrameState
void assertValidRegisterState() const;
#endif
// Return an address, relative to the JSStackFrame, that represents where
// Return an address, relative to the StackFrame, that represents where
// this FrameEntry is stored in memory. Note that this is its canonical
// address, not its backing store. There is no guarantee that the memory
// is coherent.
Address addressOf(const FrameEntry *fe) const;
// Returns an address, relative to the JSStackFrame, that represents where
// Returns an address, relative to the StackFrame, that represents where
// this FrameEntry is backed in memory. This is not necessarily its
// canonical address, but the address for which the payload has been synced
// to memory. The caller guarantees that the payload has been synced.


@ -62,7 +62,7 @@ struct AdjustedFrame {
/*
* This is used for emitting code to inline callee-side frame creation and
* should jit code equivalent to JSStackFrame::initCallFrameCallerHalf.
* should jit code equivalent to StackFrame::initCallFrameCallerHalf.
*
* Once finished, JSFrameReg is advanced to be the new fp.
*/
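
The fast path assembled below stores only three caller-side fields into the not-yet-pushed callee frame: the flags word, the previous frame pointer, and the native return address; the callee-side prologue (see initCallFrameEarlyPrologue/initCallFrameLatePrologue later in this patch) fills in the rest. A standalone C++ model of that "caller half", with an invented ModelFrame layout that is not the real StackFrame:

    // Invented layout; mirrors the store sequence emitted by assemble() below.
    #include <cassert>
    #include <stdint.h>

    struct ModelFrame {
        uint32_t    flags;   // e.g. StackFrame::FUNCTION | StackFrame::CONSTRUCTING
        ModelFrame *prev;    // caller's fp (the current JSFrameReg)
        void       *ncode;   // native return address, patched in later
    };

    static ModelFrame *pushCallerHalf(ModelFrame *callerFp, void *calleeSlot,
                                      uint32_t flags, void *returnAddress)
    {
        ModelFrame *newfp = static_cast<ModelFrame *>(calleeSlot);
        newfp->flags = flags;
        newfp->prev  = callerFp;
        newfp->ncode = returnAddress;
        return newfp;            // JSFrameReg is then advanced to this value
    }

    int main() {
        ModelFrame caller = { 0, 0, 0 };
        ModelFrame storage;
        ModelFrame *callee = pushCallerHalf(&caller, &storage, 1u, 0);
        assert(callee->prev == &caller);
        return 0;
    }
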
@ -105,23 +105,23 @@ class InlineFrameAssembler {
DataLabelPtr assemble(void *ncode)
{
JS_ASSERT((flags & ~JSFRAME_CONSTRUCTING) == 0);
JS_ASSERT((flags & ~StackFrame::CONSTRUCTING) == 0);
/* Generate JSStackFrame::initCallFrameCallerHalf. */
/* Generate StackFrame::initCallFrameCallerHalf. */
DataLabelPtr ncodePatch;
if (frameSize.isStatic()) {
uint32 frameDepth = frameSize.staticLocalSlots();
AdjustedFrame newfp(sizeof(JSStackFrame) + frameDepth * sizeof(Value));
AdjustedFrame newfp(sizeof(StackFrame) + frameDepth * sizeof(Value));
Address flagsAddr = newfp.addrOf(JSStackFrame::offsetOfFlags());
masm.store32(Imm32(JSFRAME_FUNCTION | flags), flagsAddr);
Address prevAddr = newfp.addrOf(JSStackFrame::offsetOfPrev());
Address flagsAddr = newfp.addrOf(StackFrame::offsetOfFlags());
masm.store32(Imm32(StackFrame::FUNCTION | flags), flagsAddr);
Address prevAddr = newfp.addrOf(StackFrame::offsetOfPrev());
masm.storePtr(JSFrameReg, prevAddr);
Address ncodeAddr = newfp.addrOf(JSStackFrame::offsetOfncode());
Address ncodeAddr = newfp.addrOf(StackFrame::offsetOfNcode());
ncodePatch = masm.storePtrWithPatch(ImmPtr(ncode), ncodeAddr);
masm.addPtr(Imm32(sizeof(JSStackFrame) + frameDepth * sizeof(Value)), JSFrameReg);
masm.addPtr(Imm32(sizeof(StackFrame) + frameDepth * sizeof(Value)), JSFrameReg);
} else {
/*
* If the frame size is dynamic, then the fast path generated by
@ -134,11 +134,11 @@ class InlineFrameAssembler {
RegisterID newfp = tempRegs.takeAnyReg();
masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.sp)), newfp);
Address flagsAddr(newfp, JSStackFrame::offsetOfFlags());
masm.store32(Imm32(JSFRAME_FUNCTION | flags), flagsAddr);
Address prevAddr(newfp, JSStackFrame::offsetOfPrev());
Address flagsAddr(newfp, StackFrame::offsetOfFlags());
masm.store32(Imm32(StackFrame::FUNCTION | flags), flagsAddr);
Address prevAddr(newfp, StackFrame::offsetOfPrev());
masm.storePtr(JSFrameReg, prevAddr);
Address ncodeAddr(newfp, JSStackFrame::offsetOfncode());
Address ncodeAddr(newfp, StackFrame::offsetOfNcode());
ncodePatch = masm.storePtrWithPatch(ImmPtr(ncode), ncodeAddr);
masm.move(newfp, JSFrameReg);


@ -81,13 +81,13 @@ using ic::Repatcher;
static jsbytecode *
FindExceptionHandler(JSContext *cx)
{
JSStackFrame *fp = cx->fp();
StackFrame *fp = cx->fp();
JSScript *script = fp->script();
top:
if (cx->isExceptionPending() && JSScript::isValidOffset(script->trynotesOffset)) {
// The PC is updated before every stub call, so we can use it here.
unsigned offset = cx->regs->pc - script->main;
unsigned offset = cx->regs().pc - script->main;
JSTryNoteArray *tnarray = script->trynotes();
for (unsigned i = 0; i < tnarray->length; ++i) {
@ -110,12 +110,12 @@ top:
// bytecode compiler cannot throw, so this is not possible.
if (offset - tn->start > tn->length)
continue;
if (tn->stackDepth > cx->regs->sp - fp->base())
if (tn->stackDepth > cx->regs().sp - fp->base())
continue;
jsbytecode *pc = script->main + tn->start + tn->length;
JSBool ok = js_UnwindScope(cx, tn->stackDepth, JS_TRUE);
JS_ASSERT(cx->regs->sp == fp->base() + tn->stackDepth);
JS_ASSERT(cx->regs().sp == fp->base() + tn->stackDepth);
switch (tn->kind) {
case JSTRY_CATCH:
@ -139,9 +139,9 @@ top:
* Push (true, exception) pair for finally to indicate that
* [retsub] should rethrow the exception.
*/
cx->regs->sp[0].setBoolean(true);
cx->regs->sp[1] = cx->getPendingException();
cx->regs->sp += 2;
cx->regs().sp[0].setBoolean(true);
cx->regs().sp[1] = cx->getPendingException();
cx->regs().sp += 2;
cx->clearPendingException();
return pc;
@ -157,8 +157,8 @@ top:
Value v = cx->getPendingException();
JS_ASSERT(js_GetOpcode(cx, fp->script(), pc) == JSOP_ENDITER);
cx->clearPendingException();
ok = !!js_CloseIterator(cx, &cx->regs->sp[-1].toObject());
cx->regs->sp -= 1;
ok = !!js_CloseIterator(cx, &cx->regs().sp[-1].toObject());
cx->regs().sp -= 1;
if (!ok)
goto top;
cx->setPendingException(v);
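
FindExceptionHandler above selects a handler with two tests: a single unsigned comparison that rejects pcs outside [start, start + length] (underflow makes offsets before the note wrap to a huge value), and a check that the operand stack is at least as deep as the handler expects. A standalone model of just that matching logic, with made-up numbers:

    // Model of the try-note matching tests above; TryNote here is a stand-in.
    #include <cassert>
    #include <stdint.h>

    struct TryNote { uint32_t start, length, stackDepth; };

    static bool covers(const TryNote &tn, uint32_t offset, uint32_t depth)
    {
        if (offset - tn.start > tn.length)   // also rejects offset < start (wraps)
            return false;
        return tn.stackDepth <= depth;       // enough operands must be on the stack
    }

    int main() {
        TryNote tn = { 10, 5, 2 };
        assert(covers(tn, 12, 3));
        assert(!covers(tn, 9, 3));     // before the protected range
        assert(!covers(tn, 16, 3));    // past the protected range
        assert(!covers(tn, 12, 1));    // operand stack too shallow for the handler
        return 0;
    }
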
@ -176,16 +176,9 @@ top:
static void
InlineReturn(VMFrame &f)
{
JSContext *cx = f.cx;
JSStackFrame *fp = f.regs.fp;
JS_ASSERT(f.fp() != f.entryfp);
JS_ASSERT(!js_IsActiveWithOrBlock(cx, &fp->scopeChain(), 0));
Value *newsp = fp->actualArgs() - 1;
newsp[-1] = fp->returnValue();
cx->stack().popInlineFrame(cx, fp->prev(), newsp);
JS_ASSERT(!js_IsActiveWithOrBlock(f.cx, &f.fp()->scopeChain(), 0));
f.cx->stack.popInlineFrame();
}
void JS_FASTCALL
@ -193,7 +186,7 @@ stubs::SlowCall(VMFrame &f, uint32 argc)
{
Value *vp = f.regs.sp - (argc + 2);
if (!Invoke(f.cx, InvokeArgsAlreadyOnTheStack(vp, argc), 0))
if (!Invoke(f.cx, InvokeArgsAlreadyOnTheStack(argc, vp)))
THROW();
}
@ -203,7 +196,7 @@ stubs::SlowNew(VMFrame &f, uint32 argc)
JSContext *cx = f.cx;
Value *vp = f.regs.sp - (argc + 2);
if (!InvokeConstructor(cx, InvokeArgsAlreadyOnTheStack(vp, argc)))
if (!InvokeConstructor(cx, InvokeArgsAlreadyOnTheStack(argc, vp)))
THROW();
}
@ -212,11 +205,9 @@ stubs::SlowNew(VMFrame &f, uint32 argc)
* on fp->exec.fun.
*/
static inline void
RemovePartialFrame(JSContext *cx, JSStackFrame *fp)
RemovePartialFrame(JSContext *cx, StackFrame *fp)
{
JSStackFrame *prev = fp->prev();
Value *newsp = (Value *)fp;
cx->stack().popInlineFrame(cx, prev, newsp);
cx->stack.popInlineFrame();
}
/*
@ -229,7 +220,8 @@ stubs::HitStackQuota(VMFrame &f)
/* Include space to push another frame. */
uintN nvals = f.fp()->script()->nslots + VALUES_PER_STACK_FRAME;
JS_ASSERT(f.regs.sp == f.fp()->base());
if (f.cx->stack().bumpCommitAndLimit(f.entryfp, f.regs.sp, nvals, &f.stackLimit))
StackSpace &space = f.cx->stack.space();
if (space.bumpLimitWithinQuota(NULL, f.entryfp, f.regs.sp, nvals, &f.stackLimit))
return;
/* Remove the current partially-constructed frame before throwing. */
@ -246,7 +238,7 @@ void * JS_FASTCALL
stubs::FixupArity(VMFrame &f, uint32 nactual)
{
JSContext *cx = f.cx;
JSStackFrame *oldfp = f.fp();
StackFrame *oldfp = f.fp();
JS_ASSERT(nactual != oldfp->numFormalArgs());
@ -261,13 +253,12 @@ stubs::FixupArity(VMFrame &f, uint32 nactual)
void *ncode = oldfp->nativeReturnAddress();
/* Pop the inline frame. */
f.fp() = oldfp->prev();
f.regs.sp = (Value*) oldfp;
f.regs.popPartialFrame((Value *)oldfp);
/* Reserve enough space for a callee frame. */
JSStackFrame *newfp = cx->stack().getInlineFrameWithinLimit(cx, (Value*) oldfp, nactual,
fun, fun->script(), &flags,
f.entryfp, &f.stackLimit);
StackFrame *newfp = cx->stack.getInlineFrameWithinLimit(cx, (Value*) oldfp, nactual,
fun, fun->script(), &flags,
f.entryfp, &f.stackLimit);
if (!newfp) {
/*
* The PC is not coherent with the current frame, so fix it up for
@ -295,7 +286,7 @@ stubs::CompileFunction(VMFrame &f, uint32 nactual)
* compile though because we could throw, so get a full, adjusted frame.
*/
JSContext *cx = f.cx;
JSStackFrame *fp = f.fp();
StackFrame *fp = f.fp();
/*
* Since we can only use members set by initCallFrameCallerHalf,
@ -312,7 +303,7 @@ stubs::CompileFunction(VMFrame &f, uint32 nactual)
fp->initCallFrameEarlyPrologue(fun, nactual);
if (nactual != fp->numFormalArgs()) {
fp = (JSStackFrame *)FixupArity(f, nactual);
fp = (StackFrame *)FixupArity(f, nactual);
if (!fp)
return NULL;
}
@ -321,9 +312,7 @@ stubs::CompileFunction(VMFrame &f, uint32 nactual)
fp->initCallFrameLatePrologue();
/* These would have been initialized by the prologue. */
f.regs.fp = fp;
f.regs.sp = fp->base();
f.regs.pc = script->code;
f.regs.prepareToRun(fp, script);
if (fun->isHeavyweight() && !js::CreateFunCallObject(cx, fp))
THROWV(NULL);
@ -352,10 +341,9 @@ UncachedInlineCall(VMFrame &f, uint32 flags, void **pret, bool *unjittable, uint
JSScript *newscript = newfun->script();
/* Get pointer to new frame/slots, prepare arguments. */
StackSpace &stack = cx->stack();
JSStackFrame *newfp = stack.getInlineFrameWithinLimit(cx, f.regs.sp, argc,
newfun, newscript, &flags,
f.entryfp, &f.stackLimit);
StackFrame *newfp = cx->stack.getInlineFrameWithinLimit(cx, f.regs.sp, argc,
newfun, newscript, &flags,
f.entryfp, &f.stackLimit);
if (JS_UNLIKELY(!newfp))
return false;
@ -364,8 +352,8 @@ UncachedInlineCall(VMFrame &f, uint32 flags, void **pret, bool *unjittable, uint
SetValueRangeToUndefined(newfp->slots(), newscript->nfixed);
/* Officially push the frame. */
stack.pushInlineFrame(cx, newscript, newfp, &f.regs);
JS_ASSERT(newfp == f.regs.fp);
cx->stack.pushInlineFrame(newscript, newfp, f.regs);
JS_ASSERT(newfp == f.fp());
/* Scope with a call object parented by callee's parent. */
if (newfun->isHeavyweight() && !js::CreateFunCallObject(cx, newfp))
@ -414,10 +402,10 @@ stubs::UncachedNewHelper(VMFrame &f, uint32 argc, UncachedCallResult *ucr)
/* Try to do a fast inline call before the general Invoke path. */
if (IsFunctionObject(*vp, &ucr->fun) && ucr->fun->isInterpretedConstructor()) {
ucr->callee = &vp->toObject();
if (!UncachedInlineCall(f, JSFRAME_CONSTRUCTING, &ucr->codeAddr, &ucr->unjittable, argc))
if (!UncachedInlineCall(f, StackFrame::CONSTRUCTING, &ucr->codeAddr, &ucr->unjittable, argc))
THROW();
} else {
if (!InvokeConstructor(cx, InvokeArgsAlreadyOnTheStack(vp, argc)))
if (!InvokeConstructor(cx, InvokeArgsAlreadyOnTheStack(argc, vp)))
THROW();
}
}
@ -435,13 +423,13 @@ stubs::Eval(VMFrame &f, uint32 argc)
{
Value *vp = f.regs.sp - (argc + 2);
if (!IsBuiltinEvalForScope(&f.regs.fp->scopeChain(), *vp)) {
if (!Invoke(f.cx, InvokeArgsAlreadyOnTheStack(vp, argc), 0))
if (!IsBuiltinEvalForScope(&f.fp()->scopeChain(), *vp)) {
if (!Invoke(f.cx, InvokeArgsAlreadyOnTheStack(argc, vp)))
THROW();
return;
}
JS_ASSERT(f.regs.fp == f.cx->fp());
JS_ASSERT(f.fp() == f.cx->fp());
if (!DirectEval(f.cx, CallArgsFromVp(argc, vp)))
THROW();
@ -473,7 +461,7 @@ stubs::UncachedCallHelper(VMFrame &f, uint32 argc, UncachedCallResult *ucr)
}
}
if (!Invoke(f.cx, InvokeArgsAlreadyOnTheStack(vp, argc), 0))
if (!Invoke(f.cx, InvokeArgsAlreadyOnTheStack(argc, vp)))
THROW();
return;
@ -483,7 +471,7 @@ void JS_FASTCALL
stubs::PutActivationObjects(VMFrame &f)
{
JS_ASSERT(f.fp()->hasCallObj() || f.fp()->hasArgsObj());
js::PutActivationObjects(f.cx, f.fp());
f.fp()->putActivationObjects();
}
extern "C" void *
@ -508,13 +496,13 @@ js_InternalThrow(VMFrame &f)
}
// Make sure sp is up to date.
JS_ASSERT(cx->regs == &f.regs);
JS_ASSERT(&cx->regs() == &f.regs);
// Call the throw hook if necessary
JSThrowHook handler = f.cx->debugHooks->throwHook;
if (handler) {
Value rval;
switch (handler(cx, cx->fp()->script(), cx->regs->pc, Jsvalify(&rval),
switch (handler(cx, cx->fp()->script(), cx->regs().pc, Jsvalify(&rval),
cx->debugHooks->throwHookData)) {
case JSTRAP_ERROR:
cx->clearPendingException();
@ -555,16 +543,16 @@ js_InternalThrow(VMFrame &f)
if (f.entryfp == f.fp())
break;
JS_ASSERT(f.regs.sp == cx->regs->sp);
JS_ASSERT(f.regs.sp == cx->regs().sp);
InlineReturn(f);
}
JS_ASSERT(f.regs.sp == cx->regs->sp);
JS_ASSERT(f.regs.sp == cx->regs().sp);
if (!pc)
return NULL;
JSStackFrame *fp = cx->fp();
StackFrame *fp = cx->fp();
JSScript *script = fp->script();
return script->nativeCodeForPC(fp->isConstructing(), pc);
}
@ -581,7 +569,7 @@ void JS_FASTCALL
stubs::CreateThis(VMFrame &f, JSObject *proto)
{
JSContext *cx = f.cx;
JSStackFrame *fp = f.fp();
StackFrame *fp = f.fp();
JSObject *callee = &fp->callee();
JSObject *obj = js_CreateThisForFunctionWithProto(cx, callee, proto);
if (!obj)
@ -610,7 +598,7 @@ stubs::ScriptDebugEpilogue(VMFrame &f)
* handler in the process.
*/
static inline bool
HandleErrorInExcessFrame(VMFrame &f, JSStackFrame *stopFp, bool searchedTopmostFrame = true)
HandleErrorInExcessFrame(VMFrame &f, StackFrame *stopFp, bool searchedTopmostFrame = true)
{
JSContext *cx = f.cx;
@ -621,7 +609,7 @@ HandleErrorInExcessFrame(VMFrame &f, JSStackFrame *stopFp, bool searchedTopmostF
*
* Note that this also guarantees ScriptEpilogue() has been called.
*/
JSStackFrame *fp = cx->fp();
StackFrame *fp = cx->fp();
if (searchedTopmostFrame) {
/*
* This is a special case meaning that fp->finishedInInterpreter() is
@ -647,7 +635,7 @@ HandleErrorInExcessFrame(VMFrame &f, JSStackFrame *stopFp, bool searchedTopmostF
/* Clear imacros. */
if (fp->hasImacropc()) {
cx->regs->pc = fp->imacropc();
cx->regs().pc = fp->imacropc();
fp->clearImacropc();
}
JS_ASSERT(!fp->hasImacropc());
@ -656,7 +644,7 @@ HandleErrorInExcessFrame(VMFrame &f, JSStackFrame *stopFp, bool searchedTopmostF
if (cx->isExceptionPending()) {
jsbytecode *pc = FindExceptionHandler(cx);
if (pc) {
cx->regs->pc = pc;
cx->regs().pc = pc;
returnOK = true;
break;
}
@ -672,7 +660,7 @@ HandleErrorInExcessFrame(VMFrame &f, JSStackFrame *stopFp, bool searchedTopmostF
InlineReturn(f);
}
JS_ASSERT(&f.regs == cx->regs);
JS_ASSERT(&f.regs == &cx->regs());
JS_ASSERT_IF(!returnOK, cx->fp() == stopFp);
return returnOK;
@ -682,12 +670,12 @@ HandleErrorInExcessFrame(VMFrame &f, JSStackFrame *stopFp, bool searchedTopmostF
static inline void *
AtSafePoint(JSContext *cx)
{
JSStackFrame *fp = cx->fp();
StackFrame *fp = cx->fp();
if (fp->hasImacropc())
return NULL;
JSScript *script = fp->script();
return script->maybeNativeCodeForPC(fp->isConstructing(), cx->regs->pc);
return script->maybeNativeCodeForPC(fp->isConstructing(), cx->regs().pc);
}
/*
@ -698,13 +686,13 @@ static inline JSBool
PartialInterpret(VMFrame &f)
{
JSContext *cx = f.cx;
JSStackFrame *fp = cx->fp();
StackFrame *fp = cx->fp();
#ifdef DEBUG
JSScript *script = fp->script();
JS_ASSERT(!fp->finishedInInterpreter());
JS_ASSERT(fp->hasImacropc() ||
!script->maybeNativeCodeForPC(fp->isConstructing(), cx->regs->pc));
!script->maybeNativeCodeForPC(fp->isConstructing(), cx->regs().pc));
#endif
JSBool ok = JS_TRUE;
@ -726,7 +714,7 @@ JS_STATIC_ASSERT(JSOP_NOP == 0);
static inline bool
FrameIsFinished(JSContext *cx)
{
JSOp op = JSOp(*cx->regs->pc);
JSOp op = JSOp(*cx->regs().pc);
return (op == JSOP_RETURN ||
op == JSOP_RETRVAL ||
op == JSOP_STOP)
@ -739,12 +727,12 @@ FrameIsFinished(JSContext *cx)
static inline void
AdvanceReturnPC(JSContext *cx)
{
JS_ASSERT(*cx->regs->pc == JSOP_CALL ||
*cx->regs->pc == JSOP_NEW ||
*cx->regs->pc == JSOP_EVAL ||
*cx->regs->pc == JSOP_FUNCALL ||
*cx->regs->pc == JSOP_FUNAPPLY);
cx->regs->pc += JSOP_CALL_LENGTH;
JS_ASSERT(*cx->regs().pc == JSOP_CALL ||
*cx->regs().pc == JSOP_NEW ||
*cx->regs().pc == JSOP_EVAL ||
*cx->regs().pc == JSOP_FUNCALL ||
*cx->regs().pc == JSOP_FUNAPPLY);
cx->regs().pc += JSOP_CALL_LENGTH;
}
@ -756,7 +744,7 @@ AdvanceReturnPC(JSContext *cx)
* (and faster) to finish frames in C++ even if at a safe point here.
*/
static bool
HandleFinishedFrame(VMFrame &f, JSStackFrame *entryFrame)
HandleFinishedFrame(VMFrame &f, StackFrame *entryFrame)
{
JSContext *cx = f.cx;
@ -791,7 +779,7 @@ HandleFinishedFrame(VMFrame &f, JSStackFrame *entryFrame)
*/
bool returnOK = true;
if (!cx->fp()->finishedInInterpreter()) {
if (JSOp(*cx->regs->pc) == JSOP_RETURN)
if (JSOp(*cx->regs().pc) == JSOP_RETURN)
cx->fp()->setReturnValue(f.regs.sp[-1]);
returnOK = ScriptEpilogue(cx, cx->fp(), true);
@ -821,10 +809,10 @@ HandleFinishedFrame(VMFrame &f, JSStackFrame *entryFrame)
* pushed by a call, that has method JIT'd code.
*/
static bool
EvaluateExcessFrame(VMFrame &f, JSStackFrame *entryFrame)
EvaluateExcessFrame(VMFrame &f, StackFrame *entryFrame)
{
JSContext *cx = f.cx;
JSStackFrame *fp = cx->fp();
StackFrame *fp = cx->fp();
/*
* A "finished" frame is when the interpreter rested on a STOP,
@ -851,7 +839,7 @@ EvaluateExcessFrame(VMFrame &f, JSStackFrame *entryFrame)
* always leave f.regs.fp == entryFrame.
*/
static bool
FinishExcessFrames(VMFrame &f, JSStackFrame *entryFrame)
FinishExcessFrames(VMFrame &f, StackFrame *entryFrame)
{
JSContext *cx = f.cx;
@ -940,7 +928,7 @@ RunTracer(VMFrame &f)
#endif
{
JSContext *cx = f.cx;
JSStackFrame *entryFrame = f.fp();
StackFrame *entryFrame = f.fp();
TracePointAction tpa;
/* :TODO: nuke PIC? */
@ -989,7 +977,6 @@ RunTracer(VMFrame &f)
// error failures correctly.
JS_ASSERT_IF(cx->isExceptionPending(), tpa == TPA_Error);
f.fp() = cx->fp();
JS_ASSERT(f.fp() == cx->fp());
switch (tpa) {
case TPA_Nothing:


@ -63,7 +63,7 @@ struct Registers {
static const RegisterID ScratchReg = JSC::X86Registers::r11;
#endif
// Register that homes the current JSStackFrame.
// Register that homes the current StackFrame.
#if defined(JS_CPU_X86) || defined(JS_CPU_X64)
static const RegisterID JSFrameReg = JSC::X86Registers::ebx;
#elif defined(JS_CPU_ARM)


@ -57,7 +57,7 @@ static const size_t CALLS_BEFORE_COMPILE = 16;
static const size_t BACKEDGES_BEFORE_COMPILE = 16;
static inline CompileStatus
CanMethodJIT(JSContext *cx, JSScript *script, JSStackFrame *fp, CompileRequest request)
CanMethodJIT(JSContext *cx, JSScript *script, StackFrame *fp, CompileRequest request)
{
if (!cx->methodJitEnabled)
return Compile_Abort;
@ -81,7 +81,7 @@ CanMethodJIT(JSContext *cx, JSScript *script, JSStackFrame *fp, CompileRequest r
* methodjit. If so, we compile the given function.
*/
static inline CompileStatus
CanMethodJITAtBranch(JSContext *cx, JSScript *script, JSStackFrame *fp, jsbytecode *pc)
CanMethodJITAtBranch(JSContext *cx, JSScript *script, StackFrame *fp, jsbytecode *pc)
{
if (!cx->methodJitEnabled)
return Compile_Abort;


@ -62,20 +62,20 @@ js::mjit::CompilerAllocPolicy::CompilerAllocPolicy(JSContext *cx, Compiler &comp
{
}
void
JSStackFrame::methodjitStaticAsserts()
StackFrame::methodjitStaticAsserts()
{
/* Static assert for x86 trampolines in MethodJIT.cpp. */
#if defined(JS_CPU_X86)
JS_STATIC_ASSERT(offsetof(JSStackFrame, rval_) == 0x18);
JS_STATIC_ASSERT(offsetof(JSStackFrame, rval_) + 4 == 0x1C);
JS_STATIC_ASSERT(offsetof(JSStackFrame, ncode_) == 0x14);
JS_STATIC_ASSERT(offsetof(StackFrame, rval_) == 0x18);
JS_STATIC_ASSERT(offsetof(StackFrame, rval_) + 4 == 0x1C);
JS_STATIC_ASSERT(offsetof(StackFrame, ncode_) == 0x14);
/* ARM uses decimal literals. */
JS_STATIC_ASSERT(offsetof(JSStackFrame, rval_) == 24);
JS_STATIC_ASSERT(offsetof(JSStackFrame, rval_) + 4 == 28);
JS_STATIC_ASSERT(offsetof(JSStackFrame, ncode_) == 20);
JS_STATIC_ASSERT(offsetof(StackFrame, rval_) == 24);
JS_STATIC_ASSERT(offsetof(StackFrame, rval_) + 4 == 28);
JS_STATIC_ASSERT(offsetof(StackFrame, ncode_) == 20);
#elif defined(JS_CPU_X64)
JS_STATIC_ASSERT(offsetof(JSStackFrame, rval_) == 0x30);
JS_STATIC_ASSERT(offsetof(JSStackFrame, ncode_) == 0x28);
JS_STATIC_ASSERT(offsetof(StackFrame, rval_) == 0x30);
JS_STATIC_ASSERT(offsetof(StackFrame, ncode_) == 0x28);
#endif
}
@ -119,7 +119,7 @@ extern "C" void JS_FASTCALL
PushActiveVMFrame(VMFrame &f)
{
f.entryfp->script()->compartment->jaegerCompartment->pushActiveFrame(&f);
f.regs.fp->setNativeReturnAddress(JS_FUNC_TO_DATA_PTR(void*, JaegerTrampolineReturn));
f.regs.fp()->setNativeReturnAddress(JS_FUNC_TO_DATA_PTR(void*, JaegerTrampolineReturn));
}
extern "C" void JS_FASTCALL
@ -131,7 +131,8 @@ PopActiveVMFrame(VMFrame &f)
extern "C" void JS_FASTCALL
SetVMFrameRegs(VMFrame &f)
{
f.cx->setCurrentRegs(&f.regs);
/* Restored on exit from EnterMethodJIT. */
f.cx->stack.repointRegs(&f.regs);
}
#if defined(__APPLE__) || (defined(XP_WIN) && !defined(JS_CPU_X64)) || defined(XP_OS2)
@ -140,7 +141,7 @@ SetVMFrameRegs(VMFrame &f)
# define SYMBOL_STRING(name) #name
#endif
JS_STATIC_ASSERT(offsetof(JSFrameRegs, sp) == 0);
JS_STATIC_ASSERT(offsetof(FrameRegs, sp) == 0);
#if defined(__linux__) && defined(JS_CPU_X64)
# define SYMBOL_STRING_RELOC(name) #name "@plt"
@ -179,7 +180,7 @@ JS_STATIC_ASSERT(sizeof(VMFrame) % 16 == 0);
* *** DANGER ***
*/
JS_STATIC_ASSERT(offsetof(VMFrame, savedRBX) == 0x58);
JS_STATIC_ASSERT(offsetof(VMFrame, regs.fp) == 0x38);
JS_STATIC_ASSERT(VMFrame::offsetOfFp == 0x38);
JS_STATIC_ASSERT(JSVAL_TAG_MASK == 0xFFFF800000000000LL);
JS_STATIC_ASSERT(JSVAL_PAYLOAD_MASK == 0x00007FFFFFFFFFFFLL);
@ -284,7 +285,7 @@ SYMBOL_STRING(JaegerThrowpoline) ":" "\n"
* *** DANGER ***
*/
JS_STATIC_ASSERT(offsetof(VMFrame, savedEBX) == 0x2c);
JS_STATIC_ASSERT(offsetof(VMFrame, regs.fp) == 0x1C);
JS_STATIC_ASSERT((VMFrame::offsetOfFp) == 0x1C);
asm (
".text\n"
@ -370,7 +371,7 @@ JS_STATIC_ASSERT(offsetof(VMFrame, savedLR) == (4*19));
JS_STATIC_ASSERT(offsetof(VMFrame, entryfp) == (4*10));
JS_STATIC_ASSERT(offsetof(VMFrame, stackLimit) == (4*9));
JS_STATIC_ASSERT(offsetof(VMFrame, cx) == (4*8));
JS_STATIC_ASSERT(offsetof(VMFrame, regs.fp) == (4*7));
JS_STATIC_ASSERT(VMFrame::offsetOfFp == (4*7));
JS_STATIC_ASSERT(offsetof(VMFrame, unused) == (4*4));
JS_STATIC_ASSERT(offsetof(VMFrame, previous) == (4*3));
@ -520,11 +521,11 @@ SYMBOL_STRING(JaegerStubVeneer) ":" "\n"
* *** DANGER ***
*/
JS_STATIC_ASSERT(offsetof(VMFrame, savedEBX) == 0x2c);
JS_STATIC_ASSERT(offsetof(VMFrame, regs.fp) == 0x1C);
JS_STATIC_ASSERT(VMFrame::offsetOfFp == 0x1C);
extern "C" {
__declspec(naked) JSBool JaegerTrampoline(JSContext *cx, JSStackFrame *fp, void *code,
__declspec(naked) JSBool JaegerTrampoline(JSContext *cx, StackFrame *fp, void *code,
Value *stackLimit)
{
__asm {
@ -615,7 +616,7 @@ extern "C" {
* *** DANGER ***
*/
JS_STATIC_ASSERT(offsetof(VMFrame, savedRBX) == 0x58);
JS_STATIC_ASSERT(offsetof(VMFrame, regs.fp) == 0x38);
JS_STATIC_ASSERT(VMFrame::offsetOfFp == 0x38);
JS_STATIC_ASSERT(JSVAL_TAG_MASK == 0xFFFF800000000000LL);
JS_STATIC_ASSERT(JSVAL_PAYLOAD_MASK == 0x00007FFFFFFFFFFFLL);
@ -660,10 +661,10 @@ JaegerCompartment::Finish()
}
extern "C" JSBool
JaegerTrampoline(JSContext *cx, JSStackFrame *fp, void *code, Value *stackLimit);
JaegerTrampoline(JSContext *cx, StackFrame *fp, void *code, Value *stackLimit);
JSBool
mjit::EnterMethodJIT(JSContext *cx, JSStackFrame *fp, void *code, Value *stackLimit)
mjit::EnterMethodJIT(JSContext *cx, StackFrame *fp, void *code, Value *stackLimit)
{
#ifdef JS_METHODJIT_SPEW
Profiler prof;
@ -674,8 +675,8 @@ mjit::EnterMethodJIT(JSContext *cx, JSStackFrame *fp, void *code, Value *stackLi
prof.start();
#endif
JS_ASSERT(cx->regs->fp == fp);
JSFrameRegs *oldRegs = cx->regs;
JS_ASSERT(cx->fp() == fp);
FrameRegs &oldRegs = cx->regs();
JSBool ok;
{
@ -684,7 +685,8 @@ mjit::EnterMethodJIT(JSContext *cx, JSStackFrame *fp, void *code, Value *stackLi
ok = JaegerTrampoline(cx, fp, code, stackLimit);
}
cx->setCurrentRegs(oldRegs);
/* Undo repointRegs in SetVMFrameRegs. */
cx->stack.repointRegs(&oldRegs);
JS_ASSERT(fp == cx->fp());
/* The trampoline wrote the return value but did not set the HAS_RVAL flag. */
@ -702,11 +704,11 @@ mjit::EnterMethodJIT(JSContext *cx, JSStackFrame *fp, void *code, Value *stackLi
}
static inline JSBool
CheckStackAndEnterMethodJIT(JSContext *cx, JSStackFrame *fp, void *code)
CheckStackAndEnterMethodJIT(JSContext *cx, StackFrame *fp, void *code)
{
JS_CHECK_RECURSION(cx, return false);
Value *stackLimit = cx->stack().getStackLimit(cx);
Value *stackLimit = cx->stack.space().getStackLimit(cx);
if (!stackLimit)
return false;
@ -716,7 +718,7 @@ CheckStackAndEnterMethodJIT(JSContext *cx, JSStackFrame *fp, void *code)
JSBool
mjit::JaegerShot(JSContext *cx)
{
JSStackFrame *fp = cx->fp();
StackFrame *fp = cx->fp();
JSScript *script = fp->script();
JITScript *jit = script->getJIT(fp->isConstructing());
@ -725,7 +727,7 @@ mjit::JaegerShot(JSContext *cx)
AbortRecording(cx, "attempt to enter method JIT while recording");
#endif
JS_ASSERT(cx->regs->pc == script->code);
JS_ASSERT(cx->regs().pc == script->code);
return CheckStackAndEnterMethodJIT(cx, cx->fp(), jit->invokeEntry);
}


@ -107,10 +107,10 @@ struct VMFrame
VMFrame *previous;
void *unused;
JSFrameRegs regs;
FrameRegs regs;
JSContext *cx;
Value *stackLimit;
JSStackFrame *entryfp;
StackFrame *entryfp;
#if defined(JS_CPU_X86)
void *savedEBX;
@ -178,8 +178,13 @@ struct VMFrame
JSRuntime *runtime() { return cx->runtime; }
JSStackFrame *&fp() { return regs.fp; }
StackFrame *fp() { return regs.fp(); }
mjit::JITScript *jit() { return fp()->jit(); }
static const size_t offsetOfFp = 5 * sizeof(void *) + FrameRegs::offsetOfFp;
static void staticAssert() {
JS_STATIC_ASSERT(offsetOfFp == offsetof(VMFrame, regs) + FrameRegs::offsetOfFp);
}
};
#ifdef JS_CPU_ARM
@ -425,7 +430,7 @@ struct JITScript {
* Execute the given mjit code. This is a low-level call and callers must
* provide the same guarantees as JaegerShot/CheckStackAndEnterMethodJIT.
*/
JSBool EnterMethodJIT(JSContext *cx, JSStackFrame *fp, void *code, Value *stackLimit);
JSBool EnterMethodJIT(JSContext *cx, StackFrame *fp, void *code, Value *stackLimit);
/* Execute a method that has been JIT compiled. */
JSBool JaegerShot(JSContext *cx);
@ -445,7 +450,7 @@ void JS_FASTCALL
ProfileStubCall(VMFrame &f);
CompileStatus JS_NEVER_INLINE
TryCompile(JSContext *cx, JSStackFrame *fp);
TryCompile(JSContext *cx, StackFrame *fp);
void
ReleaseScriptCode(JSContext *cx, JSScript *script);


@ -652,7 +652,7 @@ class CallCompiler : public BaseCompiler
Jump hasCode = masm.branchPtr(Assembler::Above, t0, ImmPtr(JS_UNJITTABLE_SCRIPT));
/* Try and compile. On success we get back the nmap pointer. */
masm.storePtr(JSFrameReg, FrameAddress(offsetof(VMFrame, regs.fp)));
masm.storePtr(JSFrameReg, FrameAddress(VMFrame::offsetOfFp));
void *compilePtr = JS_FUNC_TO_DATA_PTR(void *, stubs::CompileFunction);
if (ic.frameSize.isStatic()) {
masm.move(Imm32(ic.frameSize.staticArgc()), Registers::ArgReg1);
@ -661,7 +661,7 @@ class CallCompiler : public BaseCompiler
masm.load32(FrameAddress(offsetof(VMFrame, u.call.dynamicArgc)), Registers::ArgReg1);
masm.fallibleVMCall(compilePtr, script->code, -1);
}
masm.loadPtr(FrameAddress(offsetof(VMFrame, regs.fp)), JSFrameReg);
masm.loadPtr(FrameAddress(VMFrame::offsetOfFp), JSFrameReg);
Jump notCompiled = masm.branchTestPtr(Assembler::Zero, Registers::ReturnReg,
Registers::ReturnReg);
@ -777,7 +777,7 @@ class CallCompiler : public BaseCompiler
JITScript *jit = f.jit();
/* Snapshot the frameDepth before SplatApplyArgs modifies it. */
uintN initialFrameDepth = f.regs.sp - f.regs.fp->slots();
uintN initialFrameDepth = f.regs.sp - f.regs.fp()->slots();
/*
* SplatApplyArgs has not been called, so we call it here before
@ -785,7 +785,7 @@ class CallCompiler : public BaseCompiler
*/
Value *vp;
if (ic.frameSize.isStatic()) {
JS_ASSERT(f.regs.sp - f.regs.fp->slots() == (int)ic.frameSize.staticLocalSlots());
JS_ASSERT(f.regs.sp - f.regs.fp()->slots() == (int)ic.frameSize.staticLocalSlots());
vp = f.regs.sp - (2 + ic.frameSize.staticArgc());
} else {
JS_ASSERT(*f.regs.pc == JSOP_FUNAPPLY && GET_ARGC(f.regs.pc) == 2);
@ -839,18 +839,18 @@ class CallCompiler : public BaseCompiler
RegisterID t0 = tempRegs.takeAnyReg();
/* Store pc. */
masm.storePtr(ImmPtr(cx->regs->pc),
masm.storePtr(ImmPtr(cx->regs().pc),
FrameAddress(offsetof(VMFrame, regs.pc)));
/* Store sp (if not already set by ic::SplatApplyArgs). */
if (ic.frameSize.isStatic()) {
uint32 spOffset = sizeof(JSStackFrame) + initialFrameDepth * sizeof(Value);
uint32 spOffset = sizeof(StackFrame) + initialFrameDepth * sizeof(Value);
masm.addPtr(Imm32(spOffset), JSFrameReg, t0);
masm.storePtr(t0, FrameAddress(offsetof(VMFrame, regs.sp)));
}
/* Store fp. */
masm.storePtr(JSFrameReg, FrameAddress(offsetof(VMFrame, regs.fp)));
masm.storePtr(JSFrameReg, FrameAddress(VMFrame::offsetOfFp));
/* Grab cx. */
#ifdef JS_CPU_X86
@ -868,7 +868,7 @@ class CallCompiler : public BaseCompiler
#endif
MaybeRegisterID argcReg;
if (ic.frameSize.isStatic()) {
uint32 vpOffset = sizeof(JSStackFrame) + (vp - f.regs.fp->slots()) * sizeof(Value);
uint32 vpOffset = sizeof(StackFrame) + (vp - f.regs.fp()->slots()) * sizeof(Value);
masm.addPtr(Imm32(vpOffset), JSFrameReg, vpReg);
} else {
argcReg = tempRegs.takeAnyReg();
@ -972,7 +972,7 @@ class CallCompiler : public BaseCompiler
JSObject *callee = ucr.callee;
JS_ASSERT(callee);
uint32 flags = callingNew ? JSFRAME_CONSTRUCTING : 0;
uint32 flags = callingNew ? StackFrame::CONSTRUCTING : 0;
if (!ic.hit) {
ic.hit = true;
@ -1036,7 +1036,6 @@ ic::NativeNew(VMFrame &f, CallICInfo *ic)
}
static const unsigned MANY_ARGS = 1024;
static const unsigned MIN_SPACE = 500;
static bool
BumpStackFull(VMFrame &f, uintN inc)
@ -1045,12 +1044,8 @@ BumpStackFull(VMFrame &f, uintN inc)
if (inc < MANY_ARGS) {
if (f.regs.sp + inc < f.stackLimit)
return true;
StackSpace &stack = f.cx->stack();
if (!stack.bumpCommitAndLimit(f.entryfp, f.regs.sp, inc, &f.stackLimit)) {
js_ReportOverRecursed(f.cx);
return false;
}
return true;
StackSpace &space = f.cx->stack.space();
return space.bumpLimitWithinQuota(f.cx, f.entryfp, f.regs.sp, inc, &f.stackLimit);
}
/*
@ -1065,20 +1060,8 @@ BumpStackFull(VMFrame &f, uintN inc)
* However, since each apply call must consume at least MANY_ARGS slots,
* this sequence will quickly reach the end of the stack and OOM.
*/
uintN incWithSpace = inc + MIN_SPACE;
Value *bumpedWithSpace = f.regs.sp + incWithSpace;
if (bumpedWithSpace < f.stackLimit)
return true;
StackSpace &stack = f.cx->stack();
if (stack.bumpCommitAndLimit(f.entryfp, f.regs.sp, incWithSpace, &f.stackLimit))
return true;
if (!stack.ensureSpace(f.cx, f.regs.sp, incWithSpace))
return false;
f.stackLimit = bumpedWithSpace;
return true;
StackSpace &space = f.cx->stack.space();
return space.bumpLimit(f.cx, f.entryfp, f.regs.sp, inc, &f.stackLimit);
}
static JS_ALWAYS_INLINE bool
@ -1117,7 +1100,7 @@ ic::SplatApplyArgs(VMFrame &f)
Value *vp = f.regs.sp - 3;
JS_ASSERT(JS_CALLEE(cx, vp).toObject().getFunctionPrivate()->u.n.native == js_fun_apply);
JSStackFrame *fp = f.regs.fp;
StackFrame *fp = f.regs.fp();
if (!fp->hasOverriddenArgs()) {
uintN n;
if (!fp->hasArgsObj()) {


@ -404,8 +404,8 @@ class SetPropCompiler : public PICStubCompiler
{
Address addr(pic.shapeReg, shape->setterOp() == SetCallArg
? JSStackFrame::offsetOfFormalArg(fun, slot)
: JSStackFrame::offsetOfFixed(slot));
? StackFrame::offsetOfFormalArg(fun, slot)
: StackFrame::offsetOfFixed(slot));
masm.storeValue(pic.u.vr, addr);
skipOver = masm.jump();
}
@ -907,7 +907,7 @@ class GetPropCompiler : public PICStubCompiler
* up in the fast path, or put this offset in PICInfo?
*/
uint32 thisvOffset = uint32(f.regs.sp - f.fp()->slots()) - 1;
Address thisv(JSFrameReg, sizeof(JSStackFrame) + thisvOffset * sizeof(Value));
Address thisv(JSFrameReg, sizeof(StackFrame) + thisvOffset * sizeof(Value));
masm.storeValueFromComponents(ImmType(JSVAL_TYPE_STRING),
pic.objReg, thisv);
@ -1247,7 +1247,7 @@ class ScopeNameCompiler : public PICStubCompiler
/* For GETXPROP, the object is already in objReg. */
if (pic.kind == ic::PICInfo::NAME)
masm.loadPtr(Address(JSFrameReg, JSStackFrame::offsetOfScopeChain()), pic.objReg);
masm.loadPtr(Address(JSFrameReg, StackFrame::offsetOfScopeChain()), pic.objReg);
JS_ASSERT(obj == getprop.holder);
JS_ASSERT(getprop.holder == scopeChain->getGlobal());
@ -1315,7 +1315,7 @@ class ScopeNameCompiler : public PICStubCompiler
/* For GETXPROP, the object is already in objReg. */
if (pic.kind == ic::PICInfo::NAME)
masm.loadPtr(Address(JSFrameReg, JSStackFrame::offsetOfScopeChain()), pic.objReg);
masm.loadPtr(Address(JSFrameReg, StackFrame::offsetOfScopeChain()), pic.objReg);
JS_ASSERT(obj == getprop.holder);
JS_ASSERT(getprop.holder != scopeChain->getGlobal());
@ -1352,8 +1352,8 @@ class ScopeNameCompiler : public PICStubCompiler
/* Not-escaped case. */
{
Address addr(pic.shapeReg, kind == ARG ? JSStackFrame::offsetOfFormalArg(fun, slot)
: JSStackFrame::offsetOfFixed(slot));
Address addr(pic.shapeReg, kind == ARG ? StackFrame::offsetOfFormalArg(fun, slot)
: StackFrame::offsetOfFixed(slot));
masm.loadPayload(addr, pic.objReg);
masm.loadTypeTag(addr, pic.shapeReg);
skipOver = masm.jump();
@ -1459,7 +1459,7 @@ class ScopeNameCompiler : public PICStubCompiler
/* Kludge to allow (typeof foo == "undefined") tests. */
disable("property not found");
if (pic.kind == ic::PICInfo::NAME) {
JSOp op2 = js_GetOpcode(cx, script, cx->regs->pc + JSOP_NAME_LENGTH);
JSOp op2 = js_GetOpcode(cx, script, cx->regs().pc + JSOP_NAME_LENGTH);
if (op2 == JSOP_TYPEOF) {
vp->setUndefined();
return true;
@ -1531,7 +1531,7 @@ class BindNameCompiler : public PICStubCompiler
BindNameLabels &labels = pic.bindNameLabels();
/* Guard on the shape of the scope chain. */
masm.loadPtr(Address(JSFrameReg, JSStackFrame::offsetOfScopeChain()), pic.objReg);
masm.loadPtr(Address(JSFrameReg, StackFrame::offsetOfScopeChain()), pic.objReg);
masm.loadShape(pic.objReg, pic.shapeReg);
Jump firstShape = masm.branch32(Assembler::NotEqual, pic.shapeReg,
Imm32(scopeChain->shape()));
@ -1751,7 +1751,7 @@ void JS_FASTCALL
ic::CallProp(VMFrame &f, ic::PICInfo *pic)
{
JSContext *cx = f.cx;
JSFrameRegs &regs = f.regs;
FrameRegs &regs = f.regs;
JSScript *script = f.fp()->script();
@ -2102,8 +2102,8 @@ GetElementIC::attachGetProp(JSContext *cx, JSObject *obj, const Value &v, jsid i
if (op == JSOP_CALLELEM) {
// Emit a write of |obj| to the top of the stack, before we lose it.
Value *thisVp = &cx->regs->sp[-1];
Address thisSlot(JSFrameReg, JSStackFrame::offsetOfFixed(thisVp - cx->fp()->slots()));
Value *thisVp = &cx->regs().sp[-1];
Address thisSlot(JSFrameReg, StackFrame::offsetOfFixed(thisVp - cx->fp()->slots()));
masm.storeValueFromComponents(ImmType(JSVAL_TYPE_OBJECT), objReg, thisSlot);
}


@ -88,7 +88,7 @@ struct StateRemat {
// representation in a struct or union. This prevents bloating the IC
// structs by an extra 8 bytes in some cases. 16 bits are needed to encode
// the largest local:
// ((UINT16_LIMIT - 1) * sizeof(Value) + sizeof(JSStackFrame),
// ((UINT16_LIMIT - 1) * sizeof(Value) + sizeof(StackFrame),
// And an extra bit for the sign on arguments.
#define MIN_STATE_REMAT_BITS 21
@ -96,7 +96,7 @@ struct StateRemat {
bool inRegister() const { return offset_ >= 0 &&
offset_ <= int32(JSC::MacroAssembler::TotalRegisters); }
bool inMemory() const {
return offset_ >= int32(sizeof(JSStackFrame)) ||
return offset_ >= int32(sizeof(StackFrame)) ||
offset_ < 0;
}


@ -123,8 +123,8 @@ Recompiler::recompile()
Vector<PatchableAddress> normalPatches(cx);
Vector<PatchableAddress> ctorPatches(cx);
JSStackFrame *firstCtorFrame = NULL;
JSStackFrame *firstNormalFrame = NULL;
StackFrame *firstCtorFrame = NULL;
StackFrame *firstNormalFrame = NULL;
// Find all JIT'd stack frames to account for return addresses that will
// need to be patched after recompilation.
@ -133,8 +133,8 @@ Recompiler::recompile()
f = f->previous) {
// Scan all frames owned by this VMFrame.
JSStackFrame *end = f->entryfp->prev();
for (JSStackFrame *fp = f->fp(); fp != end; fp = fp->prev()) {
StackFrame *end = f->entryfp->prev();
for (StackFrame *fp = f->fp(); fp != end; fp = fp->prev()) {
// Remember the latest frame for each type of JIT'd code, so the
// compiler will have a frame to re-JIT from.
if (!firstCtorFrame && fp->script() == script && fp->isConstructing())
@ -198,7 +198,7 @@ Recompiler::saveTraps(JITScript *jit, Vector<CallSite> *sites)
}
bool
Recompiler::recompile(JSStackFrame *fp, Vector<PatchableAddress> &patches,
Recompiler::recompile(StackFrame *fp, Vector<PatchableAddress> &patches,
Vector<CallSite> &sites)
{
/* If we get this far, the script is live, and we better be safe to re-jit. */


@ -98,7 +98,7 @@ private:
PatchableAddress findPatch(JITScript *jit, void **location);
void applyPatch(Compiler& c, PatchableAddress& toPatch);
bool recompile(JSStackFrame *fp, Vector<PatchableAddress> &patches,
bool recompile(StackFrame *fp, Vector<PatchableAddress> &patches,
Vector<CallSite> &sites);
bool saveTraps(JITScript *jit, Vector<CallSite> *sites);
};


@ -411,7 +411,7 @@ void JS_FASTCALL
stubs::GetElem(VMFrame &f)
{
JSContext *cx = f.cx;
JSFrameRegs &regs = f.regs;
FrameRegs &regs = f.regs;
Value &lref = regs.sp[-2];
Value &rref = regs.sp[-1];
@ -451,7 +451,7 @@ stubs::GetElem(VMFrame &f)
if (arg < obj->getArgsInitialLength()) {
copyFrom = obj->addressOfArgsElement(arg);
if (!copyFrom->isMagic()) {
if (JSStackFrame *afp = (JSStackFrame *) obj->getPrivate())
if (StackFrame *afp = (StackFrame *) obj->getPrivate())
copyFrom = &afp->canonicalActualArg(arg);
goto end_getelem;
}
@ -496,7 +496,7 @@ void JS_FASTCALL
stubs::CallElem(VMFrame &f)
{
JSContext *cx = f.cx;
JSFrameRegs &regs = f.regs;
FrameRegs &regs = f.regs;
/* Find the object on which to look for |this|'s properties. */
Value thisv = regs.sp[-2];
@ -531,7 +531,7 @@ void JS_FASTCALL
stubs::SetElem(VMFrame &f)
{
JSContext *cx = f.cx;
JSFrameRegs &regs = f.regs;
FrameRegs &regs = f.regs;
Value &objval = regs.sp[-3];
Value &idval = regs.sp[-2];
@ -691,7 +691,7 @@ stubs::DefFun(VMFrame &f, JSFunction *fun)
JSObject *obj2;
JSContext *cx = f.cx;
JSStackFrame *fp = f.fp();
StackFrame *fp = f.fp();
/*
* A top-level function defined in Global or Eval code (see ECMA-262
@ -744,7 +744,7 @@ stubs::DefFun(VMFrame &f, JSFunction *fun)
* current scope chain even for the case of function expression statements
* and functions defined by eval inside let or with blocks.
*/
JSObject *parent = &fp->varobj(cx);
JSObject *parent = &cx->stack.currentVarObj();
/* ES5 10.5 (NB: with subsequent errata). */
jsid id = ATOM_TO_JSID(fun->atom);
@ -811,7 +811,7 @@ template void JS_FASTCALL stubs::DefFun<false>(VMFrame &f, JSFunction *fun);
#define RELATIONAL(OP) \
JS_BEGIN_MACRO \
JSContext *cx = f.cx; \
JSFrameRegs &regs = f.regs; \
FrameRegs &regs = f.regs; \
Value rval = regs.sp[-1]; \
Value lval = regs.sp[-2]; \
bool cond; \
@ -879,7 +879,7 @@ static inline bool
StubEqualityOp(VMFrame &f)
{
JSContext *cx = f.cx;
JSFrameRegs &regs = f.regs;
FrameRegs &regs = f.regs;
Value rval = regs.sp[-1];
Value lval = regs.sp[-2];
@ -1006,7 +1006,7 @@ void JS_FASTCALL
stubs::Add(VMFrame &f)
{
JSContext *cx = f.cx;
JSFrameRegs &regs = f.regs;
FrameRegs &regs = f.regs;
Value rval = regs.sp[-1];
Value lval = regs.sp[-2];
@ -1078,7 +1078,7 @@ void JS_FASTCALL
stubs::Sub(VMFrame &f)
{
JSContext *cx = f.cx;
JSFrameRegs &regs = f.regs;
FrameRegs &regs = f.regs;
double d1, d2;
if (!ValueToNumber(cx, regs.sp[-2], &d1) ||
!ValueToNumber(cx, regs.sp[-1], &d2)) {
@ -1092,7 +1092,7 @@ void JS_FASTCALL
stubs::Mul(VMFrame &f)
{
JSContext *cx = f.cx;
JSFrameRegs &regs = f.regs;
FrameRegs &regs = f.regs;
double d1, d2;
if (!ValueToNumber(cx, regs.sp[-2], &d1) ||
!ValueToNumber(cx, regs.sp[-1], &d2)) {
@ -1107,7 +1107,7 @@ stubs::Div(VMFrame &f)
{
JSContext *cx = f.cx;
JSRuntime *rt = cx->runtime;
JSFrameRegs &regs = f.regs;
FrameRegs &regs = f.regs;
double d1, d2;
if (!ValueToNumber(cx, regs.sp[-2], &d1) ||
@ -1139,7 +1139,7 @@ void JS_FASTCALL
stubs::Mod(VMFrame &f)
{
JSContext *cx = f.cx;
JSFrameRegs &regs = f.regs;
FrameRegs &regs = f.regs;
Value &lref = regs.sp[-2];
Value &rref = regs.sp[-1];
@ -1202,7 +1202,7 @@ void JS_FASTCALL
stubs::Trap(VMFrame &f, uint32 trapTypes)
{
Value rval;
jsbytecode *pc = f.cx->regs->pc;
jsbytecode *pc = f.cx->regs().pc;
/*
* Trap may be called for a single-step interrupt trap and/or a
@ -1296,7 +1296,7 @@ void JS_FASTCALL
stubs::InitElem(VMFrame &f, uint32 last)
{
JSContext *cx = f.cx;
JSFrameRegs &regs = f.regs;
FrameRegs &regs = f.regs;
/* Pop the element's value into rval. */
JS_ASSERT(regs.sp - f.fp()->base() >= 3);
@ -1836,7 +1836,7 @@ static bool JS_FASTCALL
InlineGetProp(VMFrame &f)
{
JSContext *cx = f.cx;
JSFrameRegs &regs = f.regs;
FrameRegs &regs = f.regs;
Value *vp = &f.regs.sp[-1];
JSObject *obj = ValueToObject(f.cx, vp);
@ -1913,7 +1913,7 @@ void JS_FASTCALL
stubs::CallProp(VMFrame &f, JSAtom *origAtom)
{
JSContext *cx = f.cx;
JSFrameRegs &regs = f.regs;
FrameRegs &regs = f.regs;
Value lval;
lval = regs.sp[-1];
@ -2005,7 +2005,7 @@ stubs::CallProp(VMFrame &f, JSAtom *origAtom)
void JS_FASTCALL
stubs::Length(VMFrame &f)
{
JSFrameRegs &regs = f.regs;
FrameRegs &regs = f.regs;
Value *vp = &regs.sp[-1];
if (vp->isString()) {
@ -2042,7 +2042,7 @@ InitPropOrMethod(VMFrame &f, JSAtom *atom, JSOp op)
{
JSContext *cx = f.cx;
JSRuntime *rt = cx->runtime;
JSFrameRegs &regs = f.regs;
FrameRegs &regs = f.regs;
/* Load the property's initial value into rval. */
JS_ASSERT(regs.sp - f.fp()->base() >= 2);
@ -2224,7 +2224,7 @@ JSBool JS_FASTCALL
stubs::InstanceOf(VMFrame &f)
{
JSContext *cx = f.cx;
JSFrameRegs &regs = f.regs;
FrameRegs &regs = f.regs;
const Value &rref = regs.sp[-1];
if (rref.isPrimitive()) {
@ -2263,7 +2263,7 @@ stubs::ArgCnt(VMFrame &f)
{
JSContext *cx = f.cx;
JSRuntime *rt = cx->runtime;
JSStackFrame *fp = f.fp();
StackFrame *fp = f.fp();
jsid id = ATOM_TO_JSID(rt->atomState.lengthAtom);
f.regs.sp++;
@ -2274,9 +2274,9 @@ stubs::ArgCnt(VMFrame &f)
void JS_FASTCALL
stubs::EnterBlock(VMFrame &f, JSObject *obj)
{
JSFrameRegs &regs = f.regs;
FrameRegs &regs = f.regs;
#ifdef DEBUG
JSStackFrame *fp = f.fp();
StackFrame *fp = f.fp();
#endif
JS_ASSERT(obj->isStaticBlock());
@ -2316,7 +2316,7 @@ void JS_FASTCALL
stubs::LeaveBlock(VMFrame &f, JSObject *blockChain)
{
JSContext *cx = f.cx;
JSStackFrame *fp = f.fp();
StackFrame *fp = f.fp();
#ifdef DEBUG
JS_ASSERT(blockChain->isStaticBlock());
@ -2552,9 +2552,9 @@ void JS_FASTCALL
stubs::DefVarOrConst(VMFrame &f, JSAtom *atom)
{
JSContext *cx = f.cx;
JSStackFrame *fp = f.fp();
StackFrame *fp = f.fp();
JSObject *obj = &fp->varobj(cx);
JSObject *obj = &cx->stack.currentVarObj();
JS_ASSERT(!obj->getOps()->defineProperty);
uintN attrs = JSPROP_ENUMERATE;
if (!fp->isEvalFrame())
@ -2598,9 +2598,8 @@ void JS_FASTCALL
stubs::SetConst(VMFrame &f, JSAtom *atom)
{
JSContext *cx = f.cx;
JSStackFrame *fp = f.fp();
JSObject *obj = &fp->varobj(cx);
JSObject *obj = &cx->stack.currentVarObj();
const Value &ref = f.regs.sp[-1];
if (!obj->defineProperty(cx, ATOM_TO_JSID(atom), ref,
PropertyStub, StrictPropertyStub,


@ -118,20 +118,20 @@ TrampolineCompiler::generateForceReturn(Assembler &masm)
{
/* if (hasArgsObj() || hasCallObj()) stubs::PutActivationObjects() */
Jump noActObjs = masm.branchTest32(Assembler::Zero, FrameFlagsAddress(),
Imm32(JSFRAME_HAS_CALL_OBJ | JSFRAME_HAS_ARGS_OBJ));
Imm32(StackFrame::HAS_CALL_OBJ | StackFrame::HAS_ARGS_OBJ));
masm.fallibleVMCall(JS_FUNC_TO_DATA_PTR(void *, stubs::PutActivationObjects), NULL, 0);
noActObjs.linkTo(masm.label(), &masm);
/* Store any known return value */
masm.loadValueAsComponents(UndefinedValue(), JSReturnReg_Type, JSReturnReg_Data);
Jump rvalClear = masm.branchTest32(Assembler::Zero,
FrameFlagsAddress(), Imm32(JSFRAME_HAS_RVAL));
Address rvalAddress(JSFrameReg, JSStackFrame::offsetOfReturnValue());
FrameFlagsAddress(), Imm32(StackFrame::HAS_RVAL));
Address rvalAddress(JSFrameReg, StackFrame::offsetOfReturnValue());
masm.loadValueAsComponents(rvalAddress, JSReturnReg_Type, JSReturnReg_Data);
rvalClear.linkTo(masm.label(), &masm);
/* Return to the caller */
masm.loadPtr(Address(JSFrameReg, JSStackFrame::offsetOfncode()), Registers::ReturnReg);
masm.loadPtr(Address(JSFrameReg, StackFrame::offsetOfNcode()), Registers::ReturnReg);
masm.jump(Registers::ReturnReg);
return true;
}


@ -43,7 +43,7 @@ extern PopActiveVMFrame:PROC
.CODE
; JSBool JaegerTrampoline(JSContext *cx, JSStackFrame *fp, void *code,
; JSBool JaegerTrampoline(JSContext *cx, StackFrame *fp, void *code,
; Value *stackLimit, void *safePoint);
JaegerTrampoline PROC FRAME
push rbp


@ -44,7 +44,7 @@
.text
.intel_syntax noprefix
# JSBool JaegerTrampoline(JSContext *cx, JSStackFrame *fp, void *code,
# JSBool JaegerTrampoline(JSContext *cx, StackFrame *fp, void *code,
# Value *stackLimit, void *safePoint)#
.globl JaegerTrampoline
.def JaegerTrampoline


@ -37,8 +37,8 @@
.text
/ JSBool JaegerTrampoline(JSContext *cx, JSStackFrame *fp, void *code,
/ JSFrameRegs *regs, uintptr_t inlineCallCount)
/ JSBool JaegerTrampoline(JSContext *cx, StackFrame *fp, void *code,
/ FrameRegs *regs, uintptr_t inlineCallCount)
.global JaegerTrampoline
.type JaegerTrampoline, @function
JaegerTrampoline:


@ -37,8 +37,8 @@
.text
/ JSBool JaegerTrampoline(JSContext *cx, JSStackFrame *fp, void *code,
/ JSFrameRegs *regs, uintptr_t inlineCallCount)
/ JSBool JaegerTrampoline(JSContext *cx, StackFrame *fp, void *code,
/ FrameRegs *regs, uintptr_t inlineCallCount)
.global JaegerTrampoline
.type JaegerTrampoline, @function
JaegerTrampoline:


@ -1800,7 +1800,7 @@ GetTrapArgs(JSContext *cx, uintN argc, jsval *argv, JSScript **scriptp,
uintN intarg;
JSScript *script;
*scriptp = JS_GetScriptedCaller(cx, NULL)->script();
*scriptp = JS_GetFrameScript(cx, JS_GetScriptedCaller(cx, NULL));
*ip = 0;
if (argc != 0) {
v = argv[0];
@ -1823,11 +1823,12 @@ GetTrapArgs(JSContext *cx, uintN argc, jsval *argv, JSScript **scriptp,
}
static JSTrapStatus
TrapHandler(JSContext *cx, JSScript *script, jsbytecode *pc, jsval *rval,
TrapHandler(JSContext *cx, JSScript *, jsbytecode *pc, jsval *rval,
jsval closure)
{
JSString *str = JSVAL_TO_STRING(closure);
JSStackFrame *caller = JS_GetScriptedCaller(cx, NULL);
JSScript *script = JS_GetFrameScript(cx, caller);
size_t length;
const jschar *chars = JS_GetStringCharsAndLength(cx, str, &length);
@ -1835,8 +1836,8 @@ TrapHandler(JSContext *cx, JSScript *script, jsbytecode *pc, jsval *rval,
return JSTRAP_ERROR;
if (!JS_EvaluateUCInStackFrame(cx, caller, chars, length,
caller->script()->filename,
caller->script()->lineno,
script->filename,
script->lineno,
rval)) {
return JSTRAP_ERROR;
}
@ -1938,7 +1939,7 @@ LineToPC(JSContext *cx, uintN argc, jsval *vp)
JS_ReportErrorNumber(cx, my_GetErrorMessage, NULL, JSSMSG_LINE2PC_USAGE);
return JS_FALSE;
}
script = JS_GetScriptedCaller(cx, NULL)->script();
script = JS_GetFrameScript(cx, JS_GetScriptedCaller(cx, NULL));
if (!GetTrapArgs(cx, argc, JS_ARGV(cx, vp), &script, &i))
return JS_FALSE;
lineno = (i == 0) ? script->lineno : (uintN)i;
@ -3573,6 +3574,8 @@ EvalInContext(JSContext *cx, uintN argc, jsval *vp)
return true;
JSStackFrame *fp = JS_GetScriptedCaller(cx, NULL);
JSScript *script = JS_GetFrameScript(cx, fp);
jsbytecode *pc = JS_GetFramePC(cx, fp);
{
JSAutoEnterCompartment ac;
uintN flags;
@ -3591,8 +3594,8 @@ EvalInContext(JSContext *cx, uintN argc, jsval *vp)
return false;
}
if (!JS_EvaluateUCScript(cx, sobj, src, srclen,
fp->script()->filename,
JS_PCToLineNumber(cx, fp->script(), fp->pc(cx)),
script->filename,
JS_PCToLineNumber(cx, script, pc),
vp)) {
return false;
}
@ -3618,7 +3621,7 @@ EvalInFrame(JSContext *cx, uintN argc, jsval *vp)
? !!(JSVAL_TO_BOOLEAN(argv[2]))
: false;
JS_ASSERT(cx->hasfp());
JS_ASSERT(cx->running());
FrameRegsIter fi(cx);
for (uint32 i = 0; i < upCount; ++i, ++fi) {
@ -3626,8 +3629,8 @@ EvalInFrame(JSContext *cx, uintN argc, jsval *vp)
break;
}
JSStackFrame *const fp = fi.fp();
if (!JS_IsScriptFrame(cx, fp)) {
StackFrame *const fp = fi.fp();
if (!fp->isScriptFrame()) {
JS_ReportError(cx, "cannot eval in non-script frame");
return JS_FALSE;
}
@ -3641,7 +3644,7 @@ EvalInFrame(JSContext *cx, uintN argc, jsval *vp)
if (!chars)
return JS_FALSE;
JSBool ok = JS_EvaluateUCInStackFrame(cx, fp, chars, length,
JSBool ok = JS_EvaluateUCInStackFrame(cx, Jsvalify(fp), chars, length,
fp->script()->filename,
JS_PCToLineNumber(cx, fp->script(),
fi.pc()),
@ -4463,7 +4466,6 @@ Snarf(JSContext *cx, uintN argc, jsval *vp)
{
JSString *str;
const char *pathname;
JSStackFrame *fp;
if (!argc)
return JS_FALSE;
@ -4476,10 +4478,11 @@ Snarf(JSContext *cx, uintN argc, jsval *vp)
return JS_FALSE;
/* Get the currently executing script's name. */
fp = JS_GetScriptedCaller(cx, NULL);
JS_ASSERT(fp && fp->script()->filename);
JSStackFrame *fp = JS_GetScriptedCaller(cx, NULL);
JSScript *script = JS_GetFrameScript(cx, fp);
JS_ASSERT(fp && script->filename);
#ifdef XP_UNIX
pathname = MakeAbsolutePathname(cx, fp->script()->filename, filename.ptr());
pathname = MakeAbsolutePathname(cx, script->filename, filename.ptr());
if (!pathname)
return JS_FALSE;
#else


@ -370,17 +370,17 @@ void ValidateWriter::checkAccSet(LOpcode op, LIns *base, int32_t disp, AccSet ac
case ACCSET_FRAMEREGS:
// base = ldp.cx ...[offsetof(JSContext, regs)]
// ins = ldp.regs base[<disp within JSFrameRegs>]
// ins = ldp.regs base[<disp within FrameRegs>]
ok = op == LIR_ldp &&
dispWithin(JSFrameRegs) &&
match(base, LIR_ldp, ACCSET_CX, offsetof(JSContext, regs));
dispWithin(FrameRegs) &&
match(base, LIR_ldp, ACCSET_CX, offsetof(JSContext, stack) + ContextStack::offsetOfRegs());
break;
case ACCSET_STACKFRAME:
// base = ldp.regs ...[offsetof(JSFrameRegs, fp)]
// ins = {ld,st}X.sf base[<disp within JSStackFrame>]
ok = dispWithin(JSStackFrame) &&
match(base, LIR_ldp, ACCSET_FRAMEREGS, offsetof(JSFrameRegs, fp));
// base = ldp.regs ...[offsetof(FrameRegs, fp)]
// ins = {ld,st}X.sf base[<disp within StackFrame>]
ok = dispWithin(StackFrame) &&
match(base, LIR_ldp, ACCSET_FRAMEREGS, FrameRegs::offsetOfFp);
break;
case ACCSET_RUNTIME:


@ -106,8 +106,8 @@ enum LC_TMBits {
* - ACCSET_EOS: The globals area.
* - ACCSET_ALLOC: All memory blocks allocated with LIR_allocp (in
* other words, this region is the AR space).
* - ACCSET_FRAMEREGS: All JSFrameRegs structs.
* - ACCSET_STACKFRAME: All JSStackFrame objects.
* - ACCSET_FRAMEREGS: All FrameRegs structs.
* - ACCSET_STACKFRAME: All StackFrame objects.
* - ACCSET_RUNTIME: The JSRuntime object.
* - ACCSET_OBJ_CLASP: The 'clasp' field of all JSObjects.
* - ACCSET_OBJ_FLAGS: The 'flags' field of all JSObjects.
@ -427,6 +427,10 @@ class Writer
#define ldpConstContextField(fieldname) \
name(w.ldpContextFieldHelper(cx_ins, offsetof(JSContext, fieldname), LOAD_CONST), \
#fieldname)
nj::LIns *ldpContextRegs(nj::LIns *cx) const {
int32 offset = offsetof(JSContext, stack) + ContextStack::offsetOfRegs();
return name(ldpContextFieldHelper(cx, offset, nj::LOAD_NORMAL),"regs");
}
nj::LIns *stContextField(nj::LIns *value, nj::LIns *cx, int32 offset) const {
return lir->insStore(value, cx, offset, ACCSET_CX);
@ -457,11 +461,11 @@ class Writer
}
nj::LIns *ldpFrameFp(nj::LIns *regs) const {
return lir->insLoad(nj::LIR_ldp, regs, offsetof(JSFrameRegs, fp), ACCSET_FRAMEREGS);
return lir->insLoad(nj::LIR_ldp, regs, FrameRegs::offsetOfFp, ACCSET_FRAMEREGS);
}
nj::LIns *ldpStackFrameScopeChain(nj::LIns *frame) const {
return lir->insLoad(nj::LIR_ldp, frame, JSStackFrame::offsetOfScopeChain(),
return lir->insLoad(nj::LIR_ldp, frame, StackFrame::offsetOfScopeChain(),
ACCSET_STACKFRAME);
}

1084  js/src/vm/Stack-inl.h (new file; diff not shown due to its size)

739   js/src/vm/Stack.cpp (new file)

@ -0,0 +1,739 @@
/* -*- Mode: C; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=4 sw=4 et tw=79 ft=cpp:
*
* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is SpiderMonkey JavaScript engine.
*
* The Initial Developer of the Original Code is
* Mozilla Corporation.
* Portions created by the Initial Developer are Copyright (C) 2009
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* Luke Wagner <luke@mozilla.com>
*
* Alternatively, the contents of this file may be used under the terms of
* either the GNU General Public License Version 2 or later (the "GPL"), or
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
#include "jsgcmark.h"
#include "methodjit/MethodJIT.h"
#include "Stack.h"
#include "jsgcinlines.h"
#include "jsobjinlines.h"
#include "Stack-inl.h"
/* Includes to get to low-level memory-mapping functionality. */
#ifdef XP_WIN
# include "jswin.h"
#elif defined(XP_OS2)
# define INCL_DOSMEMMGR
# include <os2.h>
#else
# include <unistd.h>
# include <sys/mman.h>
# if !defined(MAP_ANONYMOUS)
# if defined(MAP_ANON)
# define MAP_ANONYMOUS MAP_ANON
# else
# define MAP_ANONYMOUS 0
# endif
# endif
#endif
using namespace js;
/*****************************************************************************/
#ifdef DEBUG
JSObject *const StackFrame::sInvalidScopeChain = (JSObject *)0xbeef;
#endif
jsbytecode *
StackFrame::prevpcSlow()
{
JS_ASSERT(!(flags_ & HAS_PREVPC));
#if defined(JS_METHODJIT) && defined(JS_MONOIC)
StackFrame *p = prev();
js::mjit::JITScript *jit = p->script()->getJIT(p->isConstructing());
prevpc_ = jit->nativeToPC(ncode_);
flags_ |= HAS_PREVPC;
return prevpc_;
#else
JS_NOT_REACHED("Unknown PC for frame");
return NULL;
#endif
}
jsbytecode *
StackFrame::pc(JSContext *cx, StackFrame *next)
{
JS_ASSERT_IF(next, next->prev() == this);
StackSegment &seg = cx->stack.space().containingSegment(this);
FrameRegs &regs = seg.currentRegs();
if (regs.fp() == this)
return regs.pc;
if (!next)
next = seg.computeNextFrame(this);
return next->prevpc();
}
/*****************************************************************************/
JS_REQUIRES_STACK bool
StackSegment::contains(const StackFrame *fp) const
{
JS_ASSERT(!empty());
if (fp < initialFrame_)
return false;
StackFrame *start;
if (isActive())
start = stack_->fp();
else
start = suspendedRegs_->fp();
if (fp > start)
return false;
#ifdef DEBUG
bool found = false;
StackFrame *stop = initialFrame_->prev();
for (StackFrame *f = start; !found && f != stop; f = f->prev()) {
if (f == fp) {
found = true;
break;
}
}
JS_ASSERT(found);
#endif
return true;
}
StackFrame *
StackSegment::computeNextFrame(StackFrame *fp) const
{
JS_ASSERT(contains(fp));
JS_ASSERT(fp != currentFrame());
StackFrame *next = currentFrame();
StackFrame *prev;
while ((prev = next->prev()) != fp)
next = prev;
return next;
}
/*****************************************************************************/
StackSpace::StackSpace()
: base_(NULL),
#ifdef XP_WIN
commitEnd_(NULL),
#endif
end_(NULL),
seg_(NULL)
{
override_.top = NULL;
#ifdef DEBUG
override_.seg = NULL;
override_.frame = NULL;
#endif
}
bool
StackSpace::init()
{
void *p;
#ifdef XP_WIN
p = VirtualAlloc(NULL, CAPACITY_BYTES, MEM_RESERVE, PAGE_READWRITE);
if (!p)
return false;
void *check = VirtualAlloc(p, COMMIT_BYTES, MEM_COMMIT, PAGE_READWRITE);
if (p != check)
return false;
base_ = reinterpret_cast<Value *>(p);
commitEnd_ = base_ + COMMIT_VALS;
end_ = base_ + CAPACITY_VALS;
#elif defined(XP_OS2)
if (DosAllocMem(&p, CAPACITY_BYTES, PAG_COMMIT | PAG_READ | PAG_WRITE | OBJ_ANY) &&
DosAllocMem(&p, CAPACITY_BYTES, PAG_COMMIT | PAG_READ | PAG_WRITE))
return false;
base_ = reinterpret_cast<Value *>(p);
end_ = base_ + CAPACITY_VALS;
#else
JS_ASSERT(CAPACITY_BYTES % getpagesize() == 0);
p = mmap(NULL, CAPACITY_BYTES, PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
if (p == MAP_FAILED)
return false;
base_ = reinterpret_cast<Value *>(p);
end_ = base_ + CAPACITY_VALS;
#endif
return true;
}
StackSpace::~StackSpace()
{
JS_ASSERT(!seg_);
if (!base_)
return;
#ifdef XP_WIN
VirtualFree(base_, (commitEnd_ - base_) * sizeof(Value), MEM_DECOMMIT);
VirtualFree(base_, 0, MEM_RELEASE);
#elif defined(XP_OS2)
DosFreeMem(base_);
#else
#ifdef SOLARIS
munmap((caddr_t)base_, CAPACITY_BYTES);
#else
munmap(base_, CAPACITY_BYTES);
#endif
#endif
}
Value *
StackSpace::firstUnused() const
{
if (!seg_) {
JS_ASSERT(override_.top == NULL);
return base_;
}
if (!seg_->empty()) {
Value *sp = seg_->currentRegs().sp;
if (override_.top > sp) {
JS_ASSERT(override_.seg == seg_);
JS_ASSERT_IF(seg_->isActive(), override_.frame == seg_->stack().fp());
JS_ASSERT_IF(!seg_->isActive(), override_.frame == seg_->suspendedFrame());
return override_.top;
}
return sp;
}
JS_ASSERT(override_.seg == seg_);
return override_.top;
}
StackSegment &
StackSpace::containingSegment(const StackFrame *target) const
{
for (StackSegment *s = seg_; s; s = s->previousInMemory()) {
if (s->contains(target))
return *s;
}
JS_NOT_REACHED("frame not in stack space");
return *(StackSegment *)NULL;
}
JSObject &
StackSpace::varObjForFrame(const StackFrame *fp)
{
if (fp->hasCallObj())
return fp->callObj();
return containingSegment(fp).initialVarObj();
}
void
StackSpace::mark(JSTracer *trc)
{
/*
* JIT code can leave values in an incoherent (i.e., unsafe for precise
* marking) state, hence MarkStackRangeConservatively.
*/
Value *end = firstUnused();
for (StackSegment *seg = seg_; seg; seg = seg->previousInMemory()) {
STATIC_ASSERT(ubound(end) >= 0);
if (seg->empty()) {
/* Mark slots/args trailing off segment. */
MarkStackRangeConservatively(trc, seg->valueRangeBegin(), end);
} else {
/* This may be the only pointer to the initialVarObj. */
if (seg->hasInitialVarObj())
gc::MarkObject(trc, seg->initialVarObj(), "varobj");
/* Mark slots/args trailing off of the last stack frame. */
StackFrame *fp = seg->currentFrame();
MarkStackRangeConservatively(trc, fp->slots(), end);
/* Mark stack frames and slots/args between stack frames. */
StackFrame *initial = seg->initialFrame();
for (StackFrame *f = fp; f != initial; f = f->prev()) {
js_TraceStackFrame(trc, f);
MarkStackRangeConservatively(trc, f->prev()->slots(), (Value *)f);
}
/* Mark initial stack frame and leading args. */
js_TraceStackFrame(trc, initial);
MarkStackRangeConservatively(trc, seg->valueRangeBegin(), (Value *)initial);
}
end = (Value *)seg;
}
}
#ifdef XP_WIN
JS_FRIEND_API(bool)
StackSpace::bumpCommit(JSContext *maybecx, Value *from, ptrdiff_t nvals) const
{
if (end_ - from < nvals) {
js_ReportOutOfScriptQuota(maybecx);
return false;
}
Value *newCommit = commitEnd_;
Value *request = from + nvals;
/* Use a dumb loop; will probably execute once. */
JS_ASSERT((end_ - newCommit) % COMMIT_VALS == 0);
do {
newCommit += COMMIT_VALS;
JS_ASSERT((end_ - newCommit) >= 0);
} while (newCommit < request);
/* The cast is safe because CAPACITY_BYTES is small. */
int32 size = static_cast<int32>(newCommit - commitEnd_) * sizeof(Value);
if (!VirtualAlloc(commitEnd_, size, MEM_COMMIT, PAGE_READWRITE)) {
js_ReportOutOfScriptQuota(maybecx);
return false;
}
commitEnd_ = newCommit;
return true;
}
#endif
bool
StackSpace::bumpLimitWithinQuota(JSContext *maybecx, StackFrame *fp, Value *sp,
uintN nvals, Value **limit) const
{
JS_ASSERT(sp >= firstUnused());
JS_ASSERT(sp + nvals >= *limit);
#ifdef XP_WIN
Value *quotaEnd = (Value *)fp + STACK_QUOTA;
if (sp + nvals < quotaEnd) {
if (!ensureSpace(NULL, sp, nvals))
goto fail;
*limit = Min(quotaEnd, commitEnd_);
return true;
}
fail:
#endif
js_ReportOverRecursed(maybecx);
return false;
}
bool
StackSpace::bumpLimit(JSContext *cx, StackFrame *fp, Value *sp,
uintN nvals, Value **limit) const
{
JS_ASSERT(*limit > base_);
JS_ASSERT(sp < *limit);
/*
* Ideally, we would only ensure space for 'nvals', not 'nvals + remain',
* since this is ~500K. However, this whole call should be a rare case: some
* script is passing an obscene number of args to 'apply' and we are just
* trying to keep the stack limit heuristic from breaking the script.
*/
Value *quota = (Value *)fp + STACK_QUOTA;
uintN remain = quota - sp;
uintN inc = nvals + remain;
if (!ensureSpace(NULL, sp, inc))
return false;
*limit = sp + inc;
return true;
}
void
StackSpace::popSegment()
{
JS_ASSERT(seg_->empty());
seg_ = seg_->previousInMemory();
}
void
StackSpace::pushSegment(StackSegment &seg)
{
JS_ASSERT(seg.empty());
seg.setPreviousInMemory(seg_);
seg_ = &seg;
}
/*****************************************************************************/
ContextStack::ContextStack(JSContext *cx)
: regs_(NULL),
seg_(NULL),
space_(&JS_THREAD_DATA(cx)->stackSpace),
cx_(cx)
{
threadReset();
}
ContextStack::~ContextStack()
{
JS_ASSERT(!regs_);
JS_ASSERT(!seg_);
}
void
ContextStack::threadReset()
{
#ifdef JS_THREADSAFE
if (cx_->thread())
space_ = &JS_THREAD_DATA(cx_)->stackSpace;
else
space_ = NULL;
#else
space_ = &JS_THREAD_DATA(cx_)->stackSpace;
#endif
}
#ifdef DEBUG
void
ContextStack::assertSegmentsInSync() const
{
if (regs_) {
JS_ASSERT(seg_->isActive());
if (StackSegment *prev = seg_->previousInContext())
JS_ASSERT(!prev->isActive());
} else {
JS_ASSERT_IF(seg_, !seg_->isActive());
}
}
void
ContextStack::assertSpaceInSync() const
{
JS_ASSERT(space_);
JS_ASSERT(space_ == &JS_THREAD_DATA(cx_)->stackSpace);
}
bool
ContextStack::contains(const StackFrame *fp) const
{
return &space().containingSegment(fp).stack() == this;
}
#endif
void
ContextStack::saveActiveSegment()
{
JS_ASSERT(regs_);
seg_->save(*regs_);
regs_ = NULL;
cx_->resetCompartment();
}
void
ContextStack::restoreSegment()
{
regs_ = &seg_->suspendedRegs();
seg_->restore();
cx_->resetCompartment();
}
bool
ContextStack::getSegmentAndFrame(JSContext *cx, uintN vplen, uintN nslots,
FrameGuard *frameGuard) const
{
Value *start = space().firstUnused();
uintN nvals = VALUES_PER_STACK_SEGMENT + vplen + VALUES_PER_STACK_FRAME + nslots;
if (!space().ensureSpace(cx, start, nvals))
return false;
StackSegment *seg = new(start) StackSegment;
Value *vp = seg->valueRangeBegin();
frameGuard->seg_ = seg;
frameGuard->vp_ = vp;
frameGuard->fp_ = reinterpret_cast<StackFrame *>(vp + vplen);
return true;
}
void
ContextStack::pushSegmentAndFrameImpl(FrameRegs &regs, StackSegment &seg)
{
JS_ASSERT(&seg == space().currentSegment());
if (regs_)
seg_->suspend(*regs_);
regs_ = &regs;
seg.setPreviousInContext(seg_);
seg_ = &seg;
seg.joinContext(*this, *regs.fp());
}
void
ContextStack::pushSegmentAndFrame(FrameRegs &regs, FrameGuard *frameGuard)
{
space().pushSegment(*frameGuard->seg_);
pushSegmentAndFrameImpl(regs, *frameGuard->seg_);
frameGuard->stack_ = this;
}
void
ContextStack::popSegmentAndFrameImpl()
{
JS_ASSERT(isCurrentAndActive());
JS_ASSERT(&seg_->stack() == this);
JS_ASSERT(seg_->initialFrame() == regs_->fp());
regs_->fp()->putActivationObjects();
seg_->leaveContext();
seg_ = seg_->previousInContext();
if (seg_) {
if (seg_->isSaved()) {
regs_ = NULL;
} else {
regs_ = &seg_->suspendedRegs();
seg_->resume();
}
} else {
JS_ASSERT(regs_->fp()->prev() == NULL);
regs_ = NULL;
}
}
void
ContextStack::popSegmentAndFrame()
{
popSegmentAndFrameImpl();
space().popSegment();
notifyIfNoCodeRunning();
}
FrameGuard::~FrameGuard()
{
if (!pushed())
return;
JS_ASSERT(stack_->currentSegment() == seg_);
JS_ASSERT(stack_->currentSegment()->currentFrame() == fp_);
stack_->popSegmentAndFrame();
}
bool
ContextStack::getExecuteFrame(JSContext *cx, JSScript *script,
ExecuteFrameGuard *frameGuard) const
{
if (!getSegmentAndFrame(cx, 2, script->nslots, frameGuard))
return false;
frameGuard->regs_.prepareToRun(frameGuard->fp(), script);
return true;
}
void
ContextStack::pushExecuteFrame(JSObject *initialVarObj,
ExecuteFrameGuard *frameGuard)
{
pushSegmentAndFrame(frameGuard->regs_, frameGuard);
frameGuard->seg_->setInitialVarObj(initialVarObj);
}
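/*
 * Illustrative sketch, not part of this patch: the intended caller-side
 * pattern for the guard API above. The local names in this helper are
 * assumptions; the segment and frame are popped automatically by
 * ~FrameGuard when the guard goes out of scope.
 */
static bool
ExecuteScriptSketch(JSContext *cx, JSScript *script, JSObject &varobj)
{
    ExecuteFrameGuard frame;
    if (!cx->stack.getExecuteFrame(cx, script, &frame))
        return false;
    /* ... initialize frame.fp()'s callee, this-value and scope chain ... */
    cx->stack.pushExecuteFrame(&varobj, &frame);
    /* ... interpret or JIT-execute the script via frame.fp() ... */
    return true;  /* ~ExecuteFrameGuard pops the segment and frame */
}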
bool
ContextStack::pushDummyFrame(JSContext *cx, JSObject &scopeChain,
DummyFrameGuard *frameGuard)
{
if (!getSegmentAndFrame(cx, 0 /*vplen*/, 0 /*nslots*/, frameGuard))
return false;
StackFrame *fp = frameGuard->fp();
fp->initDummyFrame(cx, scopeChain);
frameGuard->regs_.initDummyFrame(fp);
pushSegmentAndFrame(frameGuard->regs_, frameGuard);
return true;
}
bool
ContextStack::getGeneratorFrame(JSContext *cx, uintN vplen, uintN nslots,
GeneratorFrameGuard *frameGuard)
{
/* The regs will be set by SendToGenerator. */
return getSegmentAndFrame(cx, vplen, nslots, frameGuard);
}
void
ContextStack::pushGeneratorFrame(FrameRegs &regs,
GeneratorFrameGuard *frameGuard)
{
JS_ASSERT(regs.fp() == frameGuard->fp());
JS_ASSERT(regs.fp()->prev() == regs_->fp());
pushSegmentAndFrame(regs, frameGuard);
}
bool
ContextStack::pushInvokeArgsSlow(JSContext *cx, uintN argc,
InvokeArgsGuard *argsGuard)
{
/*
* Either there is no code running on this context or it's not at the top of
* the contiguous stack. Either way, push a new empty segment which will
* root the args for invoke and later contain the frame pushed by Invoke.
*/
JS_ASSERT(!isCurrentAndActive());
Value *start = space().firstUnused();
size_t vplen = 2 + argc;
ptrdiff_t nvals = VALUES_PER_STACK_SEGMENT + vplen;
if (!space().ensureSpace(cx, start, nvals))
return false;
StackSegment *seg = new(start) StackSegment;
argsGuard->seg_ = seg;
Value *vp = seg->valueRangeBegin();
ImplicitCast<CallArgs>(*argsGuard) = CallArgsFromVp(argc, vp);
/*
* Use stack override to root vp until the frame is pushed. Don't need to
* MakeRangeGCSafe: the VM stack is conservatively marked.
*/
space().pushSegment(*seg);
space().pushOverride(vp + vplen, &argsGuard->prevOverride_);
argsGuard->stack_ = this;
return true;
}
void
ContextStack::popInvokeArgsSlow(const InvokeArgsGuard &argsGuard)
{
JS_ASSERT(space().currentSegment()->empty());
space().popOverride(argsGuard.prevOverride_);
space().popSegment();
notifyIfNoCodeRunning();
}
void
ContextStack::pushInvokeFrameSlow(InvokeFrameGuard *frameGuard)
{
JS_ASSERT(space().seg_->empty());
pushSegmentAndFrameImpl(frameGuard->regs_, *space().seg_);
frameGuard->stack_ = this;
}
void
ContextStack::popInvokeFrameSlow(const InvokeFrameGuard &frameGuard)
{
JS_ASSERT(frameGuard.regs_.fp() == seg_->initialFrame());
popSegmentAndFrameImpl();
}
/*
* NB: this function can call out and observe the stack (e.g., through GC), so
* it should only be called from a consistent stack state.
*/
void
ContextStack::notifyIfNoCodeRunning()
{
if (regs_)
return;
cx_->resetCompartment();
cx_->maybeMigrateVersionOverride();
}
/*****************************************************************************/
void
FrameRegsIter::initSlow()
{
if (!seg_) {
fp_ = NULL;
sp_ = NULL;
pc_ = NULL;
return;
}
JS_ASSERT(seg_->isSuspended());
fp_ = seg_->suspendedFrame();
sp_ = seg_->suspendedRegs().sp;
pc_ = seg_->suspendedRegs().pc;
}
/*
* Using the invariant described in the js::StackSegment comment, we know that,
* when a pair of prev-linked stack frames are in the same segment, the
* first frame's address is the top of the prev-frame's stack, modulo missing
* arguments.
*/
void
FrameRegsIter::incSlow(StackFrame *oldfp)
{
JS_ASSERT(oldfp == seg_->initialFrame());
JS_ASSERT(fp_ == oldfp->prev());
/*
* Segments from arbitrary context stacks can interleave so we must do a
* linear scan over segments in this context stack. Furthermore, 'prev' can
* be any frame in the segment (not only the suspendedFrame), so we must
* scan each stack frame in each segment. Fortunately, this is not hot code.
*/
seg_ = seg_->previousInContext();
sp_ = seg_->suspendedRegs().sp;
pc_ = seg_->suspendedRegs().pc;
StackFrame *f = seg_->suspendedFrame();
while (f != fp_) {
if (f == seg_->initialFrame()) {
seg_ = seg_->previousInContext();
sp_ = seg_->suspendedRegs().sp;
pc_ = seg_->suspendedRegs().pc;
f = seg_->suspendedFrame();
} else {
sp_ = f->formalArgsEnd();
pc_ = f->prevpc();
f = f->prev();
}
}
}
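/*
 * A picture of the invariant exploited above, for a frame f and its
 * f->prev() living in the same segment (layout inferred from formalArgsEnd
 * and the js::StackSegment comment referenced above):
 *
 *   ... | prev frame | prev's slots and expr stack ... f's args | f frame | ...
 *                                                               ^
 *                                     f begins at the top of prev's stack,
 *                                     modulo missing (undefined-filled)
 *                                     arguments, i.e. at f->formalArgsEnd().
 *
 * Hence, while walking down a segment, the regs of f->prev() can be
 * recovered as sp = f->formalArgsEnd() and pc = f->prevpc().
 */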
/*****************************************************************************/
AllFramesIter::AllFramesIter(JSContext *cx)
: seg_(cx->stack.currentSegment()),
fp_(seg_ ? seg_->currentFrame() : NULL)
{
}
AllFramesIter&
AllFramesIter::operator++()
{
JS_ASSERT(!done());
if (fp_ == seg_->initialFrame()) {
seg_ = seg_->previousInMemory();
fp_ = seg_ ? seg_->currentFrame() : NULL;
} else {
fp_ = fp_->prev();
}
return *this;
}
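/*
 * Sketch of typical iteration (constructor and done()/fp()/sp()/pc()
 * accessors assumed from the members and assertions above):
 *
 *   // Every frame in the stack space, youngest first, following
 *   // previousInMemory across segments:
 *   for (AllFramesIter i(cx); !i.done(); ++i)
 *       inspect(i.fp());
 *
 *   // This context's frames with their regs (sp/pc) recovered, following
 *   // previousInContext across segments:
 *   for (FrameRegsIter i(cx); !i.done(); ++i)
 *       use(i.fp(), i.sp(), i.pc());
 */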

js/src/vm/Stack.h (new file, 1449 lines) — diff not shown here because of its size.

View file

@ -398,13 +398,14 @@ nsXPConnect::Collect()
// cycle collection. So to compensate for JS_BeginRequest in
// XPCCallContext::Init we disable the conservative scanner if that call
// has started the request on this thread.
JS_ASSERT(cx->thread->data.requestDepth >= 1);
JS_ASSERT(!cx->thread->data.conservativeGC.requestThreshold);
if(cx->thread->data.requestDepth == 1)
cx->thread->data.conservativeGC.requestThreshold = 1;
js::ThreadData &threadData = cx->thread()->data;
JS_ASSERT(threadData.requestDepth >= 1);
JS_ASSERT(!threadData.conservativeGC.requestThreshold);
if(threadData.requestDepth == 1)
threadData.conservativeGC.requestThreshold = 1;
JS_GC(cx);
if(cx->thread->data.requestDepth == 1)
cx->thread->data.conservativeGC.requestThreshold = 0;
if(threadData.requestDepth == 1)
threadData.conservativeGC.requestThreshold = 0;
}
NS_IMETHODIMP
@ -2612,7 +2613,7 @@ nsXPConnect::Push(JSContext * cx)
bool runningJS = false;
for (PRUint32 i = 0; i < stack->Length(); ++i) {
JSContext *cx = (*stack)[i].cx;
if (cx && cx->getCurrentSegment()) {
if (cx && !cx->stack.empty()) {
runningJS = true;
break;
}

View file

@ -3626,9 +3626,9 @@ public:
{
if(cx)
{
NS_ASSERTION(cx->thread, "Uh, JS context w/o a thread?");
NS_ASSERTION(cx->thread(), "Uh, JS context w/o a thread?");
if(cx->thread == sMainJSThread)
if(cx->thread() == sMainJSThread)
return sMainThreadData;
}
else if(sMainThreadData && sMainThreadData->mThread == PR_GetCurrentThread())
@ -3733,7 +3733,7 @@ public:
{sMainJSThread = nsnull; sMainThreadData = nsnull;}
static PRBool IsMainThread(JSContext *cx)
{ return cx->thread == sMainJSThread; }
{ return cx->thread() == sMainJSThread; }
private:
XPCPerThreadData();

View file

@ -141,9 +141,10 @@ XPCJSStackFrame::CreateStack(JSContext* cx, JSStackFrame* fp,
{
NS_ADDREF(self);
if(fp->prev())
JSStackFrame *tmp = fp;
if(JSStackFrame *prev = JS_FrameIterator(cx, &tmp))
{
if(NS_FAILED(CreateStack(cx, fp->prev(),
if(NS_FAILED(CreateStack(cx, prev,
(XPCJSStackFrame**) &self->mCaller)))
failed = JS_TRUE;
}

View file

@ -498,7 +498,7 @@ XPCPerThreadData::GetDataImpl(JSContext *cx)
if(cx && !sMainJSThread && NS_IsMainThread())
{
sMainJSThread = cx->thread;
sMainJSThread = cx->thread();
sMainThreadData = data;

View file

@ -575,7 +575,7 @@ GetContextFromObject(JSObject *obj)
if(xpcc)
{
JSContext *cx = xpcc->GetJSContext();
if(cx->thread->id == js_CurrentThreadId())
if(cx->thread()->id == js_CurrentThreadId())
return cx;
}

View file

@ -49,6 +49,7 @@
#include "FilteringWrapper.h"
#include "WrapperFactory.h"
#include "jsfriendapi.h"
#include "jsstr.h"
namespace xpc {
@ -240,13 +241,13 @@ AccessCheck::documentDomainMakesSameOrigin(JSContext *cx, JSObject *obj)
JSStackFrame *fp = nsnull;
JS_FrameIterator(cx, &fp);
if (fp) {
while (fp->isDummyFrame()) {
while (!JS_IsScriptFrame(cx, fp)) {
if (!JS_FrameIterator(cx, &fp))
break;
}
if (fp)
scope = &fp->scopeChain();
scope = JS_GetFrameScopeChainRaw(fp);
}
if (!scope)