Merge tracemonkey to mozilla-central.

Robert Sayre 2010-08-25 12:24:39 -04:00
Parents 70521ae332 6befcdda29
Commit 12748c40f6
65 changed files with 2965 additions and 2233 deletions

View file

@ -359,7 +359,7 @@ MozAxAutoPushJSContext::MozAxAutoPushJSContext(JSContext *cx,
// See if there are any scripts on the stack.
// If not, we need to add a dummy frame with a principal.
PRBool hasScript = PR_FALSE;
JSStackFrame* tempFP = cx->fp;
JSStackFrame* tempFP = cx->fp();
while (tempFP)
{
if (tempFP->script)

View file

@ -85,7 +85,7 @@ Narcissus.interpreter = (function() {
x2.callee = x.callee;
x2.scope = x.scope;
try {
x2.execute(parser.parse(new parser.VanillaBuilder, s));
x2.execute(parser.parse(new parser.DefaultBuilder, s));
return x2.result;
} catch (e if e instanceof SyntaxError || isStackOverflow(e)) {
/*
@ -119,7 +119,7 @@ Narcissus.interpreter = (function() {
// NB: Use the STATEMENT_FORM constant since we don't want to push this
// function onto the fake compilation context.
var x = { builder: new parser.VanillaBuilder };
var x = { builder: new parser.DefaultBuilder };
var f = parser.FunctionDefinition(t, x, false, parser.STATEMENT_FORM);
var s = {object: global, parent: null};
return newFunction(f,{scope:s});
@ -1023,7 +1023,7 @@ Narcissus.interpreter = (function() {
return s;
var x = new ExecutionContext(GLOBAL_CODE);
x.execute(parser.parse(new parser.VanillaBuilder, s, f, l));
x.execute(parser.parse(new parser.DefaultBuilder, s, f, l));
return x.result;
}
@ -1059,7 +1059,7 @@ Narcissus.interpreter = (function() {
}
}
var b = new parser.VanillaBuilder;
var b = new parser.DefaultBuilder;
var x = new ExecutionContext(GLOBAL_CODE);
ExecutionContext.current = x;

File diff not shown because it is too large.

View file

@ -88,11 +88,7 @@ MODULE_OPTIMIZE_FLAGS = -O2 -ip
#XXX: do we want different INTERP_OPTIMIZER flags here?
endif
else # not INTEL_CXX
MODULE_OPTIMIZE_FLAGS = -O3 -fstrict-aliasing $(MOZ_OPTIMIZE_SIZE_TWEAK)
ifeq ($(OS_ARCH),Linux)
#TODO: move this up a line when we fix OS X (bug 517832)
MODULE_OPTIMIZE_FLAGS += -fomit-frame-pointer
endif
MODULE_OPTIMIZE_FLAGS = -O3 -fstrict-aliasing -fomit-frame-pointer $(MOZ_OPTIMIZE_SIZE_TWEAK)
# Special optimization flags for jsinterp.c
INTERP_OPTIMIZER = -O3 -fstrict-aliasing
endif
@ -143,6 +139,7 @@ CPPSRCS = \
jsfun.cpp \
jsgc.cpp \
jsgcchunk.cpp \
jsgcstats.cpp \
jshash.cpp \
jsinterp.cpp \
jsinvoke.cpp \
@ -202,6 +199,7 @@ INSTALLED_HEADERS = \
jsfun.h \
jsgc.h \
jsgcchunk.h \
jsgcstats.h \
jshash.h \
jsinterp.h \
jsinttypes.h \

View file

@ -64,7 +64,7 @@ static void printf_stderr_common(const char* format, ...)
static void printCallSite(const char* file, int line, const char* function)
{
#if WTF_PLATFORM_WIN && !WTF_PLATFORM_WINCE && defined _DEBUG
#if WTF_COMPILER_MSVC && !WTF_PLATFORM_WINCE && defined _DEBUG
_CrtDbgReport(_CRT_WARN, file, line, NULL, "%s\n", function);
#else
printf_stderr_common("(%s:%d %s)\n", file, line, function);

View file

@ -68,6 +68,8 @@ BEGIN_TEST(testContexts_bug561444)
JS_BeginRequest(cx);
{
jsvalRoot v(cx);
JSAutoCrossCompartmentCall crossCall;
crossCall.enter(cx, d->obj);
if (!JS_EvaluateScript(cx, d->obj, d->code, strlen(d->code), __FILE__, __LINE__, v.addr()))
return;
}
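The JSAutoCrossCompartmentCall lines added above make the test enter the target object's compartment before evaluating against it. Below is a minimal sketch of the same pattern in embedder code; the helper name and the error handling are illustrative assumptions (not part of this patch), and it presumes enter() reports failure by returning false.

/* Hypothetical helper (requires jsapi.h and <string.h>): evaluate code
 * against a global that may live in a different compartment than cx's. */
static JSBool
EvalInObjectCompartment(JSContext *cx, JSObject *obj,
                        const char *code, jsval *rval)
{
    JSAutoCrossCompartmentCall crossCall;
    if (!crossCall.enter(cx, obj))      /* switch cx into obj's compartment */
        return JS_FALSE;
    /* crossCall leaves the compartment again when it goes out of scope. */
    return JS_EvaluateScript(cx, obj, code, strlen(code),
                             __FILE__, __LINE__, rval);
}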

View file

@ -1208,6 +1208,20 @@ JS_GetCompartmentPrivate(JSContext *cx, JSCompartment *compartment)
return compartment->data;
}
JS_PUBLIC_API(JSBool)
JS_WrapObject(JSContext *cx, JSObject **objp)
{
CHECK_REQUEST(cx);
return cx->compartment->wrap(cx, objp);
}
JS_PUBLIC_API(JSBool)
JS_WrapValue(JSContext *cx, jsval *vp)
{
CHECK_REQUEST(cx);
return cx->compartment->wrap(cx, Valueify(vp));
}
JS_PUBLIC_API(JSObject *)
JS_GetGlobalObject(JSContext *cx)
{
@ -1822,8 +1836,8 @@ JS_GetGlobalForScopeChain(JSContext *cx)
*/
VOUCH_DOES_NOT_REQUIRE_STACK();
if (cx->fp)
return cx->fp->getScopeChain()->getGlobal();
if (cx->hasfp())
return cx->fp()->getScopeChain()->getGlobal();
JSObject *scope = cx->globalObject;
if (!scope) {
@ -4090,8 +4104,8 @@ JS_CloneFunctionObject(JSContext *cx, JSObject *funobj, JSObject *parent)
CHECK_REQUEST(cx);
assertSameCompartment(cx, parent); // XXX no funobj for now
if (!parent) {
if (cx->fp)
parent = js_GetScopeChain(cx, cx->fp);
if (cx->hasfp())
parent = js_GetScopeChain(cx, cx->fp());
if (!parent)
parent = cx->globalObject;
JS_ASSERT(parent);
@ -4298,7 +4312,7 @@ js_generic_native_method_dispatcher(JSContext *cx, JSObject *obj,
if (!ComputeThisFromArgv(cx, argv))
return JS_FALSE;
js_GetTopStackFrame(cx)->setThisValue(argv[-1]);
JS_ASSERT(cx->fp->argv == argv);
JS_ASSERT(cx->fp()->argv == argv);
/* Clear the last parameter in case too few arguments were passed. */
argv[--argc].setUndefined();
@ -4414,6 +4428,10 @@ JS_CompileUCScriptForPrincipals(JSContext *cx, JSObject *obj, JSPrincipals *prin
uint32 tcflags = JS_OPTIONS_TO_TCFLAGS(cx) | TCF_NEED_MUTABLE_SCRIPT;
JSScript *script = Compiler::compileScript(cx, obj, NULL, principals, tcflags,
chars, length, NULL, filename, lineno);
if (script && !js_NewScriptObject(cx, script)) {
js_DestroyScript(cx, script);
script = NULL;
}
LAST_FRAME_CHECKS(cx, script);
return script;
}
@ -4509,11 +4527,15 @@ JS_CompileFile(JSContext *cx, JSObject *obj, const char *filename)
}
}
tcflags = JS_OPTIONS_TO_TCFLAGS(cx);
tcflags = JS_OPTIONS_TO_TCFLAGS(cx) | TCF_NEED_MUTABLE_SCRIPT;
script = Compiler::compileScript(cx, obj, NULL, NULL, tcflags,
NULL, 0, fp, filename, 1);
if (fp != stdin)
fclose(fp);
if (script && !js_NewScriptObject(cx, script)) {
js_DestroyScript(cx, script);
script = NULL;
}
LAST_FRAME_CHECKS(cx, script);
return script;
}
@ -4527,9 +4549,13 @@ JS_CompileFileHandleForPrincipals(JSContext *cx, JSObject *obj, const char *file
CHECK_REQUEST(cx);
assertSameCompartment(cx, obj, principals);
tcflags = JS_OPTIONS_TO_TCFLAGS(cx);
tcflags = JS_OPTIONS_TO_TCFLAGS(cx) | TCF_NEED_MUTABLE_SCRIPT;
script = Compiler::compileScript(cx, obj, NULL, principals, tcflags,
NULL, 0, file, filename, 1);
if (script && !js_NewScriptObject(cx, script)) {
js_DestroyScript(cx, script);
script = NULL;
}
LAST_FRAME_CHECKS(cx, script);
return script;
}
@ -4543,34 +4569,29 @@ JS_CompileFileHandle(JSContext *cx, JSObject *obj, const char *filename, FILE *f
JS_PUBLIC_API(JSObject *)
JS_NewScriptObject(JSContext *cx, JSScript *script)
{
JSObject *obj;
CHECK_REQUEST(cx);
assertSameCompartment(cx, script);
if (!script)
return NewNonFunction<WithProto::Class>(cx, &js_ScriptClass, NULL, NULL);
JS_ASSERT(!script->u.object);
{
AutoScriptRooter root(cx, script);
obj = NewNonFunction<WithProto::Class>(cx, &js_ScriptClass, NULL, NULL);
if (obj) {
obj->setPrivate(script);
script->u.object = obj;
#ifdef CHECK_SCRIPT_OWNER
script->owner = NULL;
#endif
}
}
return obj;
/*
* This function should only ever be applied to JSScripts that had
* script objects allocated for them when they were created, as
* described in the comment for JSScript::u.object.
*/
JS_ASSERT(script->u.object);
return script->u.object;
}
JS_PUBLIC_API(JSObject *)
JS_GetScriptObject(JSScript *script)
{
/*
* This function should only ever be applied to JSScripts that had
* script objects allocated for them when they were created, as
* described in the comment for JSScript::u.object.
*/
JS_ASSERT(script->u.object);
return script->u.object;
}
@ -4578,8 +4599,17 @@ JS_PUBLIC_API(void)
JS_DestroyScript(JSContext *cx, JSScript *script)
{
CHECK_REQUEST(cx);
assertSameCompartment(cx, script);
js_DestroyScript(cx, script);
/*
* Originally, JSScript lifetimes were managed explicitly, and this function
* was used to free a JSScript. Now, this function does nothing, and the
* garbage collector manages JSScripts; you must root the JSScript's script
* object (obtained via JS_GetScriptObject) to keep it alive.
*
* However, since the script objects have taken over this responsibility, it
* follows that every script passed here must have a script object.
*/
JS_ASSERT(script->u.object);
}
JS_PUBLIC_API(JSFunction *)
@ -4741,6 +4771,8 @@ JS_ExecuteScript(JSContext *cx, JSObject *obj, JSScript *script, jsval *rval)
CHECK_REQUEST(cx);
assertSameCompartment(cx, obj, script);
/* This should receive only scripts handed out via the JSAPI. */
JS_ASSERT(script == JSScript::emptyScript() || script->u.object);
ok = Execute(cx, obj, script, NULL, 0, Valueify(rval));
LAST_FRAME_CHECKS(cx, ok);
return ok;
@ -4768,7 +4800,7 @@ JS_EvaluateUCScriptForPrincipals(JSContext *cx, JSObject *obj,
}
ok = Execute(cx, obj, script, NULL, 0, Valueify(rval));
LAST_FRAME_CHECKS(cx, ok);
JS_DestroyScript(cx, script);
js_DestroyScript(cx, script);
return ok;
}
@ -4924,9 +4956,9 @@ JS_IsRunning(JSContext *cx)
VOUCH_DOES_NOT_REQUIRE_STACK();
#ifdef JS_TRACER
JS_ASSERT_IF(JS_TRACE_MONITOR(cx).tracecx == cx, cx->fp);
JS_ASSERT_IF(JS_TRACE_MONITOR(cx).tracecx == cx, cx->hasfp());
#endif
JSStackFrame *fp = cx->fp;
JSStackFrame *fp = cx->maybefp();
while (fp && fp->isDummyFrame())
fp = fp->down;
return fp != NULL;
@ -4954,7 +4986,7 @@ JS_RestoreFrameChain(JSContext *cx, JSStackFrame *fp)
{
CHECK_REQUEST(cx);
JS_ASSERT_NOT_ON_TRACE(cx);
JS_ASSERT(!cx->fp);
JS_ASSERT(!cx->hasfp());
if (!fp)
return;
cx->restoreSegment();
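The jsapi.cpp hunks above change the script lifecycle: the JS_Compile* entry points now allocate the script object up front via js_NewScriptObject, JS_NewScriptObject and JS_GetScriptObject simply return that object, and JS_DestroyScript degrades to an assertion because the GC owns the script. A sketch of the embedder pattern this implies follows; the rooting calls (JS_AddObjectRoot/JS_RemoveObjectRoot) and the helper name are assumptions for illustration, not code from the patch.

/* Compile once, run once, and let the GC reclaim the script afterwards. */
static JSBool
CompileAndRunOnce(JSContext *cx, JSObject *global, const char *src)
{
    JSScript *script = JS_CompileScript(cx, global, src, strlen(src),
                                        "boot.js", 1);
    if (!script)
        return JS_FALSE;

    /* The script object now exists as soon as compilation succeeds; rooting
     * it is what keeps the script alive across GCs. */
    JSObject *scriptObj = JS_GetScriptObject(script);
    if (!JS_AddObjectRoot(cx, &scriptObj))
        return JS_FALSE;

    jsval rval;
    JSBool ok = JS_ExecuteScript(cx, global, script, &rval);

    JS_RemoveObjectRoot(cx, &scriptObj);
    /* No JS_DestroyScript(cx, script): after this patch it only asserts. */
    return ok;
}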

View file

@ -941,6 +941,12 @@ JS_SetCompartmentPrivate(JSContext *cx, JSCompartment *compartment, void *data);
extern JS_PUBLIC_API(void *)
JS_GetCompartmentPrivate(JSContext *cx, JSCompartment *compartment);
extern JS_PUBLIC_API(JSBool)
JS_RewrapObject(JSContext *cx, JSObject **objp);
extern JS_PUBLIC_API(JSBool)
JS_RewrapValue(JSContext *cx, jsval *p);
#ifdef __cplusplus
JS_END_EXTERN_C

View file

@ -335,8 +335,8 @@ JS_DEFINE_CALLINFO_4(extern, OBJECT, js_NewNullClosure, CONTEXT, OBJECT, OBJECT,
JS_REQUIRES_STACK JSBool FASTCALL
js_PopInterpFrame(JSContext* cx, TracerState* state)
{
JS_ASSERT(cx->fp && cx->fp->down);
JSStackFrame* const fp = cx->fp;
JS_ASSERT(cx->hasfp() && cx->fp()->down);
JSStackFrame* const fp = cx->fp();
/*
* Mirror frame popping code from inline_return in js_Interpret. There are

View file

@ -110,10 +110,12 @@ StackSegment::contains(const JSStackFrame *fp) const
JSStackFrame *start;
JSStackFrame *stop;
if (isActive()) {
start = cx->fp;
JS_ASSERT(cx->hasfp());
start = cx->fp();
stop = cx->activeSegment()->initialFrame->down;
} else {
start = suspendedFrame;
JS_ASSERT(suspendedRegs && suspendedRegs->fp);
start = suspendedRegs->fp;
stop = initialFrame->down;
}
for (JSStackFrame *f = start; f != stop; f = f->down) {
@ -286,7 +288,7 @@ StackSpace::popSegmentForInvoke(const InvokeArgsGuard &ag)
JS_REQUIRES_STACK bool
StackSpace::getExecuteFrame(JSContext *cx, JSStackFrame *down,
uintN vplen, uintN nfixed,
ExecuteFrameGuard &fg) const
FrameGuard &fg) const
{
Value *start = firstUnused();
ptrdiff_t nvals = VALUES_PER_STACK_SEGMENT + vplen + VALUES_PER_STACK_FRAME + nfixed;
@ -301,20 +303,22 @@ StackSpace::getExecuteFrame(JSContext *cx, JSStackFrame *down,
}
JS_REQUIRES_STACK void
StackSpace::pushExecuteFrame(JSContext *cx, ExecuteFrameGuard &fg,
StackSpace::pushExecuteFrame(JSContext *cx, FrameGuard &fg,
JSFrameRegs &regs, JSObject *initialVarObj)
{
fg.fp->down = fg.down;
StackSegment *seg = fg.seg;
seg->setPreviousInMemory(currentSegment);
currentSegment = seg;
cx->pushSegmentAndFrame(seg, fg.fp, regs);
regs.fp = fg.fp;
cx->pushSegmentAndFrame(seg, regs);
seg->setInitialVarObj(initialVarObj);
fg.cx = cx;
}
JS_REQUIRES_STACK void
StackSpace::popExecuteFrame(JSContext *cx)
StackSpace::popFrame(JSContext *cx)
{
JS_ASSERT(isCurrentAndActive(cx));
JS_ASSERT(cx->hasActiveSegment());
@ -323,13 +327,13 @@ StackSpace::popExecuteFrame(JSContext *cx)
}
JS_REQUIRES_STACK
ExecuteFrameGuard::~ExecuteFrameGuard()
FrameGuard::~FrameGuard()
{
if (!pushed())
return;
JS_ASSERT(cx->activeSegment() == seg);
JS_ASSERT(cx->fp == fp);
cx->stack().popExecuteFrame(cx);
JS_ASSERT(cx->maybefp() == fp);
cx->stack().popFrame(cx);
}
JS_REQUIRES_STACK void
@ -342,14 +346,13 @@ StackSpace::getSynthesizedSlowNativeFrame(JSContext *cx, StackSegment *&seg, JSS
}
JS_REQUIRES_STACK void
StackSpace::pushSynthesizedSlowNativeFrame(JSContext *cx, StackSegment *seg, JSStackFrame *fp,
JSFrameRegs &regs)
StackSpace::pushSynthesizedSlowNativeFrame(JSContext *cx, StackSegment *seg, JSFrameRegs &regs)
{
JS_ASSERT(!fp->hasScript() && FUN_SLOW_NATIVE(fp->getFunction()));
fp->down = cx->fp;
JS_ASSERT(!regs.fp->hasScript() && FUN_SLOW_NATIVE(regs.fp->getFunction()));
regs.fp->down = cx->maybefp();
seg->setPreviousInMemory(currentSegment);
currentSegment = seg;
cx->pushSegmentAndFrame(seg, fp, regs);
cx->pushSegmentAndFrame(seg, regs);
seg->setInitialVarObj(NULL);
}
@ -358,12 +361,30 @@ StackSpace::popSynthesizedSlowNativeFrame(JSContext *cx)
{
JS_ASSERT(isCurrentAndActive(cx));
JS_ASSERT(cx->hasActiveSegment());
JS_ASSERT(currentSegment->getInitialFrame() == cx->fp);
JS_ASSERT(!cx->fp->hasScript() && FUN_SLOW_NATIVE(cx->fp->getFunction()));
JS_ASSERT(currentSegment->getInitialFrame() == cx->fp());
JS_ASSERT(!cx->fp()->hasScript() && FUN_SLOW_NATIVE(cx->fp()->getFunction()));
cx->popSegmentAndFrame();
currentSegment = currentSegment->getPreviousInMemory();
}
JS_REQUIRES_STACK bool
StackSpace::pushDummyFrame(JSContext *cx, FrameGuard &fg, JSFrameRegs &regs, JSObject *scopeChain)
{
if (!getExecuteFrame(cx, cx->maybefp(), 0, 0, fg))
return false;
JSStackFrame *fp = fg.getFrame();
PodZero(fp);
fp->setScopeChain(scopeChain);
fp->flags = JSFRAME_DUMMY;
regs.pc = NULL;
regs.sp = fp->slots();
pushExecuteFrame(cx, fg, regs, NULL);
return true;
}
void
FrameRegsIter::initSlow()
{
@ -1893,7 +1914,7 @@ js_GetCurrentBytecodePC(JSContext* cx)
pc = cx->regs ? cx->regs->pc : NULL;
if (!pc)
return NULL;
imacpc = cx->fp->maybeIMacroPC();
imacpc = cx->fp()->maybeIMacroPC();
}
/*
@ -1911,7 +1932,7 @@ js_CurrentPCIsInImacro(JSContext *cx)
VOUCH_DOES_NOT_REQUIRE_STACK();
if (JS_ON_TRACE(cx))
return cx->bailExit->imacpc != NULL;
return cx->fp->hasIMacroPC();
return cx->fp()->hasIMacroPC();
#else
return false;
#endif
@ -1956,54 +1977,48 @@ DSTOffsetCache::DSTOffsetCache()
JSContext::JSContext(JSRuntime *rt)
: runtime(rt),
compartment(rt->defaultCompartment),
fp(NULL),
regs(NULL),
regExpStatics(this),
busyArrays(this)
{}
void
JSContext::pushSegmentAndFrame(js::StackSegment *newseg, JSStackFrame *newfp,
JSFrameRegs &newregs)
JSContext::pushSegmentAndFrame(js::StackSegment *newseg, JSFrameRegs &newregs)
{
if (hasActiveSegment()) {
JS_ASSERT(fp->savedPC == JSStackFrame::sInvalidPC);
fp->savedPC = regs->pc;
currentSegment->suspend(fp, regs);
JS_ASSERT(regs->fp->savedPC == JSStackFrame::sInvalidPC);
regs->fp->savedPC = regs->pc;
currentSegment->suspend(regs);
}
newseg->setPreviousInContext(currentSegment);
currentSegment = newseg;
#ifdef DEBUG
newfp->savedPC = JSStackFrame::sInvalidPC;
newregs.fp->savedPC = JSStackFrame::sInvalidPC;
#endif
setCurrentFrame(newfp);
setCurrentRegs(&newregs);
newseg->joinContext(this, newfp);
newseg->joinContext(this, newregs.fp);
}
void
JSContext::popSegmentAndFrame()
{
JS_ASSERT(currentSegment->maybeContext() == this);
JS_ASSERT(currentSegment->getInitialFrame() == fp);
JS_ASSERT(fp->savedPC == JSStackFrame::sInvalidPC);
JS_ASSERT(currentSegment->getInitialFrame() == regs->fp);
JS_ASSERT(regs->fp->savedPC == JSStackFrame::sInvalidPC);
currentSegment->leaveContext();
currentSegment = currentSegment->getPreviousInContext();
if (currentSegment) {
if (currentSegment->isSaved()) {
setCurrentFrame(NULL);
setCurrentRegs(NULL);
} else {
setCurrentFrame(currentSegment->getSuspendedFrame());
setCurrentRegs(currentSegment->getSuspendedRegs());
currentSegment->resume();
#ifdef DEBUG
fp->savedPC = JSStackFrame::sInvalidPC;
regs->fp->savedPC = JSStackFrame::sInvalidPC;
#endif
}
} else {
JS_ASSERT(fp->down == NULL);
setCurrentFrame(NULL);
JS_ASSERT(regs->fp->down == NULL);
setCurrentRegs(NULL);
}
}
@ -2012,10 +2027,9 @@ void
JSContext::saveActiveSegment()
{
JS_ASSERT(hasActiveSegment());
currentSegment->save(fp, regs);
JS_ASSERT(fp->savedPC == JSStackFrame::sInvalidPC);
fp->savedPC = regs->pc;
setCurrentFrame(NULL);
currentSegment->save(regs);
JS_ASSERT(regs->fp->savedPC == JSStackFrame::sInvalidPC);
regs->fp->savedPC = regs->pc;
setCurrentRegs(NULL);
}
@ -2023,11 +2037,10 @@ void
JSContext::restoreSegment()
{
js::StackSegment *ccs = currentSegment;
setCurrentFrame(ccs->getSuspendedFrame());
setCurrentRegs(ccs->getSuspendedRegs());
ccs->restore();
#ifdef DEBUG
fp->savedPC = JSStackFrame::sInvalidPC;
regs->fp->savedPC = JSStackFrame::sInvalidPC;
#endif
}
@ -2058,10 +2071,11 @@ JSContext::containingSegment(const JSStackFrame *target)
if (!seg)
return NULL;
/* The active segments's top frame is cx->fp. */
if (fp) {
/* The active segment's top frame is cx->regs->fp. */
if (regs) {
JS_ASSERT(regs->fp);
JS_ASSERT(activeSegment() == seg);
JSStackFrame *f = fp;
JSStackFrame *f = regs->fp;
JSStackFrame *stop = seg->getInitialFrame()->down;
for (; f != stop; f = f->down) {
if (f == target)
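The jscntxt.cpp changes above route segment suspend/save/restore through cx->regs rather than a separate cx->fp. The public face of saveActiveSegment()/restoreSegment() is the JS_SaveFrameChain/JS_RestoreFrameChain pair (see the JS_RestoreFrameChain hunk in jsapi.cpp, which now asserts !cx->hasfp()). A hedged sketch of that embedder pattern, assuming the signatures of this era where JS_SaveFrameChain returns the detached frame; the helper itself is illustrative.

/* Run an unrelated script as if from the top level, hiding the frames of
 * whatever script called into this native. */
static JSBool
EvalDetached(JSContext *cx, JSObject *global, const char *code)
{
    JSStackFrame *saved = JS_SaveFrameChain(cx);   /* suspend active segment */
    jsval ignored;
    JSBool ok = JS_EvaluateScript(cx, global, code, strlen(code),
                                  "detached", 1, &ignored);
    JS_RestoreFrameChain(cx, saved);               /* resume it afterwards */
    return ok;
}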

View file

@ -266,16 +266,16 @@ struct GlobalState {
* The frames of a non-empty segment must all be in the same context and thus
* each non-empty segment is referred to as being "in" a context. Segments in a
* context have an additional state of being either "active" or "suspended". A
* suspended segment |ss| has a "suspended frame" which is snapshot of |cx->fp|
* suspended segment |ss| has a "suspended frame" which is a snapshot of |cx->regs|
* when the segment was suspended and serves as the current frame of |ss|.
* There is at most one active segment in a given context. Segments in a
* context execute LIFO and are maintained in a stack. The top of this stack
* is the context's "current segment". If a context |cx| has an active segment
* |ss|, then:
* 1. |ss| is |cx|'s current segment,
* 2. |cx->fp != NULL|, and
* 3. |ss|'s current frame is |cx->fp|.
* Moreover, |cx->fp != NULL| iff |cx| has an active segment.
* 2. |cx->regs != NULL|, and
* 3. |ss|'s current frame is |cx->regs->fp|.
* Moreover, |cx->regs != NULL| iff |cx| has an active segment.
*
* An empty segment is not associated with any context. Empty segments are
* created when there is not an active segment for a context at the top of the
@ -306,9 +306,6 @@ class StackSegment
/* The first frame executed in this segment. null iff cx is null */
JSStackFrame *initialFrame;
/* If this segment is suspended, the top of the segment. */
JSStackFrame *suspendedFrame;
/* If this segment is suspended, |cx->regs| when it was suspended. */
JSFrameRegs *suspendedRegs;
@ -318,17 +315,22 @@ class StackSegment
/* Whether this segment was suspended by JS_SaveFrameChain. */
bool saved;
/* Align at 8 bytes on all platforms. */
#if JS_BITS_PER_WORD == 32
void *padding;
#endif
/*
* To make isActive a single null-ness check, this non-null constant is
* assigned to suspendedFrame when !inContext.
* assigned to suspendedRegs when !inContext.
*/
#define NON_NULL_SUSPENDED_FRAME ((JSStackFrame *)0x1)
#define NON_NULL_SUSPENDED_REGS ((JSFrameRegs *)0x1)
public:
StackSegment()
: cx(NULL), previousInContext(NULL), previousInMemory(NULL),
initialFrame(NULL), suspendedFrame(NON_NULL_SUSPENDED_FRAME),
suspendedRegs(NULL), initialVarObj(NULL), saved(false)
initialFrame(NULL), suspendedRegs(NON_NULL_SUSPENDED_REGS),
initialVarObj(NULL), saved(false)
{
JS_ASSERT(!inContext());
}
@ -356,20 +358,20 @@ class StackSegment
bool inContext() const {
JS_ASSERT(!!cx == !!initialFrame);
JS_ASSERT_IF(!cx, suspendedFrame == NON_NULL_SUSPENDED_FRAME && !saved);
JS_ASSERT_IF(!cx, suspendedRegs == NON_NULL_SUSPENDED_REGS && !saved);
return cx;
}
bool isActive() const {
JS_ASSERT_IF(!suspendedFrame, cx && !saved);
JS_ASSERT_IF(!cx, suspendedFrame == NON_NULL_SUSPENDED_FRAME);
return !suspendedFrame;
JS_ASSERT_IF(!suspendedRegs, cx && !saved);
JS_ASSERT_IF(!cx, suspendedRegs == NON_NULL_SUSPENDED_REGS);
return !suspendedRegs;
}
bool isSuspended() const {
JS_ASSERT_IF(!cx || !suspendedFrame, !saved);
JS_ASSERT_IF(!cx, suspendedFrame == NON_NULL_SUSPENDED_FRAME);
return cx && suspendedFrame;
JS_ASSERT_IF(!cx || !suspendedRegs, !saved);
JS_ASSERT_IF(!cx, suspendedRegs == NON_NULL_SUSPENDED_REGS);
return cx && suspendedRegs;
}
/* Substate of suspended, queryable in any state. */
@ -385,7 +387,7 @@ class StackSegment
JS_ASSERT(!inContext());
this->cx = cx;
initialFrame = f;
suspendedFrame = NULL;
suspendedRegs = NULL;
JS_ASSERT(isActive());
}
@ -393,7 +395,7 @@ class StackSegment
JS_ASSERT(isActive());
this->cx = NULL;
initialFrame = NULL;
suspendedFrame = NON_NULL_SUSPENDED_FRAME;
suspendedRegs = NON_NULL_SUSPENDED_REGS;
JS_ASSERT(!inContext());
}
@ -401,29 +403,28 @@ class StackSegment
return cx;
}
#undef NON_NULL_SUSPENDED_FRAME
#undef NON_NULL_SUSPENDED_REGS
/* Transitioning between isActive <--> isSuspended */
void suspend(JSStackFrame *fp, JSFrameRegs *regs) {
void suspend(JSFrameRegs *regs) {
JS_ASSERT(isActive());
JS_ASSERT(fp && contains(fp));
suspendedFrame = fp;
JS_ASSERT(isSuspended());
JS_ASSERT(regs && regs->fp && contains(regs->fp));
suspendedRegs = regs;
JS_ASSERT(isSuspended());
}
void resume() {
JS_ASSERT(isSuspended());
suspendedFrame = NULL;
suspendedRegs = NULL;
JS_ASSERT(isActive());
}
/* When isSuspended, transitioning isSaved <--> !isSaved */
void save(JSStackFrame *fp, JSFrameRegs *regs) {
void save(JSFrameRegs *regs) {
JS_ASSERT(!isSuspended());
suspend(fp, regs);
suspend(regs);
saved = true;
JS_ASSERT(isSaved());
}
@ -442,21 +443,20 @@ class StackSegment
return initialFrame;
}
inline JSStackFrame *getCurrentFrame() const;
inline JSFrameRegs *getCurrentRegs() const;
inline JSStackFrame *getCurrentFrame() const;
/* Data available when isSuspended. */
JSStackFrame *getSuspendedFrame() const {
JS_ASSERT(isSuspended());
return suspendedFrame;
}
JSFrameRegs *getSuspendedRegs() const {
JS_ASSERT(isSuspended());
return suspendedRegs;
}
JSStackFrame *getSuspendedFrame() const {
return suspendedRegs->fp;
}
/* JSContext / js::StackSpace bookkeeping. */
void setPreviousInContext(StackSegment *seg) {
@ -525,19 +525,17 @@ class InvokeFrameGuard
{
friend class StackSpace;
JSContext *cx; /* null implies nothing pushed */
JSStackFrame *fp;
JSFrameRegs regs;
JSFrameRegs *prevRegs;
public:
InvokeFrameGuard() : cx(NULL), fp(NULL) {}
InvokeFrameGuard() : cx(NULL) {}
JS_REQUIRES_STACK ~InvokeFrameGuard();
bool pushed() const { return cx != NULL; }
JSStackFrame *getFrame() { return fp; }
JSFrameRegs &getRegs() { return regs; }
};
/* See StackSpace::pushExecuteFrame. */
class ExecuteFrameGuard
class FrameGuard
{
friend class StackSpace;
JSContext *cx; /* null implies nothing pushed */
@ -546,8 +544,8 @@ class ExecuteFrameGuard
JSStackFrame *fp;
JSStackFrame *down;
public:
ExecuteFrameGuard() : cx(NULL), vp(NULL), fp(NULL) {}
JS_REQUIRES_STACK ~ExecuteFrameGuard();
FrameGuard() : cx(NULL), vp(NULL), fp(NULL) {}
JS_REQUIRES_STACK ~FrameGuard();
bool pushed() const { return cx != NULL; }
Value *getvp() const { return vp; }
JSStackFrame *getFrame() const { return fp; }
@ -563,8 +561,9 @@ class ExecuteFrameGuard
* than explicitly stored as pointers. To maintain useful invariants, stack
* space is not given out arbitrarily, but rather allocated/deallocated for
* specific purposes. The use cases currently supported are: calling a function
* with arguments (e.g. Invoke), executing a script (e.g. Execute) and inline
* interpreter calls. See associated member functions below.
* with arguments (e.g. Invoke), executing a script (e.g. Execute), inline
* interpreter calls, and pushing "dummy" frames for bookkeeping purposes. See
* associated member functions below.
*
* First, we consider the layout of individual segments. (See the
* js::StackSegment comment for terminology.) A non-empty segment (i.e., a
@ -652,8 +651,8 @@ class StackSpace
JS_REQUIRES_STACK inline void popInvokeArgs(const InvokeArgsGuard &args);
friend class InvokeFrameGuard;
JS_REQUIRES_STACK void popInvokeFrame(const InvokeFrameGuard &ag);
friend class ExecuteFrameGuard;
JS_REQUIRES_STACK void popExecuteFrame(JSContext *cx);
friend class FrameGuard;
JS_REQUIRES_STACK void popFrame(JSContext *cx);
/* Return a pointer to the first unused slot. */
JS_REQUIRES_STACK
@ -711,7 +710,7 @@ class StackSpace
JS_REQUIRES_STACK void mark(JSTracer *trc);
/*
* For all three use cases below:
* For all four use cases below:
* - The boolean-valued functions call js_ReportOutOfScriptQuota on OOM.
* - The "get*Frame" functions do not change any global state, they just
* check OOM and return pointers to an uninitialized frame with the
@ -751,9 +750,9 @@ class StackSpace
JS_REQUIRES_STACK
bool getExecuteFrame(JSContext *cx, JSStackFrame *down,
uintN vplen, uintN nfixed,
ExecuteFrameGuard &fg) const;
FrameGuard &fg) const;
JS_REQUIRES_STACK
void pushExecuteFrame(JSContext *cx, ExecuteFrameGuard &fg,
void pushExecuteFrame(JSContext *cx, FrameGuard &fg,
JSFrameRegs &regs, JSObject *initialVarObj);
/*
@ -779,11 +778,16 @@ class StackSpace
void getSynthesizedSlowNativeFrame(JSContext *cx, StackSegment *&seg, JSStackFrame *&fp);
JS_REQUIRES_STACK
void pushSynthesizedSlowNativeFrame(JSContext *cx, StackSegment *seg, JSStackFrame *fp,
JSFrameRegs &regs);
void pushSynthesizedSlowNativeFrame(JSContext *cx, StackSegment *seg, JSFrameRegs &regs);
JS_REQUIRES_STACK
void popSynthesizedSlowNativeFrame(JSContext *cx);
/*
* For pushing a bookkeeping frame.
*/
JS_REQUIRES_STACK
bool pushDummyFrame(JSContext *cx, FrameGuard &fg, JSFrameRegs &regs, JSObject *scopeChain);
};
JS_STATIC_ASSERT(StackSpace::CAPACITY_VALS % StackSpace::COMMIT_VALS == 0);
@ -1612,6 +1616,7 @@ struct JSRuntime {
#ifdef JS_GCMETER
JSGCStats gcStats;
JSGCArenaStats gcArenaStats[FINALIZE_LIMIT];
#endif
#ifdef DEBUG
@ -1887,26 +1892,32 @@ struct JSContext
/* GC heap compartment. */
JSCompartment *compartment;
/* Currently executing frame, set by stack operations. */
JS_REQUIRES_STACK
JSStackFrame *fp;
/*
* Currently executing frame's regs, set by stack operations.
* |fp != NULL| iff |regs != NULL| (although regs->pc can be NULL)
*/
/* Currently executing frame and regs, set by stack operations. */
JS_REQUIRES_STACK
JSFrameRegs *regs;
/* Current frame accessors. */
JSStackFrame* fp() {
JS_ASSERT(regs && regs->fp);
return regs->fp;
}
JSStackFrame* maybefp() {
JS_ASSERT_IF(regs, regs->fp);
return regs ? regs->fp : NULL;
}
bool hasfp() {
JS_ASSERT_IF(regs, regs->fp);
return !!regs;
}
private:
friend class js::StackSpace;
friend bool js::Interpret(JSContext *);
/* 'fp' and 'regs' must only be changed by calling these functions. */
void setCurrentFrame(JSStackFrame *fp) {
this->fp = fp;
}
/* 'regs' must only be changed by calling this function. */
void setCurrentRegs(JSFrameRegs *regs) {
this->regs = regs;
}
@ -1958,7 +1969,7 @@ struct JSContext
public:
void assertSegmentsInSync() const {
#ifdef DEBUG
if (fp) {
if (regs) {
JS_ASSERT(currentSegment->isActive());
if (js::StackSegment *prev = currentSegment->getPreviousInContext())
JS_ASSERT(!prev->isActive());
@ -1971,7 +1982,7 @@ struct JSContext
/* Return whether this context has an active segment. */
bool hasActiveSegment() const {
assertSegmentsInSync();
return !!fp;
return !!regs;
}
/* Assuming there is an active segment, return it. */
@ -1987,8 +1998,7 @@ struct JSContext
}
/* Add the given segment to the list as the new active segment. */
void pushSegmentAndFrame(js::StackSegment *newseg, JSStackFrame *newfp,
JSFrameRegs &regs);
void pushSegmentAndFrame(js::StackSegment *newseg, JSFrameRegs &regs);
/* Remove the active segment and make the next segment active. */
void popSegmentAndFrame();
@ -2009,7 +2019,7 @@ struct JSContext
* Search the call stack for the nearest frame with static level targetLevel.
*/
JSStackFrame *findFrameAtLevel(uintN targetLevel) {
JSStackFrame *fp = this->fp;
JSStackFrame *fp = this->regs->fp;
while (true) {
JS_ASSERT(fp && fp->hasScript());
if (fp->getScript()->staticLevel == targetLevel)
@ -2268,8 +2278,8 @@ struct JSContext
#ifdef DEBUG
void assertValidStackDepth(uintN depth) {
JS_ASSERT(0 <= regs->sp - fp->base());
JS_ASSERT(depth <= uintptr_t(regs->sp - fp->base()));
JS_ASSERT(0 <= regs->sp - regs->fp->base());
JS_ASSERT(depth <= uintptr_t(regs->sp - regs->fp->base()));
}
#else
void assertValidStackDepth(uintN /*depth*/) {}
@ -2309,8 +2319,8 @@ JSStackFrame::varobj(JSContext *cx) const
JS_ALWAYS_INLINE jsbytecode *
JSStackFrame::pc(JSContext *cx) const
{
JS_ASSERT(cx->containingSegment(this) != NULL);
return (cx->fp == this) ? cx->regs->pc : savedPC;
JS_ASSERT(cx->regs && cx->containingSegment(this) != NULL);
return (cx->regs->fp == this) ? cx->regs->pc : savedPC;
}
#ifdef JS_THREADSAFE
@ -3187,8 +3197,8 @@ SetPendingException(JSContext *cx, const Value &v);
} /* namespace js */
/*
* Get the current cx->fp, first lazily instantiating stack frames if needed.
* (Do not access cx->fp directly except in JS_REQUIRES_STACK code.)
* Get the current frame, first lazily instantiating stack frames if needed.
* (Do not access cx->fp() directly except in JS_REQUIRES_STACK code.)
*
* Defined in jstracer.cpp if JS_TRACER is defined.
*/
@ -3196,7 +3206,7 @@ static JS_FORCES_STACK JS_INLINE JSStackFrame *
js_GetTopStackFrame(JSContext *cx)
{
js::LeaveTrace(cx);
return cx->fp;
return cx->maybefp();
}
static JS_INLINE JSBool

View file

@ -56,13 +56,6 @@ JSContext::ensureGeneratorStackSpace()
namespace js {
JS_REQUIRES_STACK JS_ALWAYS_INLINE JSStackFrame *
StackSegment::getCurrentFrame() const
{
JS_ASSERT(inContext());
return isActive() ? cx->fp : getSuspendedFrame();
}
JS_REQUIRES_STACK JS_ALWAYS_INLINE JSFrameRegs *
StackSegment::getCurrentRegs() const
{
@ -70,6 +63,12 @@ StackSegment::getCurrentRegs() const
return isActive() ? cx->regs : getSuspendedRegs();
}
JS_REQUIRES_STACK JS_ALWAYS_INLINE JSStackFrame *
StackSegment::getCurrentFrame() const
{
return getCurrentRegs()->fp;
}
JS_REQUIRES_STACK inline Value *
StackSpace::firstUnused() const
{
@ -82,7 +81,8 @@ StackSpace::firstUnused() const
Value *sp = seg->getCurrentRegs()->sp;
if (invokeArgEnd > sp) {
JS_ASSERT(invokeSegment == currentSegment);
JS_ASSERT_IF(seg->maybeContext()->fp, invokeFrame == seg->maybeContext()->fp);
JS_ASSERT_IF(seg->maybeContext()->hasfp(),
invokeFrame == seg->maybeContext()->fp());
return invokeArgEnd;
}
return sp;
@ -169,7 +169,7 @@ StackSpace::pushInvokeArgs(JSContext *cx, uintN argc, InvokeArgsGuard &ag)
ag.prevInvokeSegment = invokeSegment;
invokeSegment = currentSegment;
ag.prevInvokeFrame = invokeFrame;
invokeFrame = cx->fp;
invokeFrame = cx->maybefp();
#endif
ag.cx = cx;
@ -188,7 +188,7 @@ StackSpace::popInvokeArgs(const InvokeArgsGuard &ag)
JS_ASSERT(isCurrentAndActive(ag.cx));
JS_ASSERT(invokeSegment == currentSegment);
JS_ASSERT(invokeFrame == ag.cx->fp);
JS_ASSERT(invokeFrame == ag.cx->maybefp());
JS_ASSERT(invokeArgEnd == ag.argv() + ag.argc());
#ifdef DEBUG
@ -217,7 +217,7 @@ StackSpace::getInvokeFrame(JSContext *cx, const CallArgs &args,
ptrdiff_t nvals = nmissing + VALUES_PER_STACK_FRAME + nfixed;
if (!ensureSpace(cx, start, nvals))
return false;
fg.fp = reinterpret_cast<JSStackFrame *>(start + nmissing);
fg.regs.fp = reinterpret_cast<JSStackFrame *>(start + nmissing);
return true;
}
@ -227,18 +227,17 @@ StackSpace::pushInvokeFrame(JSContext *cx, const CallArgs &args,
{
JS_ASSERT(firstUnused() == args.argv() + args.argc());
JSStackFrame *fp = fg.fp;
JSStackFrame *down = cx->fp;
JSStackFrame *fp = fg.regs.fp;
JSStackFrame *down = cx->maybefp();
fp->down = down;
if (JS_UNLIKELY(!currentSegment->inContext())) {
cx->pushSegmentAndFrame(currentSegment, fp, fg.regs);
cx->pushSegmentAndFrame(currentSegment, fg.regs);
} else {
#ifdef DEBUG
fp->savedPC = JSStackFrame::sInvalidPC;
JS_ASSERT(down->savedPC == JSStackFrame::sInvalidPC);
#endif
down->savedPC = cx->regs->pc;
cx->setCurrentFrame(fp);
fg.prevRegs = cx->regs;
cx->setCurrentRegs(&fg.regs);
}
@ -251,18 +250,17 @@ JS_REQUIRES_STACK JS_ALWAYS_INLINE void
StackSpace::popInvokeFrame(const InvokeFrameGuard &fg)
{
JSContext *cx = fg.cx;
JSStackFrame *fp = fg.fp;
JSStackFrame *fp = fg.regs.fp;
JS_ASSERT(isCurrentAndActive(cx));
if (JS_UNLIKELY(currentSegment->getInitialFrame() == fp)) {
cx->popSegmentAndFrame();
} else {
JS_ASSERT(fp == cx->fp);
JS_ASSERT(&fg.regs == cx->regs);
cx->setCurrentFrame(fp->down);
JS_ASSERT(fp->down == fg.prevRegs->fp);
cx->setCurrentRegs(fg.prevRegs);
#ifdef DEBUG
cx->fp->savedPC = JSStackFrame::sInvalidPC;
cx->fp()->savedPC = JSStackFrame::sInvalidPC;
#endif
}
}
@ -296,14 +294,13 @@ StackSpace::pushInlineFrame(JSContext *cx, JSStackFrame *fp, jsbytecode *pc,
JSStackFrame *newfp)
{
JS_ASSERT(isCurrentAndActive(cx));
JS_ASSERT(cx->fp == fp && cx->regs->pc == pc);
JS_ASSERT(cx->regs->fp == fp && cx->regs->pc == pc);
fp->savedPC = pc;
newfp->down = fp;
#ifdef DEBUG
newfp->savedPC = JSStackFrame::sInvalidPC;
#endif
cx->setCurrentFrame(newfp);
}
JS_REQUIRES_STACK JS_ALWAYS_INLINE void
@ -311,26 +308,28 @@ StackSpace::popInlineFrame(JSContext *cx, JSStackFrame *up, JSStackFrame *down)
{
JS_ASSERT(isCurrentAndActive(cx));
JS_ASSERT(cx->hasActiveSegment());
JS_ASSERT(cx->fp == up && up->down == down);
JS_ASSERT(cx->regs->fp == up && up->down == down);
JS_ASSERT(up->savedPC == JSStackFrame::sInvalidPC);
JS_ASSERT(!up->hasIMacroPC());
JSFrameRegs *regs = cx->regs;
regs->fp = down;
regs->pc = down->savedPC;
#ifdef DEBUG
down->savedPC = JSStackFrame::sInvalidPC;
#endif
cx->setCurrentFrame(down);
}
JS_REQUIRES_STACK inline
FrameRegsIter::FrameRegsIter(JSContext *cx)
{
curseg = cx->getCurrentSegment();
if (JS_UNLIKELY(!curseg || !curseg->isActive()))
if (JS_UNLIKELY(!curseg || !curseg->isActive())) {
initSlow();
JS_ASSERT(cx->fp);
curfp = cx->fp;
return;
}
JS_ASSERT(cx->regs->fp);
curfp = cx->regs->fp;
cursp = cx->regs->sp;
curpc = cx->regs->pc;
return;
@ -408,7 +407,7 @@ class CompartmentChecker
public:
explicit CompartmentChecker(JSContext *cx) : context(cx), compartment(cx->compartment) {
check(cx->fp ? JS_GetGlobalForScopeChain(cx) : cx->globalObject);
check(cx->hasfp() ? JS_GetGlobalForScopeChain(cx) : cx->globalObject);
VOUCH_DOES_NOT_REQUIRE_STACK();
}
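With cx->fp gone, code that needs to walk the live frames together with their pc/sp is expected to go through FrameRegsIter, whose constructor is adjusted above. A hedged sketch of its use, assuming the done()/operator++/fp()/pc() interface jscntxt.h declares for it at this revision; the dump helper and its output format are illustrative only.

/* Print filename:line for every scripted frame, newest first (needs <stdio.h>). */
static void
DumpScriptedFrames(JSContext *cx)
{
    for (js::FrameRegsIter i(cx); !i.done(); ++i) {
        JSStackFrame *fp = i.fp();
        if (!fp->hasScript())
            continue;                          /* skip native/dummy frames */
        JSScript *script = fp->getScript();
        fprintf(stderr, "%s:%u\n", script->filename,
                JS_PCToLineNumber(cx, script, i.pc()));
    }
}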

View file

@ -1466,43 +1466,15 @@ JS_PutPropertyDescArray(JSContext *cx, JSPropertyDescArray *pda)
/************************************************************************/
static bool
SetupFakeFrame(JSContext *cx, ExecuteFrameGuard &frame, JSFrameRegs &regs, JSObject *scopeobj)
{
JSFunction *fun = GET_FUNCTION_PRIVATE(cx, scopeobj);
JS_ASSERT(fun->minArgs() == 0 && !fun->isInterpreted() && fun->u.n.extra == 0);
const uintN vplen = 2;
const uintN nfixed = 0;
if (!cx->stack().getExecuteFrame(cx, js_GetTopStackFrame(cx), vplen, nfixed, frame))
return false;
Value *vp = frame.getvp();
PodZero(vp, vplen);
vp[0].setObject(*scopeobj);
vp[1].setNull(); // satisfy LeaveTree assert
JSStackFrame *fp = frame.getFrame();
PodZero(fp);
fp->setFunction(fun);
fp->argv = vp + 2;
fp->setScopeChain(scopeobj->getGlobal());
regs.pc = NULL;
regs.sp = fp->slots();
cx->stack().pushExecuteFrame(cx, frame, regs, NULL);
return true;
}
JS_FRIEND_API(JSBool)
js_GetPropertyByIdWithFakeFrame(JSContext *cx, JSObject *obj, JSObject *scopeobj, jsid id,
jsval *vp)
{
ExecuteFrameGuard frame;
JSFrameRegs regs;
JS_ASSERT(scopeobj->isGlobal());
if (!SetupFakeFrame(cx, frame, regs, scopeobj))
JSFrameRegs regs;
FrameGuard frame;
if (!cx->stack().pushDummyFrame(cx, frame, regs, scopeobj))
return false;
bool ok = JS_GetPropertyById(cx, obj, id, vp);
@ -1514,10 +1486,11 @@ JS_FRIEND_API(JSBool)
js_SetPropertyByIdWithFakeFrame(JSContext *cx, JSObject *obj, JSObject *scopeobj, jsid id,
jsval *vp)
{
ExecuteFrameGuard frame;
JSFrameRegs regs;
JS_ASSERT(scopeobj->isGlobal());
if (!SetupFakeFrame(cx, frame, regs, scopeobj))
JSFrameRegs regs;
FrameGuard frame;
if (!cx->stack().pushDummyFrame(cx, frame, regs, scopeobj))
return false;
bool ok = JS_SetPropertyById(cx, obj, id, vp);
@ -1529,10 +1502,11 @@ JS_FRIEND_API(JSBool)
js_CallFunctionValueWithFakeFrame(JSContext *cx, JSObject *obj, JSObject *scopeobj, jsval funval,
uintN argc, jsval *argv, jsval *rval)
{
ExecuteFrameGuard frame;
JSFrameRegs regs;
JS_ASSERT(scopeobj->isGlobal());
if (!SetupFakeFrame(cx, frame, regs, scopeobj))
JSFrameRegs regs;
FrameGuard frame;
if (!cx->stack().pushDummyFrame(cx, frame, regs, scopeobj))
return false;
bool ok = JS_CallFunctionValue(cx, obj, funval, argc, argv, rval);
@ -2241,7 +2215,7 @@ jstv_Lineno(JSContext *cx, JSStackFrame *fp)
JS_FRIEND_API(void)
js::StoreTraceVisState(JSContext *cx, TraceVisState s, TraceVisExitReason r)
{
JSStackFrame *fp = cx->fp;
JSStackFrame *fp = cx->fp();
char *script_file = jstv_Filename(fp);
JSHashNumber hash = JS_HashString(script_file);

View file

@ -217,8 +217,8 @@ DTrace::ObjectCreationScope::handleCreationEnd()
void
DTrace::ObjectCreationScope::handleCreationImpl(JSObject *obj)
{
JAVASCRIPT_OBJECT_CREATE(jsdtrace_filename(cx->fp), (char *)clasp->name, (uintptr_t)obj,
jsdtrace_frame_linenumber(cx, cx->fp));
JAVASCRIPT_OBJECT_CREATE(jsdtrace_filename(fp), (char *)clasp->name, (uintptr_t)obj,
jsdtrace_frame_linenumber(cx, fp));
}
void

View file

@ -293,7 +293,7 @@ InitExnPrivate(JSContext *cx, JSObject *exnObject, JSString *message,
stackDepth = 0;
valueCount = 0;
for (fp = js_GetTopStackFrame(cx); fp; fp = fp->down) {
if (fp->hasFunction() && fp->argv) {
if (fp->hasFunction() && fp->argv && !fp->isEvalFrame()) {
Value v = NullValue();
if (checkAccess &&
!checkAccess(cx, fp->callee(), callerid, JSACC_READ, &v)) {
@ -334,7 +334,7 @@ InitExnPrivate(JSContext *cx, JSObject *exnObject, JSString *message,
values = GetStackTraceValueBuffer(priv);
elem = priv->stackElems;
for (fp = js_GetTopStackFrame(cx); fp != fpstop; fp = fp->down) {
if (!fp->hasFunction()) {
if (!fp->hasFunction() || fp->isEvalFrame()) {
elem->funName = NULL;
elem->argc = 0;
} else {

View file

@ -2823,7 +2823,7 @@ js_NewFlatClosure(JSContext *cx, JSFunction *fun)
* Flat closures can be partial, they may need to search enclosing scope
* objects via JSOP_NAME, etc.
*/
JSObject *scopeChain = js_GetScopeChain(cx, cx->fp);
JSObject *scopeChain = js_GetScopeChain(cx, cx->fp());
if (!scopeChain)
return NULL;
@ -2844,11 +2844,11 @@ js_NewFlatClosure(JSContext *cx, JSFunction *fun)
JSObject *
js_NewDebuggableFlatClosure(JSContext *cx, JSFunction *fun)
{
JS_ASSERT(cx->fp->getFunction()->flags & JSFUN_HEAVYWEIGHT);
JS_ASSERT(!cx->fp->getFunction()->optimizedClosure());
JS_ASSERT(cx->fp()->getFunction()->flags & JSFUN_HEAVYWEIGHT);
JS_ASSERT(!cx->fp()->getFunction()->optimizedClosure());
JS_ASSERT(FUN_FLAT_CLOSURE(fun));
return WrapEscapingClosure(cx, cx->fp, fun);
return WrapEscapingClosure(cx, cx->fp(), fun);
}
JSFunction *

View file

@ -240,6 +240,14 @@ JS_STATIC_ASSERT(sizeof(JSFunction) % GC_CELL_SIZE == 0);
JS_STATIC_ASSERT(sizeof(JSXML) % GC_CELL_SIZE == 0);
#endif
#ifdef JS_GCMETER
# define METER(x) ((void) (x))
# define METER_IF(condition, x) ((void) ((condition) && (x)))
#else
# define METER(x) ((void) 0)
# define METER_IF(condition, x) ((void) 0)
#endif
struct JSGCArenaInfo {
/*
* Allocation list for the arena.
@ -532,7 +540,7 @@ MarkIfUnmarkedGCThing(void *thing, uint32 color = BLACK)
return true;
}
inline size_t
size_t
ThingsPerArena(size_t thingSize)
{
JS_ASSERT(!(thingSize & GC_CELL_MASK));
@ -595,22 +603,6 @@ MakeNewArenaFreeList(JSGCArena *a, size_t thingSize)
return reinterpret_cast<JSGCThing *>(thingsStart);
}
#ifdef JS_GCMETER
# define METER(x) ((void) (x))
# define METER_IF(condition, x) ((void) ((condition) && (x)))
#else
# define METER(x) ((void) 0)
# define METER_IF(condition, x) ((void) 0)
#endif
#define METER_UPDATE_MAX(maxLval, rval) \
METER_IF((maxLval) < (rval), (maxLval) = (rval))
#ifdef MOZ_GCTIMER
static jsrefcount newChunkCount = 0;
static jsrefcount destroyChunkCount = 0;
#endif
inline jsuword
GetGCChunk(JSRuntime *rt)
{
@ -1081,30 +1073,6 @@ IsGCThingWord(JSRuntime *rt, jsuword w)
return IsGCThingWord(rt, w, thing, traceKind);
}
#if defined(JS_DUMP_CONSERVATIVE_GC_ROOTS) || defined(JS_GCMETER)
void
ConservativeGCStats::dump(FILE *fp)
{
size_t words = 0;
for (size_t i = 0; i != JS_ARRAY_LENGTH(counter); ++i)
words += counter[i];
#define ULSTAT(x) ((unsigned long)(x))
fprintf(fp, "CONSERVATIVE STACK SCANNING:\n");
fprintf(fp, " number of stack words: %lu\n", ULSTAT(words));
fprintf(fp, " excluded, low bit set: %lu\n", ULSTAT(counter[CGCT_LOWBITSET]));
fprintf(fp, " not withing a chunk: %lu\n", ULSTAT(counter[CGCT_NOTCHUNK]));
fprintf(fp, " not within arena range: %lu\n", ULSTAT(counter[CGCT_NOTARENA]));
fprintf(fp, " points to free arena: %lu\n", ULSTAT(counter[CGCT_FREEARENA]));
fprintf(fp, " excluded, wrong tag: %lu\n", ULSTAT(counter[CGCT_WRONGTAG]));
fprintf(fp, " excluded, not live: %lu\n", ULSTAT(counter[CGCT_NOTLIVE]));
fprintf(fp, " valid GC things: %lu\n", ULSTAT(counter[CGCT_VALID]));
#undef ULSTAT
}
#endif
static void
MarkWordConservatively(JSTracer *trc, jsuword w)
{
@ -1211,158 +1179,6 @@ ConservativeGCThreadData::disable()
} /* namespace js */
#ifdef JS_GCMETER
static void
UpdateArenaStats(JSGCArenaStats *st, uint32 nlivearenas, uint32 nkilledArenas,
uint32 nthings)
{
size_t narenas;
narenas = nlivearenas + nkilledArenas;
JS_ASSERT(narenas >= st->livearenas);
st->newarenas = narenas - st->livearenas;
st->narenas = narenas;
st->livearenas = nlivearenas;
if (st->maxarenas < narenas)
st->maxarenas = narenas;
st->totalarenas += narenas;
st->nthings = nthings;
if (st->maxthings < nthings)
st->maxthings = nthings;
st->totalthings += nthings;
}
JS_FRIEND_API(void)
js_DumpGCStats(JSRuntime *rt, FILE *fp)
{
static const char *const GC_ARENA_NAMES[] = {
"object",
"function",
#if JS_HAS_XML_SUPPORT
"xml",
#endif
"short string",
"string",
"external_string_0",
"external_string_1",
"external_string_2",
"external_string_3",
"external_string_4",
"external_string_5",
"external_string_6",
"external_string_7",
};
JS_STATIC_ASSERT(JS_ARRAY_LENGTH(GC_ARENA_NAMES) == FINALIZE_LIMIT);
fprintf(fp, "\nGC allocation statistics:\n\n");
#define UL(x) ((unsigned long)(x))
#define ULSTAT(x) UL(rt->gcStats.x)
#define PERCENT(x,y) (100.0 * (double) (x) / (double) (y))
size_t sumArenas = 0;
size_t sumTotalArenas = 0;
size_t sumThings = 0;
size_t sumMaxThings = 0;
size_t sumThingSize = 0;
size_t sumTotalThingSize = 0;
size_t sumArenaCapacity = 0;
size_t sumTotalArenaCapacity = 0;
size_t sumAlloc = 0;
size_t sumLocalAlloc = 0;
size_t sumFail = 0;
size_t sumRetry = 0;
for (int i = 0; i < (int) FINALIZE_LIMIT; i++) {
size_t thingSize, thingsPerArena;
JSGCArenaStats *st;
thingSize = rt->gcArenaList[i].thingSize;
thingsPerArena = ThingsPerArena(thingSize);
st = &rt->gcStats.arenaStats[i];
if (st->maxarenas == 0)
continue;
fprintf(fp,
"%s arenas (thing size %lu, %lu things per arena):",
GC_ARENA_NAMES[i], UL(thingSize), UL(thingsPerArena));
putc('\n', fp);
fprintf(fp, " arenas before GC: %lu\n", UL(st->narenas));
fprintf(fp, " new arenas before GC: %lu (%.1f%%)\n",
UL(st->newarenas), PERCENT(st->newarenas, st->narenas));
fprintf(fp, " arenas after GC: %lu (%.1f%%)\n",
UL(st->livearenas), PERCENT(st->livearenas, st->narenas));
fprintf(fp, " max arenas: %lu\n", UL(st->maxarenas));
fprintf(fp, " things: %lu\n", UL(st->nthings));
fprintf(fp, " GC cell utilization: %.1f%%\n",
PERCENT(st->nthings, thingsPerArena * st->narenas));
fprintf(fp, " average cell utilization: %.1f%%\n",
PERCENT(st->totalthings, thingsPerArena * st->totalarenas));
fprintf(fp, " max things: %lu\n", UL(st->maxthings));
fprintf(fp, " alloc attempts: %lu\n", UL(st->alloc));
fprintf(fp, " alloc without locks: %lu (%.1f%%)\n",
UL(st->localalloc), PERCENT(st->localalloc, st->alloc));
sumArenas += st->narenas;
sumTotalArenas += st->totalarenas;
sumThings += st->nthings;
sumMaxThings += st->maxthings;
sumThingSize += thingSize * st->nthings;
sumTotalThingSize += size_t(thingSize * st->totalthings);
sumArenaCapacity += thingSize * thingsPerArena * st->narenas;
sumTotalArenaCapacity += thingSize * thingsPerArena * st->totalarenas;
sumAlloc += st->alloc;
sumLocalAlloc += st->localalloc;
sumFail += st->fail;
sumRetry += st->retry;
putc('\n', fp);
}
fputs("Never used arenas:\n", fp);
for (int i = 0; i < (int) FINALIZE_LIMIT; i++) {
size_t thingSize, thingsPerArena;
JSGCArenaStats *st;
thingSize = rt->gcArenaList[i].thingSize;
thingsPerArena = ThingsPerArena(thingSize);
st = &rt->gcStats.arenaStats[i];
if (st->maxarenas != 0)
continue;
fprintf(fp,
"%s (thing size %lu, %lu things per arena)\n",
GC_ARENA_NAMES[i], UL(thingSize), UL(thingsPerArena));
}
fprintf(fp, "\nTOTAL STATS:\n");
fprintf(fp, " bytes allocated: %lu\n", UL(rt->gcBytes));
fprintf(fp, " total GC arenas: %lu\n", UL(sumArenas));
fprintf(fp, " max allocated arenas: %lu\n", ULSTAT(maxnallarenas));
fprintf(fp, " max allocated chunks: %lu\n", ULSTAT(maxnchunks));
fprintf(fp, " total GC things: %lu\n", UL(sumThings));
fprintf(fp, " max total GC things: %lu\n", UL(sumMaxThings));
fprintf(fp, " GC cell utilization: %.1f%%\n",
PERCENT(sumThingSize, sumArenaCapacity));
fprintf(fp, " average cell utilization: %.1f%%\n",
PERCENT(sumTotalThingSize, sumTotalArenaCapacity));
fprintf(fp, "allocation retries after GC: %lu\n", UL(sumRetry));
fprintf(fp, " alloc attempts: %lu\n", UL(sumAlloc));
fprintf(fp, " alloc without locks: %lu (%.1f%%)\n",
UL(sumLocalAlloc), PERCENT(sumLocalAlloc, sumAlloc));
fprintf(fp, " allocation failures: %lu\n", UL(sumFail));
fprintf(fp, " valid lock calls: %lu\n", ULSTAT(lock));
fprintf(fp, " valid unlock calls: %lu\n", ULSTAT(unlock));
fprintf(fp, " delayed tracing calls: %lu\n", ULSTAT(unmarked));
#ifdef DEBUG
fprintf(fp, " max trace later count: %lu\n", ULSTAT(maxunmarked));
#endif
fprintf(fp, "potentially useful GC calls: %lu\n", ULSTAT(poke));
fprintf(fp, " thing arenas freed so far: %lu\n", ULSTAT(afree));
rt->gcStats.conservative.dump(fp);
#undef UL
#undef ULSTAT
#undef PERCENT
}
#endif
#ifdef DEBUG
static void
CheckLeakedRoots(JSRuntime *rt);
@ -1628,7 +1444,7 @@ RefillFinalizableFreeList(JSContext *cx, unsigned thingKind)
for (;;) {
if (doGC) {
LastDitchGC(cx);
METER(cx->runtime->gcStats.arenaStats[thingKind].retry++);
METER(cx->runtime->gcArenaStats[thingKind].retry++);
canGC = false;
/*
@ -1655,7 +1471,7 @@ RefillFinalizableFreeList(JSContext *cx, unsigned thingKind)
if (a)
break;
if (!canGC) {
METER(cx->runtime->gcStats.arenaStats[thingKind].fail++);
METER(cx->runtime->gcArenaStats[thingKind].fail++);
return NULL;
}
doGC = true;
@ -1702,7 +1518,7 @@ js_NewFinalizableGCThing(JSContext *cx, unsigned thingKind)
#endif
/* Updates of metering counters here may not be thread-safe. */
METER(cx->runtime->gcStats.arenaStats[thingKind].alloc++);
METER(cx->runtime->gcArenaStats[thingKind].alloc++);
JSGCThing **freeListp =
JS_THREAD_DATA(cx)->gcFreeLists.finalizables + thingKind;
@ -1710,7 +1526,7 @@ js_NewFinalizableGCThing(JSContext *cx, unsigned thingKind)
if (thing) {
*freeListp = thing->link;
CheckGCFreeListLink(thing);
METER(cx->runtime->gcStats.arenaStats[thingKind].localalloc++);
METER(cx->runtime->gcArenaStats[thingKind].localalloc++);
return thing;
}
@ -1878,64 +1694,6 @@ GCMarker::~GCMarker()
#endif
}
#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS
void
GCMarker::dumpConservativeRoots()
{
if (!conservativeDumpFileName)
return;
FILE *fp;
if (!strcmp(conservativeDumpFileName, "stdout")) {
fp = stdout;
} else if (!strcmp(conservativeDumpFileName, "stderr")) {
fp = stderr;
} else if (!(fp = fopen(conservativeDumpFileName, "aw"))) {
fprintf(stderr,
"Warning: cannot open %s to dump the conservative roots\n",
conservativeDumpFileName);
return;
}
conservativeStats.dump(fp);
for (ConservativeRoot *i = conservativeRoots.begin();
i != conservativeRoots.end();
++i) {
fprintf(fp, " %p: ", i->thing);
switch (i->traceKind) {
default:
JS_NOT_REACHED("Unknown trace kind");
case JSTRACE_OBJECT: {
JSObject *obj = (JSObject *) i->thing;
fprintf(fp, "object %s", obj->getClass()->name);
break;
}
case JSTRACE_STRING: {
JSString *str = (JSString *) i->thing;
char buf[50];
js_PutEscapedString(buf, sizeof buf, str, '"');
fprintf(fp, "string %s", buf);
break;
}
# if JS_HAS_XML_SUPPORT
case JSTRACE_XML: {
JSXML *xml = (JSXML *) i->thing;
fprintf(fp, "xml %u", (unsigned)xml->xml_class);
break;
}
# endif
}
fputc('\n', fp);
}
fputc('\n', fp);
if (fp != stdout && fp != stderr)
fclose(fp);
}
#endif /* JS_DUMP_CONSERVATIVE_GC_ROOTS */
void
GCMarker::delayMarkingChildren(void *thing)
{
@ -2785,89 +2543,10 @@ FinalizeArenaList(JSContext *cx, unsigned thingKind)
}
arenaList->cursor = arenaList->head;
METER(UpdateArenaStats(&cx->runtime->gcStats.arenaStats[thingKind],
METER(UpdateArenaStats(&cx->runtime->gcArenaStats[thingKind],
nlivearenas, nkilledarenas, nthings));
}
#ifdef MOZ_GCTIMER
const bool JS_WANT_GC_SUITE_PRINT = true; //false for gnuplot output
struct GCTimer {
uint64 enter;
uint64 startMark;
uint64 startSweep;
uint64 sweepObjectEnd;
uint64 sweepStringEnd;
uint64 sweepDestroyEnd;
uint64 end;
GCTimer() {
getFirstEnter();
memset(this, 0, sizeof(GCTimer));
enter = rdtsc();
}
static uint64 getFirstEnter() {
static uint64 firstEnter = rdtsc();
return firstEnter;
}
void finish(bool lastGC) {
end = rdtsc();
if (startMark > 0) {
if (JS_WANT_GC_SUITE_PRINT) {
fprintf(stderr, "%f %f %f\n",
(double)(end - enter) / 1e6,
(double)(startSweep - startMark) / 1e6,
(double)(sweepDestroyEnd - startSweep) / 1e6);
} else {
static FILE *gcFile;
if (!gcFile) {
gcFile = fopen("gcTimer.dat", "w");
fprintf(gcFile, " AppTime, Total, Mark, Sweep, FinObj,");
fprintf(gcFile, " FinStr, Destroy, newChunks, destoyChunks\n");
}
JS_ASSERT(gcFile);
fprintf(gcFile, "%12.1f, %6.1f, %6.1f, %6.1f, %6.1f, %6.1f, %7.1f, ",
(double)(enter - getFirstEnter()) / 1e6,
(double)(end - enter) / 1e6,
(double)(startSweep - startMark) / 1e6,
(double)(sweepDestroyEnd - startSweep) / 1e6,
(double)(sweepObjectEnd - startSweep) / 1e6,
(double)(sweepStringEnd - sweepObjectEnd) / 1e6,
(double)(sweepDestroyEnd - sweepStringEnd) / 1e6);
fprintf(gcFile, "%10d, %10d \n", newChunkCount,
destroyChunkCount);
fflush(gcFile);
if (lastGC) {
fclose(gcFile);
gcFile = NULL;
}
}
}
newChunkCount = 0;
destroyChunkCount = 0;
}
};
# define GCTIMER_PARAM , GCTimer &gcTimer
# define GCTIMER_ARG , gcTimer
# define TIMESTAMP(x) (gcTimer.x = rdtsc())
# define GCTIMER_BEGIN() GCTimer gcTimer
# define GCTIMER_END(last) (gcTimer.finish(last))
#else
# define GCTIMER_PARAM
# define GCTIMER_ARG
# define TIMESTAMP(x) ((void) 0)
# define GCTIMER_BEGIN() ((void) 0)
# define GCTIMER_END(last) ((void) 0)
#endif
#ifdef JS_THREADSAFE
namespace js {
@ -3128,32 +2807,12 @@ GC(JSContext *cx GCTIMER_PARAM)
#endif
#ifdef JS_SCOPE_DEPTH_METER
{ static FILE *fp;
if (!fp)
fp = fopen("/tmp/scopedepth.stats", "w");
if (fp) {
JS_DumpBasicStats(&rt->protoLookupDepthStats, "proto-lookup depth", fp);
JS_DumpBasicStats(&rt->scopeSearchDepthStats, "scope-search depth", fp);
JS_DumpBasicStats(&rt->hostenvScopeDepthStats, "hostenv scope depth", fp);
JS_DumpBasicStats(&rt->lexicalScopeDepthStats, "lexical scope depth", fp);
putc('\n', fp);
fflush(fp);
}
}
#endif /* JS_SCOPE_DEPTH_METER */
DumpScopeDepthMeter(rt);
#endif
#ifdef JS_DUMP_LOOP_STATS
{ static FILE *lsfp;
if (!lsfp)
lsfp = fopen("/tmp/loopstats", "w");
if (lsfp) {
JS_DumpBasicStats(&rt->loopStats, "loops", lsfp);
fflush(lsfp);
}
}
#endif /* JS_DUMP_LOOP_STATS */
DumpLoopStats(rt);
#endif
}
#ifdef JS_THREADSAFE

View file

@ -56,17 +56,7 @@
#include "jsversion.h"
#include "jsobj.h"
#include "jsfun.h"
#if !defined JS_DUMP_CONSERVATIVE_GC_ROOTS && defined DEBUG
# define JS_DUMP_CONSERVATIVE_GC_ROOTS 1
#endif
#if defined JS_GCMETER
const bool JS_WANT_GC_METER_PRINT = true;
#elif defined DEBUG
# define JS_GCMETER 1
const bool JS_WANT_GC_METER_PRINT = false;
#endif
#include "jsgcstats.h"
#define JSTRACE_XML 2
@ -87,6 +77,9 @@ js_GetExternalStringGCType(JSString *str);
extern JS_FRIEND_API(uint32)
js_GetGCThingTraceKind(void *thing);
extern size_t
ThingsPerArena(size_t thingSize);
/*
* The sole purpose of the function is to preserve public API compatibility
* in JS_GetStringBytes which takes only single JSString* argument.
@ -440,33 +433,6 @@ struct ConservativeGCThreadData {
bool isEnabled() const { return enableCount > 0; }
};
/*
* The conservative GC test for a word shows that it is either a valid GC
* thing or is not for one of the following reasons.
*/
enum ConservativeGCTest {
CGCT_VALID,
CGCT_LOWBITSET, /* excluded because one of the low bits was set */
CGCT_NOTARENA, /* not within arena range in a chunk */
CGCT_NOTCHUNK, /* not within a valid chunk */
CGCT_FREEARENA, /* within arena containing only free things */
CGCT_WRONGTAG, /* tagged pointer but wrong type */
CGCT_NOTLIVE, /* gcthing is not allocated */
CGCT_END
};
struct ConservativeGCStats {
uint32 counter[CGCT_END]; /* ConservativeGCTest classification
counters */
void add(const ConservativeGCStats &another) {
for (size_t i = 0; i != JS_ARRAY_LENGTH(counter); ++i)
counter[i] += another.counter[i];
}
void dump(FILE *fp);
};
struct GCMarker : public JSTracer {
private:
/* The color is only applied to objects, functions and xml. */
@ -522,50 +488,6 @@ struct GCMarker : public JSTracer {
extern void
js_FinalizeStringRT(JSRuntime *rt, JSString *str);
#ifdef JS_GCMETER
struct JSGCArenaStats {
uint32 alloc; /* allocation attempts */
uint32 localalloc; /* allocations from local lists */
uint32 retry; /* allocation retries after running the GC */
uint32 fail; /* allocation failures */
uint32 nthings; /* live GC things */
uint32 maxthings; /* maximum of live GC cells */
double totalthings; /* live GC things the GC scanned so far */
uint32 narenas; /* number of arena in list before the GC */
uint32 newarenas; /* new arenas allocated before the last GC */
uint32 livearenas; /* number of live arenas after the last GC */
uint32 maxarenas; /* maximum of allocated arenas */
uint32 totalarenas; /* total number of arenas with live things that
GC scanned so far */
};
struct JSGCStats {
uint32 lock; /* valid lock calls */
uint32 unlock; /* valid unlock calls */
uint32 unmarked; /* number of times marking of GC thing's children were
delayed due to a low C stack */
#ifdef DEBUG
uint32 maxunmarked;/* maximum number of things with children to mark
later */
#endif
uint32 poke; /* number of potentially useful GC calls */
uint32 afree; /* thing arenas freed so far */
uint32 nallarenas; /* number of all allocated arenas */
uint32 maxnallarenas; /* maximum number of all allocated arenas */
uint32 nchunks; /* number of allocated chunks */
uint32 maxnchunks; /* maximum number of allocated chunks */
JSGCArenaStats arenaStats[FINALIZE_LIMIT];
js::ConservativeGCStats conservative;
};
extern JS_FRIEND_API(void)
js_DumpGCStats(JSRuntime *rt, FILE *fp);
#endif /* JS_GCMETER */
/*
* This function is defined in jsdbgapi.cpp but is declared here to avoid
* polluting jsdbgapi.h, a public API header, with internal functions.

js/src/jsgcstats.cpp (new file, 375 lines)
View file

@ -0,0 +1,375 @@
/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=4 sw=4 et tw=99 ft=cpp:
*
* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is Mozilla SpiderMonkey JavaScript 1.9 code, released
* June 30, 2010
*
* The Initial Developer of the Original Code is
* the Mozilla Corporation.
*
* Contributor(s):
*
* Alternatively, the contents of this file may be used under the terms of
* either of the GNU General Public License Version 2 or later (the "GPL"),
* or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
#include "jstypes.h"
#include "jscntxt.h"
#include "jsgcstats.h"
#include "jsgc.h"
#include "jsxml.h"
#include "jsbuiltins.h"
using namespace js;
#if defined(JS_DUMP_CONSERVATIVE_GC_ROOTS) || defined(JS_GCMETER)
void
ConservativeGCStats::dump(FILE *fp)
{
size_t words = 0;
for (size_t i = 0; i != JS_ARRAY_LENGTH(counter); ++i)
words += counter[i];
#define ULSTAT(x) ((unsigned long)(x))
fprintf(fp, "CONSERVATIVE STACK SCANNING:\n");
fprintf(fp, " number of stack words: %lu\n", ULSTAT(words));
fprintf(fp, " excluded, low bit set: %lu\n", ULSTAT(counter[CGCT_LOWBITSET]));
fprintf(fp, " not within a chunk: %lu\n", ULSTAT(counter[CGCT_NOTCHUNK]));
fprintf(fp, " not within arena range: %lu\n", ULSTAT(counter[CGCT_NOTARENA]));
fprintf(fp, " points to free arena: %lu\n", ULSTAT(counter[CGCT_FREEARENA]));
fprintf(fp, " excluded, wrong tag: %lu\n", ULSTAT(counter[CGCT_WRONGTAG]));
fprintf(fp, " excluded, not live: %lu\n", ULSTAT(counter[CGCT_NOTLIVE]));
fprintf(fp, " valid GC things: %lu\n", ULSTAT(counter[CGCT_VALID]));
#undef ULSTAT
}
#endif
#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS
void
GCMarker::dumpConservativeRoots()
{
if (!conservativeDumpFileName)
return;
FILE *fp;
if (!strcmp(conservativeDumpFileName, "stdout")) {
fp = stdout;
} else if (!strcmp(conservativeDumpFileName, "stderr")) {
fp = stderr;
} else if (!(fp = fopen(conservativeDumpFileName, "aw"))) {
fprintf(stderr,
"Warning: cannot open %s to dump the conservative roots\n",
conservativeDumpFileName);
return;
}
conservativeStats.dump(fp);
for (ConservativeRoot *i = conservativeRoots.begin();
i != conservativeRoots.end();
++i) {
fprintf(fp, " %p: ", i->thing);
switch (i->traceKind) {
default:
JS_NOT_REACHED("Unknown trace kind");
case JSTRACE_OBJECT: {
JSObject *obj = (JSObject *) i->thing;
fprintf(fp, "object %s", obj->getClass()->name);
break;
}
case JSTRACE_STRING: {
JSString *str = (JSString *) i->thing;
char buf[50];
js_PutEscapedString(buf, sizeof buf, str, '"');
fprintf(fp, "string %s", buf);
break;
}
# if JS_HAS_XML_SUPPORT
case JSTRACE_XML: {
JSXML *xml = (JSXML *) i->thing;
fprintf(fp, "xml %u", (unsigned)xml->xml_class);
break;
}
# endif
}
fputc('\n', fp);
}
fputc('\n', fp);
if (fp != stdout && fp != stderr)
fclose(fp);
}
#endif /* JS_DUMP_CONSERVATIVE_GC_ROOTS */
#ifdef JS_GCMETER
void
UpdateArenaStats(JSGCArenaStats *st, uint32 nlivearenas, uint32 nkilledArenas,
uint32 nthings)
{
size_t narenas;
narenas = nlivearenas + nkilledArenas;
JS_ASSERT(narenas >= st->livearenas);
st->newarenas = narenas - st->livearenas;
st->narenas = narenas;
st->livearenas = nlivearenas;
if (st->maxarenas < narenas)
st->maxarenas = narenas;
st->totalarenas += narenas;
st->nthings = nthings;
if (st->maxthings < nthings)
st->maxthings = nthings;
st->totalthings += nthings;
}
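/*
 * Worked example with illustrative numbers: if the previous GC left
 * st->livearenas == 10, and this GC observes nlivearenas == 9 and
 * nkilledArenas == 5, then narenas == 14, st->newarenas == 14 - 10 == 4
 * (arenas allocated since the last GC), st->livearenas becomes 9, and
 * st->maxarenas / st->totalarenas track the high-water mark and the
 * running total respectively.
 */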
JS_FRIEND_API(void)
js_DumpGCStats(JSRuntime *rt, FILE *fp)
{
static const char *const GC_ARENA_NAMES[] = {
"object",
"function",
#if JS_HAS_XML_SUPPORT
"xml",
#endif
"short string",
"string",
"external_string_0",
"external_string_1",
"external_string_2",
"external_string_3",
"external_string_4",
"external_string_5",
"external_string_6",
"external_string_7",
};
fprintf(fp, "\nGC allocation statistics:\n\n");
#define UL(x) ((unsigned long)(x))
#define ULSTAT(x) UL(rt->gcStats.x)
#define PERCENT(x,y) (100.0 * (double) (x) / (double) (y))
size_t sumArenas = 0;
size_t sumTotalArenas = 0;
size_t sumThings = 0;
size_t sumMaxThings = 0;
size_t sumThingSize = 0;
size_t sumTotalThingSize = 0;
size_t sumArenaCapacity = 0;
size_t sumTotalArenaCapacity = 0;
size_t sumAlloc = 0;
size_t sumLocalAlloc = 0;
size_t sumFail = 0;
size_t sumRetry = 0;
for (int i = 0; i < (int) FINALIZE_LIMIT; i++) {
size_t thingSize, thingsPerArena;
JSGCArenaStats *st;
thingSize = rt->gcArenaList[i].thingSize;
thingsPerArena = ThingsPerArena(thingSize);
st = &rt->gcArenaStats[i];
if (st->maxarenas == 0)
continue;
fprintf(fp,
"%s arenas (thing size %lu, %lu things per arena):",
GC_ARENA_NAMES[i], UL(thingSize), UL(thingsPerArena));
putc('\n', fp);
fprintf(fp, " arenas before GC: %lu\n", UL(st->narenas));
fprintf(fp, " new arenas before GC: %lu (%.1f%%)\n",
UL(st->newarenas), PERCENT(st->newarenas, st->narenas));
fprintf(fp, " arenas after GC: %lu (%.1f%%)\n",
UL(st->livearenas), PERCENT(st->livearenas, st->narenas));
fprintf(fp, " max arenas: %lu\n", UL(st->maxarenas));
fprintf(fp, " things: %lu\n", UL(st->nthings));
fprintf(fp, " GC cell utilization: %.1f%%\n",
PERCENT(st->nthings, thingsPerArena * st->narenas));
fprintf(fp, " average cell utilization: %.1f%%\n",
PERCENT(st->totalthings, thingsPerArena * st->totalarenas));
fprintf(fp, " max things: %lu\n", UL(st->maxthings));
fprintf(fp, " alloc attempts: %lu\n", UL(st->alloc));
fprintf(fp, " alloc without locks: %lu (%.1f%%)\n",
UL(st->localalloc), PERCENT(st->localalloc, st->alloc));
sumArenas += st->narenas;
sumTotalArenas += st->totalarenas;
sumThings += st->nthings;
sumMaxThings += st->maxthings;
sumThingSize += thingSize * st->nthings;
sumTotalThingSize += size_t(thingSize * st->totalthings);
sumArenaCapacity += thingSize * thingsPerArena * st->narenas;
sumTotalArenaCapacity += thingSize * thingsPerArena * st->totalarenas;
sumAlloc += st->alloc;
sumLocalAlloc += st->localalloc;
sumFail += st->fail;
sumRetry += st->retry;
putc('\n', fp);
}
fputs("Never used arenas:\n", fp);
for (int i = 0; i < (int) FINALIZE_LIMIT; i++) {
size_t thingSize, thingsPerArena;
JSGCArenaStats *st;
thingSize = rt->gcArenaList[i].thingSize;
thingsPerArena = ThingsPerArena(thingSize);
st = &rt->gcArenaStats[i];
if (st->maxarenas != 0)
continue;
fprintf(fp,
"%s (thing size %lu, %lu things per arena)\n",
GC_ARENA_NAMES[i], UL(thingSize), UL(thingsPerArena));
}
fprintf(fp, "\nTOTAL STATS:\n");
fprintf(fp, " bytes allocated: %lu\n", UL(rt->gcBytes));
fprintf(fp, " total GC arenas: %lu\n", UL(sumArenas));
fprintf(fp, " max allocated arenas: %lu\n", ULSTAT(maxnallarenas));
fprintf(fp, " max allocated chunks: %lu\n", ULSTAT(maxnchunks));
fprintf(fp, " total GC things: %lu\n", UL(sumThings));
fprintf(fp, " max total GC things: %lu\n", UL(sumMaxThings));
fprintf(fp, " GC cell utilization: %.1f%%\n",
PERCENT(sumThingSize, sumArenaCapacity));
fprintf(fp, " average cell utilization: %.1f%%\n",
PERCENT(sumTotalThingSize, sumTotalArenaCapacity));
fprintf(fp, "allocation retries after GC: %lu\n", UL(sumRetry));
fprintf(fp, " alloc attempts: %lu\n", UL(sumAlloc));
fprintf(fp, " alloc without locks: %lu (%.1f%%)\n",
UL(sumLocalAlloc), PERCENT(sumLocalAlloc, sumAlloc));
fprintf(fp, " allocation failures: %lu\n", UL(sumFail));
fprintf(fp, " valid lock calls: %lu\n", ULSTAT(lock));
fprintf(fp, " valid unlock calls: %lu\n", ULSTAT(unlock));
fprintf(fp, " delayed tracing calls: %lu\n", ULSTAT(unmarked));
#ifdef DEBUG
fprintf(fp, " max trace later count: %lu\n", ULSTAT(maxunmarked));
#endif
fprintf(fp, "potentially useful GC calls: %lu\n", ULSTAT(poke));
fprintf(fp, " thing arenas freed so far: %lu\n", ULSTAT(afree));
rt->gcStats.conservative.dump(fp);
#undef UL
#undef ULSTAT
#undef PERCENT
}
#endif
#ifdef MOZ_GCTIMER
namespace js {
jsrefcount newChunkCount = 0;
jsrefcount destroyChunkCount = 0;
GCTimer::GCTimer() {
getFirstEnter();
memset(this, 0, sizeof(GCTimer));
enter = rdtsc();
}
uint64
GCTimer::getFirstEnter() {
static uint64 firstEnter = rdtsc();
return firstEnter;
}
void
GCTimer::finish(bool lastGC) {
end = rdtsc();
if (startMark > 0 && JS_WANT_GC_TIMER_PRINT) {
if (JS_WANT_GC_SUITE_PRINT) {
fprintf(stderr, "%f %f %f\n",
(double)(end - enter) / 1e6,
(double)(startSweep - startMark) / 1e6,
(double)(sweepDestroyEnd - startSweep) / 1e6);
} else {
static FILE *gcFile;
if (!gcFile) {
gcFile = fopen("gcTimer.dat", "w");
fprintf(gcFile, " AppTime, Total, Mark, Sweep, FinObj,");
fprintf(gcFile, " FinStr, Destroy, newChunks, destroyChunks\n");
}
JS_ASSERT(gcFile);
fprintf(gcFile, "%12.1f, %6.1f, %6.1f, %6.1f, %6.1f, %6.1f, %7.1f, ",
(double)(enter - getFirstEnter()) / 1e6,
(double)(end - enter) / 1e6,
(double)(startSweep - startMark) / 1e6,
(double)(sweepDestroyEnd - startSweep) / 1e6,
(double)(sweepObjectEnd - startSweep) / 1e6,
(double)(sweepStringEnd - sweepObjectEnd) / 1e6,
(double)(sweepDestroyEnd - sweepStringEnd) / 1e6);
fprintf(gcFile, "%10d, %10d \n", newChunkCount,
destroyChunkCount);
fflush(gcFile);
if (lastGC) {
fclose(gcFile);
gcFile = NULL;
}
}
}
newChunkCount = 0;
destroyChunkCount = 0;
}
#ifdef JS_SCOPE_DEPTH_METER
void
DumpScopeDepthMeter(JSRuntime *rt)
{
static FILE *fp;
if (!fp)
fp = fopen("/tmp/scopedepth.stats", "w");
if (fp) {
JS_DumpBasicStats(&rt->protoLookupDepthStats, "proto-lookup depth", fp);
JS_DumpBasicStats(&rt->scopeSearchDepthStats, "scope-search depth", fp);
JS_DumpBasicStats(&rt->hostenvScopeDepthStats, "hostenv scope depth", fp);
JS_DumpBasicStats(&rt->lexicalScopeDepthStats, "lexical scope depth", fp);
putc('\n', fp);
fflush(fp);
}
}
#endif
#ifdef JS_DUMP_LOOP_STATS
void
DumpLoopStats(JSRuntime *rt)
{
static FILE *lsfp;
if (!lsfp)
lsfp = fopen("/tmp/loopstats", "w");
if (lsfp) {
JS_DumpBasicStats(&rt->loopStats, "loops", lsfp);
fflush(lsfp);
}
}
#endif
} /* namespace js */
#endif

js/src/jsgcstats.h Normal file

@ -0,0 +1,190 @@
/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=4 sw=4 et tw=99 ft=cpp:
*
* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is Mozilla SpiderMonkey JavaScript 1.9 code, released
* June 30, 2010
*
* The Initial Developer of the Original Code is
* the Mozilla Corporation.
*
* Contributor(s):
*
* Alternatively, the contents of this file may be used under the terms of
* either of the GNU General Public License Version 2 or later (the "GPL"),
* or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
#ifndef jsgcstats_h___
#define jsgcstats_h___
#if !defined JS_DUMP_CONSERVATIVE_GC_ROOTS && defined DEBUG
# define JS_DUMP_CONSERVATIVE_GC_ROOTS 1
#endif
/* Define JS_GCMETER here if wanted */
#if defined JS_GCMETER
const bool JS_WANT_GC_METER_PRINT = true;
#elif defined DEBUG
# define JS_GCMETER 1
const bool JS_WANT_GC_METER_PRINT = false;
#endif
/* MOZ_GCTIMER is a compile flag. --enable-gctimer */
#if defined MOZ_GCTIMER
const bool JS_WANT_GC_TIMER_PRINT = true;
#elif defined DEBUG
# define MOZ_GCTIMER 1
const bool JS_WANT_GC_TIMER_PRINT = false;
#endif
#define METER_UPDATE_MAX(maxLval, rval) \
METER_IF((maxLval) < (rval), (maxLval) = (rval))
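/*
 * Example use (a sketch; METER_IF is assumed to come from the JS_GCMETER
 * support in jsgc.h):
 *
 *   METER_UPDATE_MAX(rt->gcStats.maxnchunks, rt->gcStats.nchunks);
 */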
namespace js {
/*
* The conservative GC test for a word shows that it is either a valid GC
* thing or is not for one of the following reasons.
*/
enum ConservativeGCTest {
CGCT_VALID,
CGCT_LOWBITSET, /* excluded because one of the low bits was set */
CGCT_NOTARENA, /* not within arena range in a chunk */
CGCT_NOTCHUNK, /* not within a valid chunk */
CGCT_FREEARENA, /* within arena containing only free things */
CGCT_WRONGTAG, /* tagged pointer but wrong type */
CGCT_NOTLIVE, /* gcthing is not allocated */
CGCT_END
};
struct ConservativeGCStats {
uint32 counter[CGCT_END]; /* ConservativeGCTest classification
counters */
void add(const ConservativeGCStats &another) {
for (size_t i = 0; i != JS_ARRAY_LENGTH(counter); ++i)
counter[i] += another.counter[i];
}
void dump(FILE *fp);
};
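/*
 * Intended use (a sketch, not shown in this patch): each conservative stack
 * scan accumulates into a local ConservativeGCStats, which the GC then folds
 * into the runtime-wide copy, e.g.
 *
 *   rt->gcStats.conservative.add(localStats);
 *
 * js_DumpGCStats below prints the merged counters via dump().
 */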
} /* namespace js */
#ifdef JS_GCMETER
struct JSGCArenaStats {
uint32 alloc; /* allocation attempts */
uint32 localalloc; /* allocations from local lists */
uint32 retry; /* allocation retries after running the GC */
uint32 fail; /* allocation failures */
uint32 nthings; /* live GC things */
uint32 maxthings; /* maximum of live GC cells */
double totalthings; /* live GC things the GC scanned so far */
uint32 narenas; /* number of arenas in the list before the GC */
uint32 newarenas; /* new arenas allocated before the last GC */
uint32 livearenas; /* number of live arenas after the last GC */
uint32 maxarenas; /* maximum of allocated arenas */
uint32 totalarenas; /* total number of arenas with live things that
GC scanned so far */
};
struct JSGCStats {
uint32 lock; /* valid lock calls */
uint32 unlock; /* valid unlock calls */
uint32 unmarked; /* number of times marking of a GC thing's children was
delayed due to a low C stack */
#ifdef DEBUG
uint32 maxunmarked;/* maximum number of things with children to mark
later */
#endif
uint32 poke; /* number of potentially useful GC calls */
uint32 afree; /* thing arenas freed so far */
uint32 nallarenas; /* number of all allocated arenas */
uint32 maxnallarenas; /* maximum number of all allocated arenas */
uint32 nchunks; /* number of allocated chunks */
uint32 maxnchunks; /* maximum number of allocated chunks */
js::ConservativeGCStats conservative;
};
extern JS_FRIEND_API(void)
js_DumpGCStats(JSRuntime *rt, FILE *fp);
extern void
UpdateArenaStats(JSGCArenaStats *st, uint32 nlivearenas, uint32 nkilledArenas,
uint32 nthings);
#endif /* JS_GCMETER */
namespace js {
#ifdef MOZ_GCTIMER
extern jsrefcount newChunkCount;
extern jsrefcount destroyChunkCount;
const bool JS_WANT_GC_SUITE_PRINT = false; //false for gnuplot output
struct GCTimer {
uint64 enter;
uint64 startMark;
uint64 startSweep;
uint64 sweepObjectEnd;
uint64 sweepStringEnd;
uint64 sweepDestroyEnd;
uint64 end;
GCTimer();
static uint64 getFirstEnter();
void finish(bool lastGC);
};
# define GCTIMER_PARAM , GCTimer &gcTimer
# define GCTIMER_ARG , gcTimer
# define TIMESTAMP(x) (gcTimer.x = rdtsc())
# define GCTIMER_BEGIN() GCTimer gcTimer
# define GCTIMER_END(last) (gcTimer.finish(last))
#else
# define GCTIMER_PARAM
# define GCTIMER_ARG
# define TIMESTAMP(x) ((void) 0)
# define GCTIMER_BEGIN() ((void) 0)
# define GCTIMER_END(last) ((void) 0)
#endif
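/*
 * Sketch of how the timer macros above are meant to be threaded through the
 * collector (illustrative only; GCUntilDone and MarkAndSweep are placeholder
 * names standing in for the real entry points in jsgc.cpp):
 *
 *   static void
 *   MarkAndSweep(JSContext *cx  GCTIMER_PARAM)
 *   {
 *       TIMESTAMP(startMark);
 *       // ... mark phase ...
 *       TIMESTAMP(startSweep);
 *       // ... finalize objects ...
 *       TIMESTAMP(sweepObjectEnd);
 *       // ... finalize strings ...
 *       TIMESTAMP(sweepStringEnd);
 *       // ... release dead arenas and chunks ...
 *       TIMESTAMP(sweepDestroyEnd);
 *   }
 *
 *   static void
 *   GCUntilDone(JSContext *cx)
 *   {
 *       GCTIMER_BEGIN();
 *       MarkAndSweep(cx  GCTIMER_ARG);
 *       GCTIMER_END(true);
 *   }
 *
 * With MOZ_GCTIMER disabled every macro expands to a no-op, so the call
 * sites cost nothing.
 */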
#ifdef JS_SCOPE_DEPTH_METER
extern void
DumpScopeDepthMeter(JSRuntime *rt);
#endif
#ifdef JS_DUMP_LOOP_STATS
extern void
DumpLoopStats(JSRuntime *rt);
#endif
} /* namespace js */
#endif /* jsgcstats_h___ */


@ -490,7 +490,7 @@ InvokeCommon(JSContext *cx, JSFunction *fun, JSScript *script, T native,
InvokeFrameGuard frame;
if (!cx->stack().getInvokeFrame(cx, args, nmissing, nfixed, frame))
return false;
JSStackFrame *fp = frame.getFrame();
JSStackFrame *fp = frame.getRegs().fp;
/* Initialize missing arguments and new local variables. */
Value *missing = args.argv() + args.argc();
@ -565,7 +565,7 @@ InvokeCommon(JSContext *cx, JSFunction *fun, JSScript *script, T native,
ok = callJSNative(cx, native, thisp, fp->numActualArgs(), fp->argv,
fp->addressReturnValue());
JS_ASSERT(cx->fp == fp);
JS_ASSERT(cx->fp() == fp);
JS_RUNTIME_METER(cx->runtime, nativeCalls);
#ifdef DEBUG_NOT_THROWING
if (ok && !alreadyThrowing)
@ -605,7 +605,7 @@ DoConstruct(JSContext *cx, JSObject *obj, uintN argc, Value *argv, Value *rval)
static JSBool
DoSlowCall(JSContext *cx, uintN argc, Value *vp)
{
JSStackFrame *fp = cx->fp;
JSStackFrame *fp = cx->fp();
JSObject *obj = fp->getThisObject(cx);
if (!obj)
return false;
@ -780,7 +780,7 @@ Execute(JSContext *cx, JSObject *chain, JSScript *script,
* in before execution and copied out after.
*/
JSFrameRegs regs;
ExecuteFrameGuard frame;
FrameGuard frame;
if (!cx->stack().getExecuteFrame(cx, down, 0, script->nslots, frame))
return false;
JSStackFrame *fp = frame.getFrame();
@ -813,24 +813,25 @@ Execute(JSContext *cx, JSObject *chain, JSScript *script,
JSObject *initialVarObj;
if (down) {
/* Propagate arg state for eval and the debugger API. */
JS_ASSERT_IF(down->hasFunction(), down->hasCallObj());
fp->setCallObj(down->maybeCallObj());
fp->setArgsObj(NULL);
fp->setFunction((script->staticLevel > 0) ? down->maybeFunction() : NULL);
fp->setThisValue(down->getThisValue());
fp->flags = flags | (down->flags & JSFRAME_COMPUTED_THIS);
fp->setNumActualArgs(down->numActualArgs());
fp->setNumActualArgs(0);
fp->argv = down->argv;
fp->setAnnotation(down->maybeAnnotation());
fp->setScopeChain(chain);
/*
* We want to call |down->varobj()|, but this requires knowing the
* CallStackSegment of |down|. If |down == cx->fp|, the callstack is
* CallStackSegment of |down|. If |down == cx->fp()|, the callstack is
* simply the context's active callstack, so we can use
* |down->varobj(cx)|. When |down != cx->fp|, we need to do a slow
* |down->varobj(cx)|. When |down != cx->fp()|, we need to do a slow
* linear search. Luckily, this only happens with EvaluateInFrame.
*/
initialVarObj = (down == cx->fp)
initialVarObj = (down == cx->maybefp())
? down->varobj(cx)
: down->varobj(cx->containingSegment(down));
} else {
@ -1214,7 +1215,7 @@ ValueToId(JSContext *cx, const Value &v, jsid *idp)
JS_STATIC_INTERPRET JS_REQUIRES_STACK JSBool
js_EnterWith(JSContext *cx, jsint stackIndex)
{
JSStackFrame *fp = cx->fp;
JSStackFrame *fp = cx->fp();
Value *sp = cx->regs->sp;
JS_ASSERT(stackIndex < 0);
JS_ASSERT(fp->base() <= sp + stackIndex);
@ -1251,11 +1252,11 @@ js_LeaveWith(JSContext *cx)
{
JSObject *withobj;
withobj = cx->fp->getScopeChain();
withobj = cx->fp()->getScopeChain();
JS_ASSERT(withobj->getClass() == &js_WithClass);
JS_ASSERT(withobj->getPrivate() == js_FloatingFrameIfGenerator(cx, cx->fp));
JS_ASSERT(withobj->getPrivate() == js_FloatingFrameIfGenerator(cx, cx->fp()));
JS_ASSERT(OBJ_BLOCK_DEPTH(cx, withobj) >= 0);
cx->fp->setScopeChain(withobj->getParent());
cx->fp()->setScopeChain(withobj->getParent());
withobj->setPrivate(NULL);
}
@ -1266,7 +1267,7 @@ js_IsActiveWithOrBlock(JSContext *cx, JSObject *obj, int stackDepth)
clasp = obj->getClass();
if ((clasp == &js_WithClass || clasp == &js_BlockClass) &&
obj->getPrivate() == js_FloatingFrameIfGenerator(cx, cx->fp) &&
obj->getPrivate() == js_FloatingFrameIfGenerator(cx, cx->fp()) &&
OBJ_BLOCK_DEPTH(cx, obj) >= stackDepth) {
return clasp;
}
@ -1284,9 +1285,9 @@ js_UnwindScope(JSContext *cx, jsint stackDepth, JSBool normalUnwind)
Class *clasp;
JS_ASSERT(stackDepth >= 0);
JS_ASSERT(cx->fp->base() + stackDepth <= cx->regs->sp);
JS_ASSERT(cx->fp()->base() + stackDepth <= cx->regs->sp);
JSStackFrame *fp = cx->fp;
JSStackFrame *fp = cx->fp();
for (obj = fp->maybeBlockChain(); obj; obj = obj->getParent()) {
JS_ASSERT(obj->getClass() == &js_BlockClass);
if (OBJ_BLOCK_DEPTH(cx, obj) < stackDepth)
@ -1374,7 +1375,7 @@ js_TraceOpcode(JSContext *cx)
tracefp = (FILE *) cx->tracefp;
JS_ASSERT(tracefp);
fp = cx->fp;
fp = cx->fp();
regs = cx->regs;
/*
@ -2178,7 +2179,7 @@ Interpret(JSContext *cx)
JSRuntime *const rt = cx->runtime;
/* Set registerized frame pointer and derived script pointer. */
JSStackFrame *fp = cx->fp;
JSStackFrame *fp = cx->fp();
JSScript *script = fp->getScript();
JS_ASSERT(!script->isEmpty());
JS_ASSERT(script->length > 1);
@ -2239,7 +2240,7 @@ Interpret(JSContext *cx)
#define RESTORE_INTERP_VARS() \
JS_BEGIN_MACRO \
fp = cx->fp; \
fp = cx->fp(); \
script = fp->getScript(); \
atoms = FrameAtomBase(cx, fp); \
currentVersion = (JSVersion) script->version; \
@ -2658,7 +2659,7 @@ BEGIN_CASE(JSOP_STOP)
regs.sp[-1] = fp->getReturnValue();
/* Sync interpreter registers. */
fp = cx->fp;
fp = cx->fp();
script = fp->getScript();
atoms = FrameAtomBase(cx, fp);
@ -4639,11 +4640,12 @@ BEGIN_CASE(JSOP_APPLY)
stack.pushInlineFrame(cx, fp, regs.pc, newfp);
/* Initialize regs after pushInlineFrame snapshots pc. */
regs.fp = newfp;
regs.pc = newscript->code;
regs.sp = newsp;
/* Import into locals. */
JS_ASSERT(newfp == cx->fp);
JS_ASSERT(newfp == cx->fp());
fp = newfp;
script = newscript;
atoms = script->atomMap.vector;


@ -51,6 +51,7 @@
#include "jsvalue.h"
typedef struct JSFrameRegs {
JSStackFrame *fp; /* active frame */
jsbytecode *pc; /* program counter */
js::Value *sp; /* stack pointer */
} JSFrameRegs;
@ -160,6 +161,7 @@ struct JSStackFrame
JSObject* getArgsObj() const {
JS_ASSERT(hasArgsObj());
JS_ASSERT(!isEvalFrame());
return argsobj;
}
@ -384,6 +386,7 @@ struct JSStackFrame
}
size_t numFormalArgs() const {
JS_ASSERT(!isEvalFrame());
return getFunction()->nargs;
}
@ -426,6 +429,7 @@ struct JSStackFrame
/* Argument count accessors */
size_t numActualArgs() const {
JS_ASSERT(!isEvalFrame());
return argc;
}
@ -499,6 +503,7 @@ struct JSStackFrame
}
bool isDummyFrame() const { return !!(flags & JSFRAME_DUMMY); }
bool isEvalFrame() const { return !!(flags & JSFRAME_EVAL); }
/* Contains static assertions for member alignment, don't call. */
inline void staticAsserts();


@ -1094,7 +1094,7 @@ js_NewGenerator(JSContext *cx)
return NULL;
/* Load and compute stack slot counts. */
JSStackFrame *fp = cx->fp;
JSStackFrame *fp = cx->fp();
uintN argc = fp->numActualArgs();
uintN nargs = JS_MAX(argc, fp->numFormalArgs());
uintN vplen = 2 + nargs;
@ -1229,8 +1229,8 @@ SendToGenerator(JSContext *cx, JSGeneratorOp op, JSObject *obj,
* Get a pointer to new frame/slots. This memory is not "claimed", so
* the code before pushExecuteFrame must not reenter the interpreter.
*/
ExecuteFrameGuard frame;
if (!cx->stack().getExecuteFrame(cx, cx->fp, vplen, nfixed, frame)) {
FrameGuard frame;
if (!cx->stack().getExecuteFrame(cx, cx->maybefp(), vplen, nfixed, frame)) {
gen->state = JSGEN_CLOSED;
return JS_FALSE;
}


@ -76,6 +76,7 @@
#include "jsstr.h"
#include "jstracer.h"
#include "jsdbgapi.h"
#include "json.h"
#include "jsscopeinlines.h"
#include "jsscriptinlines.h"
@ -1129,6 +1130,28 @@ obj_eval(JSContext *cx, uintN argc, Value *vp)
JSString *str = argv[0].toString();
JSScript *script = NULL;
const jschar *chars;
size_t length;
str->getCharsAndLength(chars, length);
/*
* If the eval string starts with '(' and ends with ')', it may be JSON.
* Try the JSON parser first because it's much faster. If the eval string
* isn't JSON, JSON parsing will probably fail quickly, so little time
* will be lost.
*/
if (length > 2 && chars[0] == '(' && chars[length-1] == ')') {
JSONParser *jp = js_BeginJSONParse(cx, vp, /* suppressErrors = */true);
JSBool ok = jp != NULL;
if (ok) {
/* Run JSON-parser on string inside ( and ). */
ok = js_ConsumeJSONText(cx, jp, chars+1, length-2);
ok &= js_FinishJSONParse(cx, jp, NullValue());
if (ok)
return JS_TRUE;
}
}
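    /*
     * Illustrative examples (not part of this patch): an eval string such as
     *   ({"a": 1, "b": [2, 3]})
     * is answered entirely by the JSON parser above, while
     *   (function () { return 1; })
     * fails the JSON parse almost immediately and falls through to the
     * compiler path below.
     */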
/*
* Cache local eval scripts indexed by source qualified by scope.
@ -1216,7 +1239,7 @@ obj_eval(JSContext *cx, uintN argc, Value *vp)
script = Compiler::compileScript(cx, scopeobj, callerFrame,
principals,
TCF_COMPILE_N_GO | TCF_NEED_MUTABLE_SCRIPT,
str->chars(), str->length(),
chars, length,
NULL, file, line, str, staticLevel);
if (!script)
return JS_FALSE;
@ -2693,10 +2716,10 @@ Detecting(JSContext *cx, jsbytecode *pc)
JSOp op;
JSAtom *atom;
script = cx->fp->getScript();
script = cx->fp()->getScript();
endpc = script->code + script->length;
for (;; pc += js_CodeSpec[op].length) {
JS_ASSERT_IF(!cx->fp->hasIMacroPC(), script->code <= pc && pc < endpc);
JS_ASSERT_IF(!cx->fp()->hasIMacroPC(), script->code <= pc && pc < endpc);
/* General case: a branch or equality op follows the access. */
op = js_GetOpcode(cx, script, pc);
@ -2775,7 +2798,8 @@ js_InferFlags(JSContext *cx, uintN defaultFlags)
flags |= JSRESOLVE_ASSIGNING;
} else if (cs->length >= 0) {
pc += cs->length;
if (pc < cx->fp->getScript()->code + cx->fp->getScript()->length && Detecting(cx, pc))
JSScript *script = cx->fp()->getScript();
if (pc < script->code + script->length && Detecting(cx, pc))
flags |= JSRESOLVE_DETECTING;
}
if (format & JOF_DECLARING)
@ -2892,7 +2916,7 @@ js_NewWithObject(JSContext *cx, JSObject *proto, JSObject *parent, jsint depth)
if (!obj)
return NULL;
obj->init(&js_WithClass, proto, parent,
PrivateValue(js_FloatingFrameIfGenerator(cx, cx->fp)));
PrivateValue(js_FloatingFrameIfGenerator(cx, cx->fp())));
OBJ_SET_BLOCK_DEPTH(cx, obj, depth);
obj->map = cx->runtime->emptyWithScope->hold();
@ -2951,10 +2975,10 @@ js_PutBlockObject(JSContext *cx, JSBool normalUnwind)
/* Blocks have one fixed slot available for the first local.*/
JS_STATIC_ASSERT(JS_INITIAL_NSLOTS == JSSLOT_BLOCK_DEPTH + 2);
JSStackFrame *const fp = cx->fp;
JSStackFrame *const fp = cx->fp();
JSObject *obj = fp->getScopeChain();
JS_ASSERT(obj->getClass() == &js_BlockClass);
JS_ASSERT(obj->getPrivate() == js_FloatingFrameIfGenerator(cx, cx->fp));
JS_ASSERT(obj->getPrivate() == js_FloatingFrameIfGenerator(cx, cx->fp()));
JS_ASSERT(OBJ_IS_CLONED_BLOCK(obj));
/*
@ -3729,12 +3753,12 @@ js_FindClassObject(JSContext *cx, JSObject *start, JSProtoKey protoKey,
JSScopeProperty *sprop;
/*
* Find the global object. Use cx->fp directly to avoid falling off
* Find the global object. Use cx->fp() directly to avoid falling off
* trace; all JIT-elided stack frames have the same global object as
* cx->fp.
* cx->fp().
*/
VOUCH_DOES_NOT_REQUIRE_STACK();
if (!start && (fp = cx->fp) != NULL)
if (!start && (fp = cx->maybefp()) != NULL)
start = fp->maybeScopeChain();
if (start) {
@ -4775,7 +4799,7 @@ js_GetPropertyHelper(JSContext *cx, JSObject *obj, jsid id, uintN getHow,
op = (JSOp) *pc;
if (op == JSOP_TRAP) {
JS_ASSERT_NOT_ON_TRACE(cx);
op = JS_GetTrapOpcode(cx, cx->fp->getScript(), pc);
op = JS_GetTrapOpcode(cx, cx->fp()->getScript(), pc);
}
if (op == JSOP_GETXPROP) {
flags = JSREPORT_ERROR;
@ -5262,7 +5286,7 @@ js_DeleteProperty(JSContext *cx, JSObject *obj, jsid id, Value *rval)
JSFunction *fun = GET_FUNCTION_PRIVATE(cx, funobj);
if (fun != funobj) {
for (JSStackFrame *fp = cx->fp; fp; fp = fp->down) {
for (JSStackFrame *fp = cx->maybefp(); fp; fp = fp->down) {
if (fp->callee() == fun &&
fp->getThisValue().isObject() &&
&fp->getThisValue().toObject() == obj) {
@ -5559,8 +5583,8 @@ js_GetClassPrototype(JSContext *cx, JSObject *scope, JSProtoKey protoKey,
if (protoKey != JSProto_Null) {
if (!scope) {
if (cx->fp)
scope = cx->fp->maybeScopeChain();
if (cx->hasfp())
scope = cx->fp()->maybeScopeChain();
if (!scope) {
scope = cx->globalObject;
if (!scope) {
@ -6081,8 +6105,11 @@ JSObject::getCompartment(JSContext *cx)
return cx->runtime->defaultCompartment;
}
// Compile-time Function, Block, and RegExp objects are not parented.
if (clasp == &js_FunctionClass || clasp == &js_BlockClass || clasp == &js_RegExpClass) {
/*
* Script objects and compile-time Function, Block, RegExp objects
* are not parented.
*/
if (clasp == &js_FunctionClass || clasp == &js_BlockClass || clasp == &js_RegExpClass || clasp == &js_ScriptClass) {
// This is a bogus answer, but it'll do for now.
return cx->runtime->defaultCompartment;
}
@ -6347,7 +6374,7 @@ js_DumpStackFrame(JSContext *cx, JSStackFrame *start)
VOUCH_DOES_NOT_REQUIRE_STACK();
if (!start)
start = cx->fp;
start = cx->maybefp();
FrameRegsIter i(cx);
while (!i.done() && i.fp() != start)
++i;


@ -420,6 +420,10 @@ struct JSObject {
JSObject *getGlobal() const;
bool isGlobal() const {
return !!(getClass()->flags & JSCLASS_IS_GLOBAL);
}
void *getPrivate() const {
JS_ASSERT(getClass()->flags & JSCLASS_HAS_PRIVATE);
void *priv = fslots[JSSLOT_PRIVATE].toPrivate();


@ -547,7 +547,7 @@ NewNativeClassInstance(JSContext *cx, Class *clasp, JSObject *proto, JSObject *p
JS_ASSERT(proto->isNative());
JS_ASSERT(parent);
DTrace::ObjectCreationScope objectCreationScope(cx, cx->fp, clasp);
DTrace::ObjectCreationScope objectCreationScope(cx, cx->maybefp(), clasp);
/*
* Allocate an object from the GC heap and initialize all its fields before
@ -599,13 +599,13 @@ NewBuiltinClassInstance(JSContext *cx, Class *clasp)
/* NB: inline-expanded and specialized version of js_GetClassPrototype. */
JSObject *global;
if (!cx->fp) {
if (!cx->hasfp()) {
global = cx->globalObject;
OBJ_TO_INNER_OBJECT(cx, global);
if (!global)
return NULL;
} else {
global = cx->fp->getScopeChain()->getGlobal();
global = cx->fp()->getScopeChain()->getGlobal();
}
JS_ASSERT(global->getClass()->flags & JSCLASS_IS_GLOBAL);
@ -682,7 +682,7 @@ NewObject(JSContext *cx, js::Class *clasp, JSObject *proto, JSObject *parent)
}
DTrace::ObjectCreationScope objectCreationScope(cx, cx->fp, clasp);
DTrace::ObjectCreationScope objectCreationScope(cx, cx->maybefp(), clasp);
/*
* Allocate an object from the GC heap and initialize all its fields before


@ -77,7 +77,7 @@ struct JSONParser
{
JSONParser(JSContext *cx)
: hexChar(), numHex(), statep(), stateStack(), rootVal(), objectStack(),
objectKey(cx), buffer(cx)
objectKey(cx), buffer(cx), suppressErrors(false)
{}
/* Used while handling \uNNNN in strings */
@ -90,6 +90,7 @@ struct JSONParser
JSObject *objectStack;
js::Vector<jschar, 8> objectKey;
js::Vector<jschar, 8> buffer;
bool suppressErrors;
};
#ifdef _MSC_VER
@ -655,6 +656,14 @@ Walk(JSContext *cx, jsid id, JSObject *holder, const Value &reviver, Value *vp)
return true;
}
static JSBool
JSONParseError(JSONParser *jp, JSContext *cx)
{
if (!jp->suppressErrors)
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_JSON_BAD_PARSE);
return JS_FALSE;
}
static bool
Revive(JSContext *cx, const Value &reviver, Value *vp)
{
@ -673,7 +682,7 @@ Revive(JSContext *cx, const Value &reviver, Value *vp)
}
JSONParser *
js_BeginJSONParse(JSContext *cx, Value *rootVal)
js_BeginJSONParse(JSContext *cx, Value *rootVal, bool suppressErrors /*= true*/)
{
if (!cx)
return NULL;
@ -693,6 +702,7 @@ js_BeginJSONParse(JSContext *cx, Value *rootVal)
jp->statep = jp->stateStack;
*jp->statep = JSON_PARSE_STATE_INIT;
jp->rootVal = rootVal;
jp->suppressErrors = suppressErrors;
return jp;
@ -729,18 +739,15 @@ js_FinishJSONParse(JSContext *cx, JSONParser *jp, const Value &reviver)
bool ok = *jp->statep == JSON_PARSE_STATE_FINISHED;
Value *vp = jp->rootVal;
cx->destroy(jp);
if (!early_ok)
return false;
if (!ok) {
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_JSON_BAD_PARSE);
return false;
if (!early_ok) {
ok = false;
} else if (!ok) {
JSONParseError(jp, cx);
} else if (reviver.isObject() && reviver.toObject().isCallable()) {
ok = Revive(cx, reviver, vp);
}
if (reviver.isObject() && reviver.toObject().isCallable())
ok = Revive(cx, reviver, vp);
cx->destroy(jp);
return ok;
}
@ -750,15 +757,13 @@ PushState(JSContext *cx, JSONParser *jp, JSONParserState state)
{
if (*jp->statep == JSON_PARSE_STATE_FINISHED) {
// extra input
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_JSON_BAD_PARSE);
return JS_FALSE;
return JSONParseError(jp, cx);
}
jp->statep++;
if ((uint32)(jp->statep - jp->stateStack) >= JS_ARRAY_LENGTH(jp->stateStack)) {
// too deep
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_JSON_BAD_PARSE);
return JS_FALSE;
return JSONParseError(jp, cx);
}
*jp->statep = state;
@ -772,8 +777,7 @@ PopState(JSContext *cx, JSONParser *jp)
jp->statep--;
if (jp->statep < jp->stateStack) {
jp->statep = jp->stateStack;
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_JSON_BAD_PARSE);
return JS_FALSE;
return JSONParseError(jp, cx);
}
if (*jp->statep == JSON_PARSE_STATE_INIT)
@ -811,10 +815,8 @@ PushObject(JSContext *cx, JSONParser *jp, JSObject *obj)
jsuint len;
if (!js_GetLengthProperty(cx, jp->objectStack, &len))
return JS_FALSE;
if (len >= JSON_MAX_DEPTH) {
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_JSON_BAD_PARSE);
return JS_FALSE;
}
if (len >= JSON_MAX_DEPTH)
return JSONParseError(jp, cx);
AutoObjectRooter tvr(cx, obj);
Value v = ObjectOrNullValue(obj);
@ -917,8 +919,7 @@ HandleNumber(JSContext *cx, JSONParser *jp, const jschar *buf, uint32 len)
return JS_FALSE;
if (ep != buf + len) {
// bad number input
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_JSON_BAD_PARSE);
return JS_FALSE;
return JSONParseError(jp, cx);
}
return PushPrimitive(cx, jp, DoubleValue(val));
@ -941,8 +942,7 @@ HandleKeyword(JSContext *cx, JSONParser *jp, const jschar *buf, uint32 len)
TokenKind tt = js_CheckKeyword(buf, len);
if (tt != TOK_PRIMARY) {
// bad keyword
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_JSON_BAD_PARSE);
return JS_FALSE;
return JSONParseError(jp, cx);
}
if (buf[0] == 'n') {
@ -952,8 +952,7 @@ HandleKeyword(JSContext *cx, JSONParser *jp, const jschar *buf, uint32 len)
} else if (buf[0] == 'f') {
keyword.setBoolean(false);
} else {
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_JSON_BAD_PARSE);
return JS_FALSE;
return JSONParseError(jp, cx);
}
return PushPrimitive(cx, jp, keyword);
@ -1006,10 +1005,8 @@ js_ConsumeJSONText(JSContext *cx, JSONParser *jp, const jschar *data, uint32 len
if (!PopState(cx, jp))
return JS_FALSE;
if (*jp->statep != JSON_PARSE_STATE_ARRAY) {
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_JSON_BAD_PARSE);
return JS_FALSE;
}
if (*jp->statep != JSON_PARSE_STATE_ARRAY)
return JSONParseError(jp, cx);
if (!CloseArray(cx, jp) || !PopState(cx, jp))
return JS_FALSE;
@ -1019,8 +1016,7 @@ js_ConsumeJSONText(JSContext *cx, JSONParser *jp, const jschar *data, uint32 len
if (c == '}') {
// we should only find these in OBJECT_KEY state
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_JSON_BAD_PARSE);
return JS_FALSE;
return JSONParseError(jp, cx);
}
if (c == '"') {
@ -1053,8 +1049,7 @@ js_ConsumeJSONText(JSContext *cx, JSONParser *jp, const jschar *data, uint32 len
if (!OpenArray(cx, jp) || !PushState(cx, jp, JSON_PARSE_STATE_VALUE))
return JS_FALSE;
} else if (!JS_ISXMLSPACE(c)) {
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_JSON_BAD_PARSE);
return JS_FALSE;
return JSONParseError(jp, cx);
}
break;
@ -1066,12 +1061,11 @@ js_ConsumeJSONText(JSContext *cx, JSONParser *jp, const jschar *data, uint32 len
if (!PushState(cx, jp, JSON_PARSE_STATE_OBJECT_PAIR))
return JS_FALSE;
} else if (c == ']' || !JS_ISXMLSPACE(c)) {
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_JSON_BAD_PARSE);
return JS_FALSE;
return JSONParseError(jp, cx);
}
break;
case JSON_PARSE_STATE_ARRAY :
case JSON_PARSE_STATE_ARRAY:
if (c == ']') {
if (!CloseArray(cx, jp) || !PopState(cx, jp))
return JS_FALSE;
@ -1079,12 +1073,11 @@ js_ConsumeJSONText(JSContext *cx, JSONParser *jp, const jschar *data, uint32 len
if (!PushState(cx, jp, JSON_PARSE_STATE_VALUE))
return JS_FALSE;
} else if (!JS_ISXMLSPACE(c)) {
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_JSON_BAD_PARSE);
return JS_FALSE;
return JSONParseError(jp, cx);
}
break;
case JSON_PARSE_STATE_OBJECT_PAIR :
case JSON_PARSE_STATE_OBJECT_PAIR:
if (c == '"') {
// we want to be waiting for a : when the string has been read
*jp->statep = JSON_PARSE_STATE_OBJECT_IN_PAIR;
@ -1095,8 +1088,7 @@ js_ConsumeJSONText(JSContext *cx, JSONParser *jp, const jschar *data, uint32 len
if (!CloseObject(cx, jp) || !PopState(cx, jp) || !PopState(cx, jp))
return JS_FALSE;
} else if (c == ']' || !JS_ISXMLSPACE(c)) {
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_JSON_BAD_PARSE);
return JS_FALSE;
return JSONParseError(jp, cx);
}
break;
@ -1104,8 +1096,7 @@ js_ConsumeJSONText(JSContext *cx, JSONParser *jp, const jschar *data, uint32 len
if (c == ':') {
*jp->statep = JSON_PARSE_STATE_VALUE;
} else if (!JS_ISXMLSPACE(c)) {
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_JSON_BAD_PARSE);
return JS_FALSE;
return JSONParseError(jp, cx);
}
break;
@ -1126,8 +1117,7 @@ js_ConsumeJSONText(JSContext *cx, JSONParser *jp, const jschar *data, uint32 len
} else if (c < 31) {
// The JSON lexical grammar does not allow a JSONStringCharacter to be
// any of the Unicode characters U+0000 through U+001F (control characters).
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_JSON_BAD_PARSE);
return JS_FALSE;
return JSONParseError(jp, cx);
} else {
if (!jp->buffer.append(c))
return JS_FALSE;
@ -1152,8 +1142,7 @@ js_ConsumeJSONText(JSContext *cx, JSONParser *jp, const jschar *data, uint32 len
*jp->statep = JSON_PARSE_STATE_STRING_HEX;
continue;
} else {
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_JSON_BAD_PARSE);
return JS_FALSE;
return JSONParseError(jp, cx);
}
}
@ -1170,8 +1159,7 @@ js_ConsumeJSONText(JSContext *cx, JSONParser *jp, const jschar *data, uint32 len
} else if (('A' <= c) && (c <= 'F')) {
jp->hexChar = (jp->hexChar << 4) | (c - 'A' + 0x0a);
} else {
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_JSON_BAD_PARSE);
return JS_FALSE;
return JSONParseError(jp, cx);
}
if (++(jp->numHex) == 4) {
@ -1215,8 +1203,7 @@ js_ConsumeJSONText(JSContext *cx, JSONParser *jp, const jschar *data, uint32 len
case JSON_PARSE_STATE_FINISHED:
if (!JS_ISXMLSPACE(c)) {
// extra input
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_JSON_BAD_PARSE);
return JS_FALSE;
return JSONParseError(jp, cx);
}
break;


@ -111,7 +111,7 @@ enum JSONDataType {
struct JSONParser;
extern JSONParser *
js_BeginJSONParse(JSContext *cx, js::Value *rootVal);
js_BeginJSONParse(JSContext *cx, js::Value *rootVal, bool suppressErrors = false);
extern JSBool
js_ConsumeJSONText(JSContext *cx, JSONParser *jp, const jschar *data, uint32 len);


@ -279,7 +279,7 @@ js_Disassemble(JSContext *cx, JSScript *script, JSBool lines, FILE *fp)
JS_FRIEND_API(JSBool)
js_DumpPC(JSContext *cx)
{
return js_DisassembleAtPC(cx, cx->fp->getScript(), true, stdout, cx->regs->pc);
return js_DisassembleAtPC(cx, cx->fp()->getScript(), true, stdout, cx->regs->pc);
}
JSBool
@ -5155,7 +5155,8 @@ js_DecompileValueGenerator(JSContext *cx, intN spindex, jsval v_in,
jsbytecode* savepc = i.pc();
jsbytecode* savedIMacroPC = fp->maybeIMacroPC();
if (savedIMacroPC) {
if (fp == cx->fp)
JS_ASSERT(cx->hasfp());
if (fp == cx->fp())
cx->regs->pc = savedIMacroPC;
else
fp->savedPC = savedIMacroPC;
@ -5173,7 +5174,8 @@ js_DecompileValueGenerator(JSContext *cx, intN spindex, jsval v_in,
name = DecompileExpression(cx, script, fp->maybeFunction(), pc);
if (savedIMacroPC) {
if (fp == cx->fp)
JS_ASSERT(cx->hasfp());
if (fp == cx->fp())
cx->regs->pc = savedIMacroPC;
else
fp->savedPC = savepc;


@ -61,8 +61,7 @@ PropertyCache::fill(JSContext *cx, JSObject *obj, uintN scopeIndex, uintN protoI
JS_ASSERT(this == &JS_PROPERTY_CACHE(cx));
JS_ASSERT(!cx->runtime->gcRunning);
/* FIXME bug 489098: consider enabling the property cache for eval. */
if (js_IsPropertyCacheDisabled(cx) || (cx->fp->flags & JSFRAME_EVAL)) {
if (js_IsPropertyCacheDisabled(cx)) {
PCMETER(disfills++);
return JS_NO_PROP_CACHE_FILL;
}
@ -128,7 +127,7 @@ PropertyCache::fill(JSContext *cx, JSObject *obj, uintN scopeIndex, uintN protoI
* opcode format flags.
*/
pc = cx->regs->pc;
op = js_GetOpcode(cx, cx->fp->getScript(), pc);
op = js_GetOpcode(cx, cx->fp()->getScript(), pc);
cs = &js_CodeSpec[op];
kshape = 0;
@ -317,7 +316,7 @@ GetAtomFromBytecode(JSContext *cx, jsbytecode *pc, JSOp op, const JSCodeSpec &cs
ptrdiff_t pcoff = (JOF_TYPE(cs.format) == JOF_SLOTATOM) ? SLOTNO_LEN : 0;
JSAtom *atom;
GET_ATOM_FROM_BYTECODE(cx->fp->getScript(), pc, pcoff, atom);
GET_ATOM_FROM_BYTECODE(cx->fp()->getScript(), pc, pcoff, atom);
return atom;
}
@ -328,12 +327,13 @@ PropertyCache::fullTest(JSContext *cx, jsbytecode *pc, JSObject **objp, JSObject
JSObject *obj, *pobj, *tmp;
uint32 vcap;
JS_ASSERT(this == &JS_PROPERTY_CACHE(cx));
JS_ASSERT(
uintN((cx->fp->hasIMacroPC() ? cx->fp->getIMacroPC() : pc) - cx->fp->getScript()->code)
< cx->fp->getScript()->length);
JSStackFrame *fp = cx->fp();
JSOp op = js_GetOpcode(cx, cx->fp->getScript(), pc);
JS_ASSERT(this == &JS_PROPERTY_CACHE(cx));
JS_ASSERT(uintN((fp->hasIMacroPC() ? fp->getIMacroPC() : pc) - fp->getScript()->code)
< fp->getScript()->length);
JSOp op = js_GetOpcode(cx, fp->getScript(), pc);
const JSCodeSpec &cs = js_CodeSpec[op];
obj = *objp;
@ -344,18 +344,19 @@ PropertyCache::fullTest(JSContext *cx, jsbytecode *pc, JSObject **objp, JSObject
JSAtom *atom = GetAtomFromBytecode(cx, pc, op, cs);
#ifdef DEBUG_notme
JSScript *script = cx->fp()->getScript();
fprintf(stderr,
"id miss for %s from %s:%u"
" (pc %u, kpc %u, kshape %u, shape %u)\n",
js_AtomToPrintableString(cx, atom),
cx->fp->script->filename,
js_PCToLineNumber(cx, cx->fp->script, pc),
pc - cx->fp->script->code,
entry->kpc - cx->fp->script->code,
script->filename,
js_PCToLineNumber(cx, script, pc),
pc - script->code,
entry->kpc - script->code,
entry->kshape,
obj->shape());
js_Disassemble1(cx, cx->fp->script, pc,
pc - cx->fp->script->code,
js_Disassemble1(cx, script, pc,
pc - script->code,
JS_FALSE, stderr);
#endif


@ -183,10 +183,10 @@ TraceRecorder::downSnapshot(FrameInfo* downFrame)
JS_ASSERT(unsigned(exit->calldepth) == callDepth);
exit->numGlobalSlots = ngslots;
exit->numStackSlots = downPostSlots + 1;
exit->numStackSlotsBelowCurrentFrame = cx->fp->down->argv ?
nativeStackOffset(&cx->fp->argv[-2]) / sizeof(double) : 0;
exit->numStackSlotsBelowCurrentFrame = cx->fp()->down->argv ?
nativeStackOffset(&cx->fp()->argv[-2]) / sizeof(double) : 0;
exit->exitType = UNSTABLE_LOOP_EXIT;
exit->block = cx->fp->down->maybeBlockChain();
exit->block = cx->fp()->down->maybeBlockChain();
exit->pc = downFrame->pc + JSOP_CALL_LENGTH;
exit->imacpc = NULL;
exit->sp_adj = ((downPostSlots + 1) * sizeof(double)) - tree->nativeStackBase;
@ -205,16 +205,16 @@ DownFrameSP(JSContext *cx)
{
FrameRegsIter i(cx);
++i;
JS_ASSERT(i.fp() == cx->fp->down);
JS_ASSERT(i.fp() == cx->fp()->down);
return i.sp();
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::upRecursion()
{
JS_ASSERT((JSOp)*cx->fp->down->savedPC == JSOP_CALL);
JS_ASSERT(js_CodeSpec[js_GetOpcode(cx, cx->fp->down->getScript(),
cx->fp->down->savedPC)].length == JSOP_CALL_LENGTH);
JS_ASSERT((JSOp)*cx->fp()->down->savedPC == JSOP_CALL);
JS_ASSERT(js_CodeSpec[js_GetOpcode(cx, cx->fp()->down->getScript(),
cx->fp()->down->savedPC)].length == JSOP_CALL_LENGTH);
JS_ASSERT(callDepth == 0);
@ -226,10 +226,10 @@ TraceRecorder::upRecursion()
if (anchor && (anchor->exitType == RECURSIVE_EMPTY_RP_EXIT ||
anchor->exitType == RECURSIVE_SLURP_MISMATCH_EXIT ||
anchor->exitType == RECURSIVE_SLURP_FAIL_EXIT)) {
return slurpDownFrames(cx->fp->down->savedPC);
return slurpDownFrames(cx->fp()->down->savedPC);
}
jsbytecode* return_pc = cx->fp->down->savedPC;
jsbytecode* return_pc = cx->fp()->down->savedPC;
jsbytecode* recursive_pc = return_pc + JSOP_CALL_LENGTH;
/*
@ -256,11 +256,11 @@ TraceRecorder::upRecursion()
* Need to compute this from the down frame, since the stack could have
* moved on this one.
*/
fi->spdist = DownFrameSP(cx) - cx->fp->down->slots();
JS_ASSERT(cx->fp->numActualArgs() == cx->fp->down->numActualArgs());
fi->set_argc(uint16(cx->fp->numActualArgs()), false);
fi->spdist = DownFrameSP(cx) - cx->fp()->down->slots();
JS_ASSERT(cx->fp()->numActualArgs() == cx->fp()->down->numActualArgs());
fi->set_argc(uint16(cx->fp()->numActualArgs()), false);
fi->callerHeight = downPostSlots;
fi->callerArgc = cx->fp->down->numActualArgs();
fi->callerArgc = cx->fp()->down->numActualArgs();
if (anchor && anchor->exitType == RECURSIVE_MISMATCH_EXIT) {
/*
@ -390,7 +390,7 @@ JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::slurpDownFrames(jsbytecode* return_pc)
{
/* Missing - no go */
if (cx->fp->numActualArgs() != cx->fp->numFormalArgs())
if (cx->fp()->numActualArgs() != cx->fp()->numFormalArgs())
RETURN_STOP_A("argc != nargs");
LIns* argv_ins;
@ -398,8 +398,7 @@ TraceRecorder::slurpDownFrames(jsbytecode* return_pc)
unsigned downPostSlots;
FrameRegsIter i(cx);
LIns* fp_ins =
addName(lir->insLoad(LIR_ldp, cx_ins, offsetof(JSContext, fp), ACCSET_OTHER), "fp");
LIns* fp_ins = addName(entryFrameIns(), "fp");
/*
* When first emitting slurp code, do so against the down frame. After
@ -434,7 +433,7 @@ TraceRecorder::slurpDownFrames(jsbytecode* return_pc)
addName(lir->insLoad(LIR_ldp, fp_ins,
JSStackFrame::offsetScript(), ACCSET_OTHER),
"script"),
INS_CONSTPTR(cx->fp->down->getScript())),
INS_CONSTPTR(cx->fp()->down->getScript())),
RECURSIVE_LOOP_EXIT);
}
@ -453,7 +452,7 @@ TraceRecorder::slurpDownFrames(jsbytecode* return_pc)
addName(lir->insLoad(LIR_ldi, fp_ins, JSStackFrame::offsetNumActualArgs(),
ACCSET_OTHER),
"argc"),
INS_CONST(cx->fp->numActualArgs())),
INS_CONST(cx->fp()->numActualArgs())),
MISMATCH_EXIT);
/* Pop the interpreter frame. */
@ -672,7 +671,7 @@ public:
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::downRecursion()
{
JSStackFrame* fp = cx->fp;
JSStackFrame* fp = cx->fp();
JSScript *script = fp->getScript();
if ((jsbytecode*)fragment->ip < script->code ||
(jsbytecode*)fragment->ip >= script->code + script->length) {


@ -2340,7 +2340,7 @@ ASTSerializer::literal(JSParseNode *pn, Value *dst)
LOCAL_ASSERT(re1 && re1->isRegExp());
JSObject *proto;
if (!js_GetClassPrototype(cx, cx->fp->getScopeChain(), JSProto_RegExp, &proto))
if (!js_GetClassPrototype(cx, cx->fp()->getScopeChain(), JSProto_RegExp, &proto))
return false;
JSObject *re2 = js_CloneRegExpObject(cx, re1, proto);


@ -1277,6 +1277,33 @@ js_TraceScript(JSTracer *trc, JSScript *script)
js_MarkScriptFilename(script->filename);
}
JSBool
js_NewScriptObject(JSContext *cx, JSScript *script)
{
AutoScriptRooter root(cx, script);
JS_ASSERT(!script->u.object);
JS_ASSERT(script != JSScript::emptyScript());
JSObject *obj = NewNonFunction<WithProto::Class>(cx, &js_ScriptClass, NULL, NULL);
if (!obj)
return JS_FALSE;
obj->setPrivate(script);
script->u.object = obj;
/*
* Clear the object's proto, to avoid entraining stuff. Once we no longer use the parent
* for security checks, then we can clear the parent, too.
*/
obj->clearProto();
#ifdef CHECK_SCRIPT_OWNER
script->owner = NULL;
#endif
return JS_TRUE;
}
typedef struct GSNCacheEntry {
JSDHashEntryHdr hdr;
jsbytecode *pc;


@ -184,7 +184,23 @@ struct JSScript {
uint16 staticLevel;/* static level for display maintenance */
JSPrincipals *principals;/* principals for this script */
union {
JSObject *object; /* optional Script-class object wrapper */
/*
* A script object of class js_ScriptClass, to ensure the script is GC'd.
* - All scripts returned by JSAPI functions (JS_CompileScript,
* JS_CompileFile, etc.) have these objects.
* - Function scripts never have script objects; such scripts are owned
* by their function objects.
* - Temporary scripts created by obj_eval, JS_EvaluateScript, and
* similar functions never have these objects; such scripts are
* explicitly destroyed by the code that created them.
* Debugging API functions (JSDebugHooks::newScriptHook;
* JS_GetFunctionScript) may reveal sans-script-object Function and
* temporary scripts to clients, but clients must never call
* JS_NewScriptObject on such scripts: doing so would double-free them,
* once from the explicit call to js_DestroyScript, and once when the
* script object is garbage collected.
*/
JSObject *object;
JSScript *nextToGC; /* next to GC in rt->scriptsToGC list */
} u;
#ifdef CHECK_SCRIPT_OWNER
@ -375,6 +391,9 @@ js_DestroyScript(JSContext *cx, JSScript *script);
extern void
js_TraceScript(JSTracer *trc, JSScript *script);
extern JSBool
js_NewScriptObject(JSContext *cx, JSScript *script);
/*
* To perturb as little code as possible, we introduce a js_GetSrcNote lookup
* cache without adding an explicit cx parameter. Thus js_GetSrcNote becomes


@ -1631,119 +1631,153 @@ str_trimRight(JSContext *cx, uintN argc, Value *vp)
* Perl-inspired string functions.
*/
/* Result of a successfully performed flat match. */
class FlatMatch
{
JSString *patstr;
const jschar *pat;
size_t patlen;
int32 match_;
friend class RegExpGuard;
public:
JSString *pattern() const { return patstr; }
size_t patternLength() const { return patlen; }
/*
* @note The match is -1 when the search was performed successfully
* but found no occurrence of the pattern.
*/
int32 match() const { return match_; }
};
/* A regexp and optional associated object. */
class RegExpPair
{
JSObject *reobj_;
RegExp *re_;
explicit RegExpPair(RegExp *re): re_(re) {}
friend class RegExpGuard;
public:
/* @note May be null. */
JSObject *reobj() const { return reobj_; }
RegExp &re() const { JS_ASSERT(re_); return *re_; }
};
/*
* RegExpGuard factors logic out of String regexp operations. After each
* operation completes, RegExpGuard data members become available, according to
* the comments below.
* RegExpGuard factors logic out of String regexp operations.
*
* Notes on parameters to RegExpGuard member functions:
* - 'optarg' indicates in which argument position RegExp flags will be found,
* if present. This is a Mozilla extension and not part of any ECMA spec.
* - 'flat' indicates that the given pattern string will not be interpreted as
* a regular expression, hence regexp meta-characters are ignored.
* @param optarg Indicates in which argument position RegExp
* flags will be found, if present. This is a Mozilla
* extension and not part of any ECMA spec.
*/
class RegExpGuard
{
RegExpGuard(const RegExpGuard &);
void operator=(const RegExpGuard &);
JSContext *mCx;
JSObject *mReobj;
js::RegExp *mRe;
public:
RegExpGuard(JSContext *cx) : mCx(cx), mRe(NULL) {}
~RegExpGuard() {
if (mRe)
mRe->decref(mCx);
}
JSContext* cx() const { return mCx; }
/* init must succeed in order to call tryFlatMatch or normalizeRegExp. */
bool
init(uintN argc, Value *vp)
{
if (argc != 0 && VALUE_IS_REGEXP(mCx, vp[2])) {
mReobj = &vp[2].toObject();
mRe = static_cast<js::RegExp *>(mReobj->getPrivate());
mRe->incref(mCx);
} else {
patstr = ArgToRootedString(mCx, argc, vp, 0);
if (!patstr)
return false;
}
return true;
}
JSContext *cx;
RegExpPair rep;
FlatMatch fm;
/*
* Upper bound on the number of characters we are willing to potentially
* waste on searching for RegExp meta-characters.
*/
static const size_t sMaxFlatPatLen = 256;
static const size_t MAX_FLAT_PAT_LEN = 256;
public:
explicit RegExpGuard(JSContext *cx) : cx(cx), rep(NULL) {}
~RegExpGuard() {
if (rep.re_)
rep.re_->decref(cx);
}
/* init must succeed in order to call tryFlatMatch or normalizeRegExp. */
bool
init(uintN argc, Value *vp)
{
if (argc != 0 && VALUE_IS_REGEXP(cx, vp[2])) {
rep.reobj_ = &vp[2].toObject();
rep.re_ = RegExp::extractFrom(rep.reobj_);
rep.re_->incref(cx);
} else {
fm.patstr = ArgToRootedString(cx, argc, vp, 0);
if (!fm.patstr)
return false;
}
return true;
}
/*
* Attempt to match |patstr| with |textstr|. Return false if flat matching
* could not be used.
* Attempt to match |patstr| to |textstr|. A flags argument, metachars in the
* pattern string, or a lengthy pattern string can thwart this process.
*
* @param checkMetaChars Look for regexp metachars in the pattern string.
* @return Whether flat matching could be used.
*/
bool
tryFlatMatch(JSString *textstr, bool flat, uintN optarg, uintN argc)
const FlatMatch *
tryFlatMatch(JSString *textstr, uintN optarg, uintN argc, bool checkMetaChars = true)
{
if (mRe)
return false;
patstr->getCharsAndLength(pat, patlen);
if (optarg < argc ||
(!flat &&
(patlen > sMaxFlatPatLen || RegExp::hasMetaChars(pat, patlen)))) {
return false;
if (rep.re_)
return NULL;
fm.patstr->getCharsAndLength(fm.pat, fm.patlen);
if (optarg < argc)
return NULL;
if (checkMetaChars &&
(fm.patlen > MAX_FLAT_PAT_LEN || RegExp::hasMetaChars(fm.pat, fm.patlen))) {
return NULL;
}
/*
* textstr could be a rope, so we want to avoid flattening it for as
* long as possible.
*/
if (textstr->isTopNode()) {
match = RopeMatch(textstr, pat, patlen);
fm.match_ = RopeMatch(textstr, fm.pat, fm.patlen);
} else {
const jschar *text;
size_t textlen;
textstr->getCharsAndLength(text, textlen);
match = StringMatch(text, textlen, pat, patlen);
fm.match_ = StringMatch(text, textlen, fm.pat, fm.patlen);
}
return true;
return &fm;
}
/* Data available on successful return from |tryFlatMatch|. */
JSString *patstr;
const jschar *pat;
size_t patlen;
jsint match;
/* If the pattern is not already a regular expression, make it so. */
bool
const RegExpPair *
normalizeRegExp(bool flat, uintN optarg, uintN argc, Value *vp)
{
/* If we don't have a RegExp, build RegExp from pattern string. */
if (mRe)
return true;
if (rep.re_)
return &rep;
JSString *opt;
if (optarg < argc) {
opt = js_ValueToString(mCx, vp[2 + optarg]);
opt = js_ValueToString(cx, vp[2 + optarg]);
if (!opt)
return false;
return NULL;
} else {
opt = NULL;
}
mRe = RegExp::createFlagged(mCx, patstr, opt);
if (!mRe)
return false;
mReobj = NULL;
return true;
rep.re_ = RegExp::createFlagged(cx, fm.patstr, opt);
if (!rep.re_)
return NULL;
rep.reobj_ = NULL;
return &rep;
}
/* Data available on successful return from |normalizeRegExp|. */
JSObject *reobj() const { return mReobj; } /* nullable */
js::RegExp *re() const { return mRe; } /* non-null */
#if DEBUG
bool hasRegExpPair() const { return rep.re_; }
#endif
};
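/*
 * Typical call sequence (a sketch mirroring str_match/str_search below;
 * HandleFlatResult is a placeholder for per-caller handling of the result):
 *
 *   RegExpGuard g(cx);
 *   if (!g.init(argc, vp))
 *       return false;
 *   if (const FlatMatch *fm = g.tryFlatMatch(str, 1, argc))
 *       return HandleFlatResult(cx, str, *fm, vp);
 *   const RegExpPair *rep = g.normalizeRegExp(false, 1, argc, vp);
 *   if (!rep)
 *       return false;
 *   // ... use rep->re(), and rep->reobj() if non-null ...
 */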
/* js_ExecuteRegExp indicates success in two ways, based on the 'test' flag. */
@ -1771,15 +1805,15 @@ enum MatchControlFlags {
/* Factor out looping and matching logic. */
static bool
DoMatch(JSContext *cx, Value *vp, JSString *str, const RegExpGuard &g,
DoMatch(JSContext *cx, Value *vp, JSString *str, const RegExpPair &rep,
DoMatchCallback callback, void *data, MatchControlFlags flags)
{
RegExp &re = *g.re();
RegExp &re = rep.re();
if (re.global()) {
/* global matching ('g') */
bool testGlobal = flags & TEST_GLOBAL_BIT;
if (g.reobj())
g.reobj()->zeroRegExpLastIndex();
if (rep.reobj())
rep.reobj()->zeroRegExpLastIndex();
for (size_t count = 0, i = 0, length = str->length(); i <= length; ++count) {
if (!re.execute(cx, str, &i, testGlobal, vp))
return false;
@ -1804,10 +1838,9 @@ DoMatch(JSContext *cx, Value *vp, JSString *str, const RegExpGuard &g,
}
static bool
BuildFlatMatchArray(JSContext *cx, JSString *textstr, const RegExpGuard &g,
Value *vp)
BuildFlatMatchArray(JSContext *cx, JSString *textstr, const FlatMatch &fm, Value *vp)
{
if (g.match < 0) {
if (fm.match() < 0) {
vp->setNull();
return true;
}
@ -1818,9 +1851,9 @@ BuildFlatMatchArray(JSContext *cx, JSString *textstr, const RegExpGuard &g,
return false;
vp->setObject(*obj);
return obj->defineProperty(cx, INT_TO_JSID(0), StringValue(g.patstr)) &&
return obj->defineProperty(cx, INT_TO_JSID(0), StringValue(fm.pattern())) &&
obj->defineProperty(cx, ATOM_TO_JSID(cx->runtime->atomState.indexAtom),
Int32Value(g.match)) &&
Int32Value(fm.match())) &&
obj->defineProperty(cx, ATOM_TO_JSID(cx->runtime->atomState.inputAtom),
StringValue(textstr));
}
@ -1860,18 +1893,20 @@ str_match(JSContext *cx, uintN argc, Value *vp)
RegExpGuard g(cx);
if (!g.init(argc, vp))
return false;
if (g.tryFlatMatch(str, false, 1, argc))
return BuildFlatMatchArray(cx, str, g, vp);
if (!g.normalizeRegExp(false, 1, argc, vp))
if (const FlatMatch *fm = g.tryFlatMatch(str, 1, argc))
return BuildFlatMatchArray(cx, str, *fm, vp);
const RegExpPair *rep = g.normalizeRegExp(false, 1, argc, vp);
if (!rep)
return false;
AutoObjectRooter array(cx);
MatchArgType arg = array.addr();
if (!DoMatch(cx, vp, str, g, MatchCallback, arg, MATCH_ARGS))
if (!DoMatch(cx, vp, str, *rep, MatchCallback, arg, MATCH_ARGS))
return false;
/* When not global, DoMatch will leave |RegExp.exec()| in *vp. */
if (g.re()->global())
if (rep->re().global())
vp->setObjectOrNull(array.object());
return true;
}
@ -1885,15 +1920,16 @@ str_search(JSContext *cx, uintN argc, Value *vp)
RegExpGuard g(cx);
if (!g.init(argc, vp))
return false;
if (g.tryFlatMatch(str, false, 1, argc)) {
vp->setInt32(g.match);
if (const FlatMatch *fm = g.tryFlatMatch(str, 1, argc)) {
vp->setInt32(fm->match());
return true;
}
if (!g.normalizeRegExp(false, 1, argc, vp))
const RegExpPair *rep = g.normalizeRegExp(false, 1, argc, vp);
if (!rep)
return false;
size_t i = 0;
if (!g.re()->execute(cx, str, &i, true, vp))
if (!rep->re().execute(cx, str, &i, true, vp))
return false;
if (vp->isTrue())
@ -2056,8 +2092,8 @@ FindReplaceLength(JSContext *cx, ReplaceData &rdata, size_t *sizep)
}
/* Push match index and input string. */
sp++->setInt32(statics.get(0, 0));
sp++->setString(rdata.str);
sp[0].setInt32(statics.get(0, 0));
sp[1].setString(rdata.str);
if (!Invoke(cx, rdata.args, 0))
return false;
@ -2151,16 +2187,11 @@ ReplaceCallback(JSContext *cx, size_t count, void *p)
static bool
BuildFlatReplacement(JSContext *cx, JSString *textstr, JSString *repstr,
const RegExpGuard &g, Value *vp)
const FlatMatch &fm, Value *vp)
{
if (g.match == -1) {
vp->setString(textstr);
return true;
}
JSRopeBuilder builder(cx);
size_t match = g.match; /* Avoid signed/unsigned warnings. */
size_t matchEnd = match + g.patlen;
size_t match = fm.match(); /* Avoid signed/unsigned warnings. */
size_t matchEnd = match + fm.patternLength();
if (textstr->isTopNode()) {
/*
@ -2186,8 +2217,8 @@ BuildFlatReplacement(JSContext *cx, JSString *textstr, JSString *repstr,
*/
JSString *leftSide = js_NewDependentString(cx, str, 0, match - pos);
if (!leftSide ||
!builder.append(cx, leftSide) ||
!builder.append(cx, repstr)) {
!builder.append(leftSide) ||
!builder.append(repstr)) {
return false;
}
}
@ -2199,11 +2230,11 @@ BuildFlatReplacement(JSContext *cx, JSString *textstr, JSString *repstr,
if (strEnd > matchEnd) {
JSString *rightSide = js_NewDependentString(cx, str, matchEnd - pos,
strEnd - matchEnd);
if (!rightSide || !builder.append(cx, rightSide))
if (!rightSide || !builder.append(rightSide))
return false;
}
} else {
if (!builder.append(cx, str))
if (!builder.append(str))
return false;
}
pos += str->length();
@ -2212,12 +2243,12 @@ BuildFlatReplacement(JSContext *cx, JSString *textstr, JSString *repstr,
JSString *leftSide = js_NewDependentString(cx, textstr, 0, match);
if (!leftSide)
return false;
JSString *rightSide = js_NewDependentString(cx, textstr, match + g.patlen,
textstr->length() - match - g.patlen);
JSString *rightSide = js_NewDependentString(cx, textstr, match + fm.patternLength(),
textstr->length() - match - fm.patternLength());
if (!rightSide ||
!builder.append(cx, leftSide) ||
!builder.append(cx, repstr) ||
!builder.append(cx, rightSide)) {
!builder.append(leftSide) ||
!builder.append(repstr) ||
!builder.append(rightSide)) {
return false;
}
}
@ -2226,6 +2257,172 @@ BuildFlatReplacement(JSContext *cx, JSString *textstr, JSString *repstr,
return true;
}
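The leaf-walking above only has to preserve the observable String.prototype.replace semantics; whether the subject is stored flat or as a concatenation (rope), and whether the match straddles an internal node boundary, must not be visible from script. A small sketch of that invariant (assertEq is the shell helper):
var left = "abc", right = "def";
var rope = left + right;                        // may be stored as a rope internally
assertEq(rope.replace("cd", "--"), "ab--ef");   // match spans the two original halves
assertEq(rope.replace("zz", "--"), "abcdef");   // no match: the subject is returned unchanged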
/*
* Perform a linear-scan dollar substitution on the replacement text,
* constructing a result string that looks like:
*
* newstring = string[:matchStart] + dollarSub(replaceValue) + string[matchLimit:]
*/
static inline bool
BuildDollarReplacement(JSContext *cx, JSString *textstr, JSString *repstr,
const jschar *firstDollar, const FlatMatch &fm, Value *vp)
{
JS_ASSERT(repstr->chars() <= firstDollar && firstDollar < repstr->chars() + repstr->length());
size_t matchStart = fm.match();
size_t matchLimit = matchStart + fm.patternLength();
JSCharBuffer newReplaceChars(cx);
/*
* Most probably:
*
* len(newstr) >= len(orig) - len(match) + len(replacement)
*
* Note that dollar vars _could_ make the resulting text smaller than this.
*/
if (!newReplaceChars.reserve(textstr->length() - fm.patternLength() + repstr->length()))
return false;
/* Move the pre-dollar chunk in bulk. */
JS_ALWAYS_TRUE(newReplaceChars.append(repstr->chars(), firstDollar));
/* Move the rest char-by-char, interpreting dollars as we encounter them. */
#define ENSURE(__cond) if (!(__cond)) return false;
const jschar *repstrLimit = repstr->chars() + repstr->length();
for (const jschar *it = firstDollar; it < repstrLimit; ++it) {
if (*it != '$' || it == repstrLimit - 1) {
ENSURE(newReplaceChars.append(*it));
continue;
}
switch (*(it + 1)) {
case '$': /* Eat one of the dollars. */
ENSURE(newReplaceChars.append(*it));
break;
case '&':
ENSURE(newReplaceChars.append(textstr->chars() + matchStart,
textstr->chars() + matchLimit));
break;
case '`':
ENSURE(newReplaceChars.append(textstr->chars(), textstr->chars() + matchStart));
break;
case '\'':
ENSURE(newReplaceChars.append(textstr->chars() + matchLimit,
textstr->chars() + textstr->length()));
break;
default: /* The dollar we saw was not special (no matter what its mother told it). */
ENSURE(newReplaceChars.append(*it));
continue;
}
++it; /* We always eat an extra char in the above switch. */
}
JSString *leftSide = js_NewDependentString(cx, textstr, 0, matchStart);
ENSURE(leftSide);
JSString *newReplace = js_NewStringFromCharBuffer(cx, newReplaceChars);
ENSURE(newReplace);
JS_ASSERT(textstr->length() >= matchLimit);
JSString *rightSide = js_NewDependentString(cx, textstr, matchLimit,
textstr->length() - matchLimit);
ENSURE(rightSide);
JSRopeBuilder builder(cx);
ENSURE(builder.append(leftSide) &&
builder.append(newReplace) &&
builder.append(rightSide));
#undef ENSURE
vp->setString(builder.getStr());
return true;
}
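For reference, the dollar forms this linear scan interprets ($$, $&, $`, $') and the "unrecognized dollars stay literal" rule are easy to observe from script. A minimal sketch (assertEq is the shell helper):
var text = "one two three";
assertEq(text.replace("two", "[$&]"), "one [two] three");    // $& is the matched text
assertEq(text.replace("two", "<$`>"), "one <one > three");   // $` is the left context
assertEq(text.replace("two", "<$'>"), "one < three> three"); // $' is the right context
assertEq(text.replace("two", "$$"), "one $ three");          // $$ collapses to a single $
assertEq(text.replace("two", "$9"), "one $9 three");         // unknown dollars are left literal here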
static inline bool
str_replace_regexp(JSContext *cx, uintN argc, Value *vp, ReplaceData &rdata)
{
const RegExpPair *rep = rdata.g.normalizeRegExp(true, 2, argc, vp);
if (!rep)
return false;
rdata.index = 0;
rdata.leftIndex = 0;
rdata.calledBack = false;
if (!DoMatch(cx, vp, rdata.str, *rep, ReplaceCallback, &rdata, REPLACE_ARGS))
return false;
if (!rdata.calledBack) {
/* Didn't match, so the string is unmodified. */
vp->setString(rdata.str);
return true;
}
JSSubString sub;
cx->regExpStatics.getRightContext(&sub);
if (!rdata.cb.append(sub.chars, sub.length))
return false;
JSString *retstr = js_NewStringFromCharBuffer(cx, rdata.cb);
if (!retstr)
return false;
vp->setString(retstr);
return true;
}
static inline bool
str_replace_flat_lambda(JSContext *cx, uintN argc, Value *vp, ReplaceData &rdata,
const FlatMatch &fm)
{
JS_ASSERT(fm.match() >= 0);
LeaveTrace(cx);
JSString *matchStr = js_NewDependentString(cx, rdata.str, fm.match(), fm.patternLength());
if (!matchStr)
return false;
/* lambda(matchStr, matchStart, textstr) */
static const uint32 lambdaArgc = 3;
if (!cx->stack().pushInvokeArgs(cx, lambdaArgc, rdata.args))
return false;
CallArgs &args = rdata.args;
args.callee().setObject(*rdata.lambda);
args.thisv().setObjectOrNull(rdata.lambda->getParent());
Value *sp = args.argv();
sp[0].setString(matchStr);
sp[1].setInt32(fm.match());
sp[2].setString(rdata.str);
if (!Invoke(cx, rdata.args, 0))
return false;
JSString *repstr = js_ValueToString(cx, args.rval());
if (!repstr)
return false;
JSString *leftSide = js_NewDependentString(cx, rdata.str, 0, fm.match());
if (!leftSide)
return false;
size_t matchLimit = fm.match() + fm.patternLength();
JSString *rightSide = js_NewDependentString(cx, rdata.str, matchLimit,
rdata.str->length() - matchLimit);
if (!rightSide)
return false;
JSRopeBuilder builder(cx);
if (!(builder.append(leftSide) &&
builder.append(repstr) &&
builder.append(rightSide))) {
return false;
}
vp->setString(builder.getStr());
return true;
}
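The lambda is invoked as lambda(matchStr, matchStart, textstr), mirroring the (match, offset, subject) arguments the regexp path passes for a capture-free pattern. A quick sketch of that calling convention (assertEq is the shell helper):
var text = "I like cake";
var result = text.replace("cake", function (matched, offset, whole) {
    assertEq(matched, "cake");   // the flat match itself
    assertEq(offset, 7);         // where it starts in the subject
    assertEq(whole, text);       // the full subject string
    return "pie";
});
assertEq(result, "I like pie");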
JSBool
js::str_replace(JSContext *cx, uintN argc, Value *vp)
{
@ -2253,37 +2450,39 @@ js::str_replace(JSContext *cx, uintN argc, Value *vp)
if (!rdata.g.init(argc, vp))
return false;
if (!rdata.dollar && !rdata.lambda &&
rdata.g.tryFlatMatch(rdata.str, true, 2, argc)) {
return BuildFlatReplacement(cx, rdata.str, rdata.repstr, rdata.g, vp);
/*
* Unlike its |String.prototype| brethren, |replace| doesn't convert
* its input to a regular expression. (Even if it contains metachars.)
*
* However, if the user invokes our (non-standard) |flags| argument
* extension then we revert to creating a regular expression. Note that
* this is observable behavior through the side-effect mutation of the
* |RegExp| statics.
*/
const FlatMatch *fm = rdata.g.tryFlatMatch(rdata.str, 2, argc, false);
if (!fm) {
JS_ASSERT_IF(!rdata.g.hasRegExpPair(), argc > 2);
return str_replace_regexp(cx, argc, vp, rdata);
}
if (!rdata.g.normalizeRegExp(true, 2, argc, vp))
return false;
rdata.index = 0;
rdata.leftIndex = 0;
rdata.calledBack = false;
if (!DoMatch(cx, vp, rdata.str, rdata.g, ReplaceCallback, &rdata, REPLACE_ARGS))
return false;
if (!rdata.calledBack) {
/* Didn't match, so the string is unmodified. */
if (fm->match() < 0) {
vp->setString(rdata.str);
return true;
}
JSSubString sub;
cx->regExpStatics.getRightContext(&sub);
if (!rdata.cb.append(sub.chars, sub.length))
return false;
if (rdata.lambda)
return str_replace_flat_lambda(cx, argc, vp, rdata, *fm);
JSString *retstr = js_NewStringFromCharBuffer(cx, rdata.cb);
if (!retstr)
return false;
/*
* Note: we could optimize the text.length == pattern.length case if we wanted,
* even in the presence of dollar metachars.
*/
if (rdata.dollar)
return BuildDollarReplacement(cx, rdata.str, rdata.repstr, rdata.dollar, *fm, vp);
vp->setString(retstr);
return true;
return BuildFlatReplacement(cx, rdata.str, rdata.repstr, *fm, vp);
}
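The comment above is the behavioral crux: with a string searchValue, replace never builds a regular expression, so metacharacters match literally and the RegExp statics stay untouched (unless the non-standard flags argument forces the regexp path). A small observable sketch (assertEq is the shell helper):
assertEq("a+b".replace("a+b", "sum"), "sum");   // '+' is not a quantifier here
assertEq("aaab".replace("a+b", "sum"), "aaab"); // ...so this literal search finds nothing
assertEq("x.y".replace(".", "-"), "x-y");       // '.' is a literal dot, not "any character"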
/*

View file

@ -678,17 +678,15 @@ class JSRopeLeafIterator {
};
class JSRopeBuilder {
private:
JSString *mStr;
JSContext * const cx;
JSString *mStr;
public:
JSRopeBuilder(JSContext *cx);
inline bool append(JSContext *cx, JSString *str) {
inline bool append(JSString *str) {
mStr = js_ConcatStrings(cx, mStr, str);
if (!mStr)
return false;
return true;
return !!mStr;
}
inline JSString *getStr() {

View file

@ -76,8 +76,7 @@ JSString::intString(jsint i)
}
inline
JSRopeBuilder::JSRopeBuilder(JSContext *cx) {
mStr = cx->runtime->emptyString;
}
JSRopeBuilder::JSRopeBuilder(JSContext *cx)
: cx(cx), mStr(cx->runtime->emptyString) {}
#endif /* jsstrinlines_h___ */

Diff between files not shown because of its large size.

View file

@ -1136,6 +1136,7 @@ class TraceRecorder
JS_REQUIRES_STACK nanojit::LIns* scopeChain();
JS_REQUIRES_STACK nanojit::LIns* entryScopeChain() const;
JS_REQUIRES_STACK nanojit::LIns* entryFrameIns() const;
JS_REQUIRES_STACK JSStackFrame* frameIfInRange(JSObject* obj, unsigned* depthp = NULL) const;
JS_REQUIRES_STACK RecordingStatus traverseScopeChain(JSObject *obj, nanojit::LIns *obj_ins, JSObject *obj2, nanojit::LIns *&obj2_ins);
JS_REQUIRES_STACK AbortableRecordingStatus scopeChainProp(JSObject* obj, Value*& vp, nanojit::LIns*& ins, NameResult& nr);

View file

@ -381,7 +381,8 @@ JSCompartment::wrap(JSContext *cx, Value *vp)
* we parent all wrappers to the global object in their home compartment.
* This loses us some transparency, and is generally very cheesy.
*/
JSObject *global = cx->fp ? cx->fp->getScopeChain()->getGlobal() : cx->globalObject;
JSObject *global =
cx->hasfp() ? cx->fp()->getScopeChain()->getGlobal() : cx->globalObject;
wrapper->setParent(global);
return true;
}
@ -474,31 +475,6 @@ JSCompartment::sweep(JSContext *cx)
}
}
static bool
SetupFakeFrame(JSContext *cx, ExecuteFrameGuard &frame, JSFrameRegs &regs, JSObject *obj)
{
const uintN vplen = 2;
const uintN nfixed = 0;
if (!cx->stack().getExecuteFrame(cx, js_GetTopStackFrame(cx), vplen, nfixed, frame))
return false;
Value *vp = frame.getvp();
vp[0].setUndefined();
vp[1].setNull(); // satisfy LeaveTree assert
JSStackFrame *fp = frame.getFrame();
PodZero(fp); // fp->fun and fp->script are both NULL
fp->argv = vp + 2;
fp->setScopeChain(obj->getGlobal());
fp->flags = JSFRAME_DUMMY;
regs.pc = NULL;
regs.sp = fp->slots();
cx->stack().pushExecuteFrame(cx, frame, regs, NULL);
return true;
}
AutoCompartment::AutoCompartment(JSContext *cx, JSObject *target)
: context(cx),
origin(cx->compartment),
@ -521,9 +497,11 @@ AutoCompartment::enter()
{
JS_ASSERT(!entered);
if (origin != destination) {
LeaveTrace(context);
context->compartment = destination;
JSObject *scopeChain = target->getGlobal();
frame.construct();
if (!SetupFakeFrame(context, frame.ref(), regs, target)) {
if (!context->stack().pushDummyFrame(context, frame.ref(), regs, scopeChain)) {
frame.destroy();
context->compartment = origin;
return false;

View file

@ -152,7 +152,7 @@ class AutoCompartment
JSObject * const target;
JSCompartment * const destination;
private:
LazilyConstructed<ExecuteFrameGuard> frame;
LazilyConstructed<FrameGuard> frame;
JSFrameRegs regs;
RegExpStatics statics;
AutoStringRooter input;

View file

@ -671,8 +671,17 @@ JS_XDRScript(JSXDRState *xdr, JSScript **scriptp)
{
if (!js_XDRScript(xdr, scriptp, true, NULL))
return JS_FALSE;
if (xdr->mode == JSXDR_DECODE)
if (xdr->mode == JSXDR_DECODE) {
js_CallNewScriptHook(xdr->cx, *scriptp, NULL);
if (*scriptp != JSScript::emptyScript() &&
!js_NewScriptObject(xdr->cx, *scriptp)) {
js_DestroyScript(xdr->cx, *scriptp);
*scriptp = NULL;
return JS_FALSE;
}
}
return JS_TRUE;
}

View file

@ -1 +1 @@
c7009f5cd83ea028b98f59e1f8830a76ba27c1dd
2e44b58e0662f140ab49064b26dfbe15d64bc061

View file

@ -351,14 +351,21 @@ namespace nanojit
void Assembler::resourceConsistencyCheck()
{
NanoAssert(!error());
#ifdef NANOJIT_IA32
// Within the expansion of a single LIR instruction, we may use the x87
// stack for unmanaged temporaries. Otherwise, we do not use the x87 stack
// as such, but use the top element alone as a single allocatable FP register.
// Compensation code must be inserted to keep the stack balanced and avoid
// overflow, and the mechanisms for this are rather fragile and IA32-specific.
// The predicate below should hold between any pair of instructions within
// a basic block, at labels, and just after a conditional branch. Currently,
// we enforce this condition between all pairs of instructions, but this is
// overly restrictive, and would fail if we did not generate unreachable x87
// stack pops following unconditional branches.
NanoAssert((_allocator.active[FST0] && _fpuStkDepth == -1) ||
(!_allocator.active[FST0] && _fpuStkDepth == 0));
(!_allocator.active[FST0] && _fpuStkDepth == 0));
#endif
_activation.checkForResourceConsistency(_allocator);
registerConsistencyCheck();
}
@ -633,46 +640,52 @@ namespace nanojit
//
Register Assembler::prepareResultReg(LIns *ins, RegisterMask allow)
{
// At this point, we know the result of 'ins' result has a use later
// in the code. (Exception: if 'ins' is a call to an impure function
// the return value may not be used, but 'ins' will still be present
// because it has side-effects.) It may have had to be evicted, in
// which case the restore will have already been generated, so we now
// generate the spill (unless the restore was actually a
// rematerialize, in which case it's not necessary).
// At this point, we know the result of 'ins' is used later in the
// code, unless it is a call to an impure function that must be
// included for effect even though its result is ignored. It may have
// had to be evicted, in which case the restore will have already been
// generated, so we now generate the spill. QUERY: Is there any attempt
// to elide the spill if we know that all restores can be rematerialized?
#ifdef NANOJIT_IA32
// If 'allow' includes FST0 we have to pop if 'ins' isn't in FST0 in
// the post-regstate. This could be because 'ins' is unused, 'ins' is
// in a spill slot, or 'ins' is in an XMM register.
const bool pop = (allow & rmask(FST0)) &&
(!ins->isInReg() || ins->getReg() != FST0);
#else
const bool pop = false;
#endif
const bool notInFST0 = (!ins->isInReg() || ins->getReg() != FST0);
Register r = findRegFor(ins, allow);
asm_maybe_spill(ins, pop);
#ifdef NANOJIT_IA32
if (!ins->isInAr() && pop && r == FST0) {
// This can only happen with a LIR_calld to an impure function
// whose return value was ignored (ie. if ins->isInReg() was false
// prior to the findRegFor() call).
FSTP(FST0); // pop the fpu result since it isn't used
// If the result register is FST0, but FST0 is not in the post-regstate,
// then we must pop the x87 stack. This may occur because the result is
// unused, or because it has been stored to a spill slot or an XMM register.
const bool needPop = notInFST0 && (r == FST0);
const bool didSpill = asm_maybe_spill(ins, needPop);
if (!didSpill && needPop) {
// If the instruction is spilled, then the pop will have already
// been performed by the store to the stack slot. Otherwise, we
// must pop now. This may occur when the result of a LIR_calld
// to an impure (side-effecting) function is not used.
FSTP(FST0);
}
#else
Register r = findRegFor(ins, allow);
asm_maybe_spill(ins, false);
#endif
return r;
}
void Assembler::asm_maybe_spill(LIns* ins, bool pop)
bool Assembler::asm_maybe_spill(LIns* ins, bool pop)
{
int d = ins->isInAr() ? arDisp(ins) : 0;
Register r = ins->getReg();
if (ins->isInAr()) {
int d = arDisp(ins);
Register r = ins->getReg();
verbose_only( RefBuf b;
if (_logc->lcbits & LC_Native) {
setOutputForEOL(" <= spill %s",
_thisfrag->lirbuf->printer->formatRef(&b, ins)); } )
asm_spill(r, d, pop, ins->isQorD());
#ifdef NANOJIT_IA32
asm_spill(r, d, pop);
#else
(void)pop;
asm_spill(r, d, ins->isQorD());
#endif
return true;
}
return false;
}
// XXX: This function is error-prone and should be phased out; see bug 513615.
@ -2358,9 +2371,9 @@ namespace nanojit
}
#ifdef NANOJIT_IA32
if (savedins && (rmask(r) & x87Regs)) {
if (savedins && r == FST0) {
verbose_only( shouldMention=true; )
FSTP(r);
FSTP(FST0);
}
#endif
}
@ -2414,12 +2427,13 @@ namespace nanojit
}
#ifdef NANOJIT_IA32
if (rmask(r) & x87Regs) {
if (r == FST0) {
if (savedins) {
FSTP(r);
// Discard top of x87 stack.
FSTP(FST0);
}
else if (curins) {
// saved state did not have fpu reg allocated,
// Saved state did not have fpu reg allocated,
// so we must evict here to keep x87 stack balanced.
evict(curins);
}

View file

@ -429,8 +429,12 @@ namespace nanojit
// Otherwise, register allocation decisions will be suboptimal.
void asm_restore(LIns*, Register);
void asm_maybe_spill(LIns* ins, bool pop);
void asm_spill(Register rr, int d, bool pop, bool quad);
bool asm_maybe_spill(LIns* ins, bool pop);
#ifdef NANOJIT_IA32
void asm_spill(Register rr, int d, bool pop);
#else
void asm_spill(Register rr, int d, bool quad);
#endif
void asm_load64(LIns* ins);
void asm_ret(LIns* ins);
#ifdef NANOJIT_64BIT
@ -502,10 +506,10 @@ namespace nanojit
// since we generate backwards the depth is negative
inline void fpu_push() {
debug_only( ++_fpuStkDepth; NanoAssert(_fpuStkDepth<=0); )
debug_only( ++_fpuStkDepth; NanoAssert(_fpuStkDepth <= 0); )
}
inline void fpu_pop() {
debug_only( --_fpuStkDepth; NanoAssert(_fpuStkDepth<=0); )
debug_only( --_fpuStkDepth; NanoAssert(_fpuStkDepth >= -7); )
}
#endif
const Config& _config;

View file

@ -1299,9 +1299,8 @@ Assembler::asm_restore(LIns* i, Register r)
}
void
Assembler::asm_spill(Register rr, int d, bool pop, bool quad)
Assembler::asm_spill(Register rr, int d, bool quad)
{
(void) pop;
(void) quad;
NanoAssert(d);
// The following registers should never be spilled:
@ -1570,7 +1569,7 @@ Assembler::asm_immd(LIns* ins)
if (_config.arm_vfp && deprecated_isKnownReg(rr)) {
if (d)
asm_spill(rr, d, false, true);
asm_spill(rr, d, true);
underrunProtect(4*4);
asm_immd_nochk(rr, ins->immDlo(), ins->immDhi());
@ -2759,13 +2758,14 @@ Assembler::asm_cmov(LIns* ins)
Register rf = findRegFor(iffalse, allow & ~rmask(rr));
// If 'iftrue' isn't in a register, it can be clobbered by 'ins'.
Register rt = iftrue->isInReg() ? iftrue->getReg() : rr;
if (ins->isop(LIR_cmovd)) {
NIns* target = _nIns;
asm_nongp_copy(rr, rf);
asm_branch(false, condval, target);
// If 'iftrue' isn't in a register, it can be clobbered by 'ins'.
Register rt = iftrue->isInReg() ? iftrue->getReg() : rr;
if (rr != rt)
asm_nongp_copy(rr, rt);
freeResourcesOf(ins);
@ -2776,6 +2776,9 @@ Assembler::asm_cmov(LIns* ins)
return;
}
// If 'iftrue' isn't in a register, it can be clobbered by 'ins'.
Register rt = iftrue->isInReg() ? iftrue->getReg() : rr;
// WARNING: We cannot generate any code that affects the condition
// codes between the MRcc generation here and the asm_cmp() call
// below. See asm_cmp() for more details.

View file

@ -630,7 +630,7 @@ namespace nanojit
if (cpu_has_fpu && deprecated_isKnownReg(rr)) {
if (d)
asm_spill(rr, d, false, true);
asm_spill(rr, d, true);
asm_li_d(rr, ins->immDhi(), ins->immDlo());
}
else {
@ -955,17 +955,75 @@ namespace nanojit
switch (op) {
case LIR_addxovi:
case LIR_addjovi:
SLT(AT, rr, ra);
// add with overflow result into $at
// overflow is indicated by (sign(rr)^sign(ra)) & (sign(rr)^sign(rhsc))
// [move $t,$ra] if (rr==ra)
// addiu $rr,$ra,rhsc
// [xor $at,$rr,$ra] if (rr!=ra)
// [xor $at,$rr,$t] if (rr==ra)
// [not $t,$rr] if (rhsc < 0)
// [and $at,$at,$t] if (rhsc < 0)
// [and $at,$at,$rr] if (rhsc >= 0)
// srl $at,$at,31
t = registerAllocTmp(allow);
SRL(AT, AT, 31);
if (rhsc < 0) {
AND(AT, AT, t);
NOT(t, rr);
}
else
AND(AT, AT, rr);
if (rr == ra)
XOR(AT, rr, t);
else
XOR(AT, rr, ra);
ADDIU(rr, ra, rhsc);
if (rr == ra)
MOVE(t, ra);
goto done;
case LIR_addi:
ADDIU(rr, ra, rhsc);
goto done;
case LIR_subxovi:
case LIR_subjovi:
// subtract with overflow result into $at
// overflow is indicated by (sign(ra)^sign(rhsc)) & (sign(rr)^sign(ra))
// [move $t,$ra] if (rr==ra)
// addiu $rr,$ra,-rhsc
// [xor $at,$rr,$ra] if (rr!=ra)
// [xor $at,$rr,$t] if (rr==ra)
// [and $at,$at,$ra] if (rhsc >= 0 && rr!=ra)
// [and $at,$at,$t] if (rhsc >= 0 && rr==ra)
// [not $t,$ra] if (rhsc < 0 && rr!=ra)
// [not $t,$t] if (rhsc < 0 && rr==ra)
// [and $at,$at,$t] if (rhsc < 0)
// srl $at,$at,31
if (isS16(-rhsc)) {
SLT(AT, ra, rr);
t = registerAllocTmp(allow);
SRL(AT,AT,31);
if (rhsc < 0) {
AND(AT, AT, t);
if (rr == ra)
NOT(t, t);
else
NOT(t, ra);
}
else {
if (rr == ra)
AND(AT, AT, t);
else
AND(AT, AT, ra);
}
if (rr == ra)
XOR(AT, rr, t);
else
XOR(AT, rr, ra);
ADDIU(rr, ra, -rhsc);
if (rr == ra)
MOVE(t, ra);
goto done;
}
break;
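Both comment blocks above use the standard sign-bit rule for two's-complement overflow: for r = a + b, overflow iff (sign(r)^sign(a)) & (sign(r)^sign(b)); for r = a - b, overflow iff (sign(a)^sign(b)) & (sign(r)^sign(a)). A throwaway shell sketch of the rule itself (function names are illustrative, not part of the patch):
function addOverflows(a, b) {
    var r = (a + b) | 0;                          // 32-bit wraparound, like ADDU
    return (((r ^ a) & (r ^ b)) >>> 31) === 1;    // sign-bit rule from the comments
}
function subOverflows(a, b) {
    var r = (a - b) | 0;                          // 32-bit wraparound, like SUBU
    return (((a ^ b) & (r ^ a)) >>> 31) === 1;
}
assertEq(addOverflows(0x7fffffff, 1), true);      // INT_MAX + 1 overflows
assertEq(addOverflows(-5, 3), false);
assertEq(subOverflows(-0x80000000, 1), true);     // INT_MIN - 1 overflows
assertEq(subOverflows(7, 7), false);              // a - a never overflows (the ra == rb special case)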
@ -1025,11 +1083,44 @@ namespace nanojit
NanoAssert(deprecated_isKnownReg(rb));
allow &= ~rmask(rb);
// The register allocator will have set up one of these 4 cases
// rr==ra && ra==rb r0 = r0 op r0
// rr==ra && ra!=rb r0 = r0 op r1
// rr!=ra && ra==rb r0 = r1 op r1
// rr!=ra && ra!=rb && rr!=rb r0 = r1 op r2
NanoAssert(ra == rb || rr != rb);
switch (op) {
case LIR_addxovi:
case LIR_addjovi:
SLT(AT, rr, ra);
// add with overflow result into $at
// overflow is indicated by (sign(rr)^sign(ra)) & (sign(rr)^sign(rb))
// [move $t,$ra] if (rr==ra)
// addu $rr,$ra,$rb
// ; Generate sign($rr)^sign($ra)
// [xor $at,$rr,$t] sign($at)=sign($rr)^sign($t) if (rr==ra)
// [xor $at,$rr,$ra] sign($at)=sign($rr)^sign($ra) if (rr!=ra)
// ; Generate sign($rr)^sign($rb) if $ra!=$rb
// [xor $t,$rr,$rb] if (ra!=rb)
// [and $at,$t] if (ra!=rb)
// srl $at,31
t = ZERO;
if (rr == ra || ra != rb)
t = registerAllocTmp(allow);
SRL(AT, AT, 31);
if (ra != rb) {
AND(AT, AT, t);
XOR(t, rr, rb);
}
if (rr == ra)
XOR(AT, rr, t);
else
XOR(AT, rr, ra);
ADDU(rr, ra, rb);
if (rr == ra)
MOVE(t, ra);
break;
case LIR_addi:
ADDU(rr, ra, rb);
@ -1045,23 +1136,52 @@ namespace nanojit
break;
case LIR_subxovi:
case LIR_subjovi:
SLT(AT,ra,rr);
SUBU(rr, ra, rb);
// subtract with overflow result into $at
// overflow is indicated by (sign(ra)^sign(rb)) & (sign(rr)^sign(ra))
// [move $t,$ra] if (rr==ra)
// ; Generate sign($at)=sign($ra)^sign($rb)
// xor $at,$ra,$rb
// subu $rr,$ra,$rb
// ; Generate sign($t)=sign($rr)^sign($ra)
// [xor $t,$rr,$ra] if (rr!=ra)
// [xor $t,$rr,$t] if (rr==ra)
// and $at,$at,$t
// srl $at,$at,31
if (ra == rb) {
// special case for (ra == rb) which can't overflow
MOVE(AT, ZERO);
SUBU(rr, ra, rb);
}
else {
t = registerAllocTmp(allow);
SRL(AT, AT, 31);
AND(AT, AT, t);
if (rr == ra)
XOR(t, rr, t);
else
XOR(t, rr, ra);
SUBU(rr, ra, rb);
XOR(AT, ra, rb);
if (rr == ra)
MOVE(t, ra);
}
break;
case LIR_subi:
SUBU(rr, ra, rb);
break;
case LIR_lshi:
// SLLV uses the low-order 5 bits of rb for the shift amount so no masking required
SLLV(rr, ra, rb);
ANDI(rb, rb, 31);
break;
case LIR_rshi:
// SRAV uses the low-order 5 bits of rb for the shift amount so no masking required
SRAV(rr, ra, rb);
ANDI(rb, rb, 31);
break;
case LIR_rshui:
// SRLV uses the low-order 5 bits of rb for the shift amount so no masking required
SRLV(rr, ra, rb);
ANDI(rb, rb, 31);
break;
case LIR_mulxovi:
case LIR_muljovi:
@ -1529,9 +1649,8 @@ namespace nanojit
}
void
Assembler::asm_spill(Register rr, int d, bool pop, bool quad)
Assembler::asm_spill(Register rr, int d, bool quad)
{
USE(pop);
USE(quad);
NanoAssert(d);
if (IsFpReg(rr)) {
@ -1542,7 +1661,7 @@ namespace nanojit
NanoAssert(!quad);
asm_ldst(OP_SW, rr, d, FP);
}
TAG("asm_spill(rr=%d, d=%d, pop=%d, quad=%d)", rr, d, pop, quad);
TAG("asm_spill(rr=%d, d=%d, quad=%d)", rr, d, quad);
}
void

View file

@ -827,7 +827,7 @@ namespace nanojit
}
}
void Assembler::asm_spill(Register rr, int d, bool /* pop */, bool quad) {
void Assembler::asm_spill(Register rr, int d, bool quad) {
(void)quad;
NanoAssert(d);
if (IsFpReg(rr)) {

View file

@ -330,7 +330,7 @@ namespace nanojit
}
}
void Assembler::asm_spill(Register rr, int d, bool pop, bool quad)
void Assembler::asm_spill(Register rr, int d, bool quad)
{
underrunProtect(24);
(void)quad;

View file

@ -1119,13 +1119,14 @@ namespace nanojit
Register rf = findRegFor(iffalse, allow & ~rmask(rr));
// If 'iftrue' isn't in a register, it can be clobbered by 'ins'.
Register rt = iftrue->isInReg() ? iftrue->getReg() : rr;
if (ins->isop(LIR_cmovd)) {
NIns* target = _nIns;
asm_nongp_copy(rr, rf);
asm_branch(false, cond, target);
// If 'iftrue' isn't in a register, it can be clobbered by 'ins'.
Register rt = iftrue->isInReg() ? iftrue->getReg() : rr;
if (rr != rt)
asm_nongp_copy(rr, rt);
freeResourcesOf(ins);
@ -1136,6 +1137,9 @@ namespace nanojit
return;
}
// If 'iftrue' isn't in a register, it can be clobbered by 'ins'.
Register rt = iftrue->isInReg() ? iftrue->getReg() : rr;
// WARNING: We cannot generate any code that affects the condition
// codes between the MRcc generation here and the asm_cmp() call
// below. See asm_cmp() for more details.
@ -1181,12 +1185,21 @@ namespace nanojit
}
NIns* Assembler::asm_branch(bool onFalse, LIns *cond, NIns *target) {
if (target && !isTargetWithinS32(target)) {
setError(ConditionalBranchTooFar);
NanoAssert(0);
}
NanoAssert(cond->isCmp());
LOpcode condop = cond->opcode();
if (target && !isTargetWithinS32(target)) {
// conditional jumps beyond 32bit range, so invert the branch/compare
// and emit an unconditional jump to the target
// j(inverted) B1
// jmp target
// B1:
NIns* shortTarget = _nIns;
JMP(target);
target = shortTarget;
onFalse = !onFalse;
}
if (isCmpDOpcode(condop))
return asm_branchd(onFalse, cond, target);
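The far-branch rewrite above is the usual inversion trampoline: a conditional branch whose target is outside rel32 range becomes an inverted short branch that skips over an unconditional long jump, leaving the original control flow intact. A control-flow sketch of the equivalence (names are illustrative only):
// 'if (cond) goto far' becomes:
//     if (!cond) goto B1;   // j(inverted) B1
//     jmp far;              // unconditional long jump
//   B1:                     // original fall-through
function branchFar(cond, far, fallThrough) {
    if (!cond)
        return fallThrough();
    return far();
}
assertEq(branchFar(true,  function () { return "far"; }, function () { return "near"; }), "far");
assertEq(branchFar(false, function () { return "far"; }, function () { return "near"; }), "near");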
@ -1844,7 +1857,7 @@ namespace nanojit
endOpRegs(ins, rr, ra);
}
void Assembler::asm_spill(Register rr, int d, bool /*pop*/, bool quad) {
void Assembler::asm_spill(Register rr, int d, bool quad) {
NanoAssert(d);
if (!IsFpReg(rr)) {
if (quad)

View file

@ -1215,7 +1215,7 @@ namespace nanojit
if (rmask(r) & XmmRegs) {
SSE_LDQ(r, d, FP);
} else {
NanoAssert(rmask(r) & x87Regs);
NanoAssert(r == FST0);
FLDQ(d, FP);
}
}
@ -1275,17 +1275,16 @@ namespace nanojit
}
}
void Assembler::asm_spill(Register rr, int d, bool pop, bool quad)
void Assembler::asm_spill(Register rr, int d, bool pop)
{
(void)quad;
NanoAssert(d);
if (rmask(rr) & GpRegs) {
ST(FP, d, rr);
} else if (rmask(rr) & XmmRegs) {
SSE_STQ(d, FP, rr);
} else {
NanoAssert(rmask(rr) & x87Regs);
FSTQ((pop?1:0), d, FP);
NanoAssert(rr == FST0);
FSTQ(pop, d, FP);
}
}
@ -1313,7 +1312,7 @@ namespace nanojit
if (rmask(rr) & XmmRegs) {
SSE_LDQ(rr, db, rb);
} else {
NanoAssert(rmask(rr) & x87Regs);
NanoAssert(rr == FST0);
FLDQ(db, rb);
}
break;
@ -1324,7 +1323,7 @@ namespace nanojit
SSE_LDSS(rr, db, rb);
SSE_XORPDr(rr,rr);
} else {
NanoAssert(rmask(rr) & x87Regs);
NanoAssert(rr == FST0);
FLD32(db, rb);
}
break;
@ -1379,7 +1378,7 @@ namespace nanojit
SSE_XORPDr(rt, rt); // zero dest to ensure no dependency stalls
} else {
FST32(pop?1:0, dr, rb);
FST32(pop, dr, rb);
}
} else if (value->isImmD()) {
@ -1413,7 +1412,7 @@ namespace nanojit
if (rmask(rv) & XmmRegs) {
SSE_STQ(dr, rb, rv);
} else {
FSTQ(pop?1:0, dr, rb);
FSTQ(pop, dr, rb);
}
}
}
@ -2058,13 +2057,14 @@ namespace nanojit
Register rf = findRegFor(iffalse, allow & ~rmask(rr));
// If 'iftrue' isn't in a register, it can be clobbered by 'ins'.
Register rt = iftrue->isInReg() ? iftrue->getReg() : rr;
if (ins->isop(LIR_cmovd)) {
NIns* target = _nIns;
asm_nongp_copy(rr, rf);
asm_branch(false, condval, target);
// If 'iftrue' isn't in a register, it can be clobbered by 'ins'.
Register rt = iftrue->isInReg() ? iftrue->getReg() : rr;
if (rr != rt)
asm_nongp_copy(rr, rt);
freeResourcesOf(ins);
@ -2075,6 +2075,9 @@ namespace nanojit
return;
}
// If 'iftrue' isn't in a register, it can be clobbered by 'ins'.
Register rt = iftrue->isInReg() ? iftrue->getReg() : rr;
NanoAssert(ins->isop(LIR_cmovi));
// WARNING: We cannot generate any code that affects the condition
@ -2558,12 +2561,12 @@ namespace nanojit
Register ra = findRegFor(lhs, XmmRegs);
SSE_CVTSD2SI(rr, ra);
} else {
int pop = !lhs->isInReg();
bool pop = !lhs->isInReg();
findSpecificRegFor(lhs, FST0);
if (ins->isInReg())
evict(ins);
int d = findMemFor(ins);
FIST((pop?1:0), d, FP);
FIST(pop, d, FP);
}
freeResourcesOf(ins);
@ -2760,7 +2763,7 @@ namespace nanojit
}
evictIfActive(EAX);
int pop = !lhs->isInReg();
bool pop = !lhs->isInReg();
findSpecificRegFor(lhs, FST0);
if (lhs == rhs) {
@ -2778,12 +2781,12 @@ namespace nanojit
if (rhs->isImmD())
{
const uint64_t* p = findImmDFromPool(rhs->immDasQ());
FCOMdm((pop?1:0), (const double*)p);
FCOMdm(pop, (const double*)p);
}
else
{
int d = findMemFor(rhs);
FCOM((pop?1:0), d, FP);
FCOM(pop, d, FP);
}
}
}

View file

@ -3113,7 +3113,7 @@ EvalInFrame(JSContext *cx, uintN argc, jsval *vp)
? !!(JSVAL_TO_BOOLEAN(argv[2]))
: false;
JS_ASSERT(cx->fp);
JS_ASSERT(cx->hasfp());
FrameRegsIter fi(cx);
for (uint32 i = 0; i < upCount; ++i, ++fi) {

View file

@ -0,0 +1,66 @@
/*
* Any copyright is dedicated to the Public Domain.
* http://creativecommons.org/licenses/publicdomain/
*/
var BUGNUMBER = 587366;
var summary = "String.prototype.replace with non-regexp searchValue";
print(BUGNUMBER + ": " + summary);
/**************
* BEGIN TEST *
**************/
/*
* Check that regexp statics are preserved across the whole test.
* If the engine is trying to cheat by turning stuff into regexps,
* we should catch it!
*/
/(a|(b)|c)+/.exec('abcabc');
var before = {
"source" : RegExp.source,
"$`": RegExp.leftContext,
"$'": RegExp.rightContext,
"$&": RegExp.lastMatch,
"$1": RegExp.$1,
"$2": RegExp.$2
};
var text = 'I once was lost but now am found.';
var searchValue = 'found';
var replaceValue;
/* Lambda substitution. */
replaceValue = function(matchStr, matchStart, textStr) {
assertEq(matchStr, searchValue);
assertEq(matchStart, 27);
assertEq(textStr, text);
return 'not watching that show anymore';
}
var result = text.replace(searchValue, replaceValue);
assertEq(result, 'I once was lost but now am not watching that show anymore.');
/* Dollar substitution. */
replaceValue = "...wait, where was I again? And where is all my $$$$$$? Oh right, $`$&$'" +
" But with no $$$$$$"; /* Note the dot is not replaced and trails the end. */
result = text.replace(searchValue, replaceValue);
assertEq(result, 'I once was lost but now am ...wait, where was I again?' +
' And where is all my $$$? Oh right, I once was lost but now am found.' +
' But with no $$$.');
/* Missing capture group dollar substitution. */
replaceValue = "$1$&$2$'$3";
result = text.replace(searchValue, replaceValue);
assertEq(result, 'I once was lost but now am $1found$2.$3.');
/* Check RegExp statics haven't been mutated. */
for (var ident in before)
assertEq(RegExp[ident], before[ident]);
/******************************************************************************/
if (typeof reportCompare === "function")
reportCompare(true, true);
print("All tests passed!");

View file

@ -0,0 +1 @@

View file

@ -0,0 +1,2 @@
url-prefix ../../jsreftest.html?test=ecma_5/String/
script 15.5.4.11-01.js

View file

View file

@ -6,6 +6,7 @@ include Global/jstests.list
include JSON/jstests.list
include Object/jstests.list
include RegExp/jstests.list
include String/jstests.list
include Types/jstests.list
include extensions/jstests.list
include misc/jstests.list

View file

@ -0,0 +1,18 @@
// Check that the loop is trace-compiled even though it's run in an eval.
code = "\
j = 0;\
for (i = 0; i < 10; i++)\
{\
j += 5;\
}\
";
eval(code);
print (j);
checkStats({
recorderStarted: 1,
recorderAborted: 0,
traceCompleted: 1,
});

View file

@ -0,0 +1 @@
var re = /(?:){1,60}/

View file

@ -0,0 +1,3 @@
// Test flat string replacement, per ECMAScriptv5 15.5.4.11.
assertEq("1+2".replace("1+2", "$&+3"), "1+2+3");
assertEq(")".replace(")","*$&*"), "*)*");

View file

@ -0,0 +1,10 @@
// Use arguments in an eval.
code = " \
function f(a) { var x = a; } \
for (var i = 0; i < 10; i++) { f(5); } \
";
eval(code);
// Test it doesn't assert.

View file

@ -0,0 +1,14 @@
function testFloatArray() {
var v = new Float32Array(32);
for (var i = 0; i < v.length; ++i)
v[i] = i;
var t = 0;
for (var i = 0; i < v.length; ++i)
t += v[i];
return t;
}
assertEq(testFloatArray(), 496);

View file

@ -135,7 +135,7 @@ def run_test(test, lib_dir):
cmd = valgrind_prefix + cmd
if OPTIONS.show_cmd:
print(cmd)
print(subprocess.list2cmdline(cmd))
# close_fds is not supported on Windows and will cause a ValueError.
close_fds = sys.platform != 'win32'
p = Popen(cmd, stdin=PIPE, stdout=PIPE, stderr=PIPE, close_fds=close_fds, env=env)

View file

@ -406,46 +406,13 @@ GetUnsafeObject(JSContext *cx, JSObject *obj)
} // namespace XPCSafeJSObjectWrapper
static JSBool
DummyNative(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval)
{
return JS_TRUE;
}
static JSObject *
GetScopeFunction(JSContext *cx, JSObject *outerObj)
GetScopeChainForSafeCall(JSContext *cx, JSObject *outerObj)
{
jsval v;
if (!JS_GetReservedSlot(cx, outerObj, sScopeFunSlot, &v)) {
return nsnull;
}
JSObject *unsafeObj = GetUnsafeObject(cx, outerObj);
JSObject *scopeobj = JS_GetGlobalForObject(cx, unsafeObj);
OBJ_TO_INNER_OBJECT(cx, scopeobj);
if (!scopeobj) {
return nsnull;
}
if (JSVAL_IS_OBJECT(v)) {
JSObject *funobj = JSVAL_TO_OBJECT(v);
if (JS_GetGlobalForObject(cx, funobj) == scopeobj) {
return funobj;
}
}
JSFunction *fun = JS_NewFunction(cx, DummyNative, 0, 0, scopeobj,
"SJOWContentBoundary");
if (!fun) {
return nsnull;
}
JSObject *funobj = JS_GetFunctionObject(fun);
if (!JS_SetReservedSlot(cx, outerObj, sScopeFunSlot, OBJECT_TO_JSVAL(funobj))) {
return nsnull;
}
return funobj;
return scopeobj;
}
// Wrap a JS value in a safe wrapper of a function wrapper if
@ -662,8 +629,8 @@ XPC_SJOW_GetOrSetProperty(JSContext *cx, JSObject *obj, jsid id, jsval *vp,
return JS_FALSE;
}
JSObject *scopeFun = GetScopeFunction(cx, obj);
if (!scopeFun) {
JSObject *scopeChain = GetScopeChainForSafeCall(cx, obj);
if (!scopeChain) {
return JS_FALSE;
}
@ -681,8 +648,8 @@ XPC_SJOW_GetOrSetProperty(JSContext *cx, JSObject *obj, jsid id, jsval *vp,
}
JSBool ok = aIsSet
? js_SetPropertyByIdWithFakeFrame(cx, unsafeObj, scopeFun, id, vp)
: js_GetPropertyByIdWithFakeFrame(cx, unsafeObj, scopeFun, id, vp);
? js_SetPropertyByIdWithFakeFrame(cx, unsafeObj, scopeChain, id, vp)
: js_GetPropertyByIdWithFakeFrame(cx, unsafeObj, scopeChain, id, vp);
if (!ok) {
return JS_FALSE;
}
@ -881,8 +848,8 @@ XPC_SJOW_Call(JSContext *cx, JSObject *obj, uintN argc, jsval *argv,
return JS_FALSE;
}
JSObject *scopeFun = GetScopeFunction(cx, safeObj);
if (!scopeFun) {
JSObject *scopeChain = GetScopeChainForSafeCall(cx, safeObj);
if (!scopeChain) {
return JS_FALSE;
}
@ -903,7 +870,7 @@ XPC_SJOW_Call(JSContext *cx, JSObject *obj, uintN argc, jsval *argv,
return JS_FALSE;
}
if (!js_CallFunctionValueWithFakeFrame(cx, JSVAL_TO_OBJECT(v), scopeFun,
if (!js_CallFunctionValueWithFakeFrame(cx, JSVAL_TO_OBJECT(v), scopeChain,
OBJECT_TO_JSVAL(funToCall),
argc, argv, rval)) {
return JS_FALSE;
@ -964,8 +931,8 @@ XPC_SJOW_Create(JSContext *cx, JSObject *obj, uintN argc, jsval *argv,
return JS_FALSE;
}
JSObject *scopeFun = GetScopeFunction(cx, callee);
if (!scopeFun) {
JSObject *scopeChain = GetScopeChainForSafeCall(cx, callee);
if (!scopeChain) {
return JS_FALSE;
}
@ -989,7 +956,7 @@ XPC_SJOW_Create(JSContext *cx, JSObject *obj, uintN argc, jsval *argv,
return JS_FALSE;
}
if (!js_CallFunctionValueWithFakeFrame(cx, JSVAL_TO_OBJECT(v), scopeFun,
if (!js_CallFunctionValueWithFakeFrame(cx, JSVAL_TO_OBJECT(v), scopeChain,
OBJECT_TO_JSVAL(unsafeObj),
argc, argv, rval)) {
return JS_FALSE;

View file

@ -84,6 +84,7 @@ static const short escapes[] = {
0, 0, 0 /* x - z */
};
static const unsigned OPCODE_LEN = 1;
static const unsigned BRAZERO_LEN = OPCODE_LEN;
static const unsigned BRA_NEST_SIZE = 2;
static const unsigned BRA_LEN = OPCODE_LEN + LINK_SIZE + BRA_NEST_SIZE;
static const unsigned KET_LEN = OPCODE_LEN + LINK_SIZE;
@ -2485,7 +2486,7 @@ static int calculateCompiledPatternLength(const UChar* pattern, int patternLengt
}
length += repeatsLength;
if (maxRepeats > minRepeats) { /* Need this test as maxRepeats=-1 means no limit */
repeatsLength = multiplyWithOverflowCheck(maxRepeats - minRepeats, duplength + BRA_LEN + KET_LEN);
repeatsLength = multiplyWithOverflowCheck(maxRepeats - minRepeats, duplength + BRAZERO_LEN + BRA_LEN + KET_LEN);
if (repeatsLength < 0) {
errorcode = ERR16;
return -1;
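The fix being made here is that each of the (maxRepeats - minRepeats) optional copies of the repeated group must also reserve space for its BRAZERO opcode, not just its BRA ... KET bracket; under-counting one byte per copy let patterns like /(?:){1,60}/ (see the regress test above) overrun the length estimate. Purely illustrative arithmetic, with made-up opcode sizes rather than the real constants:
function extraRepeatsLength(duplength, minRepeats, maxRepeats,
                            BRAZERO_LEN, BRA_LEN, KET_LEN) {
    return (maxRepeats - minRepeats) * (duplength + BRAZERO_LEN + BRA_LEN + KET_LEN);
}
// For an empty group repeated {1,60}, the 59 optional copies each still pay
// for BRAZERO + BRA/KET bookkeeping:
print(extraRepeatsLength(0, 1, 60, 1, 6, 4));   // 59 * 11 = 649 with these assumed sizes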