Mirror of https://github.com/mozilla/gecko-dev.git
[JAEGER] Remove tracerecursion (bug 591539, r=luke).
Parent: 3e2bdaa3ca
Commit: c531ce8cc1
@@ -332,35 +332,3 @@ js_NewNullClosure(JSContext* cx, JSObject* funobj, JSObject* proto, JSObject* pa
 JS_DEFINE_CALLINFO_4(extern, OBJECT, js_NewNullClosure, CONTEXT, OBJECT, OBJECT, OBJECT,
                      0, ACCSET_STORE_ANY)
 
-JS_REQUIRES_STACK JSBool FASTCALL
-js_PopInterpFrame(JSContext* cx, TracerState* state)
-{
-    JS_ASSERT(cx->hasfp() && cx->fp()->down);
-    JSStackFrame* const fp = cx->fp();
-
-    /*
-     * Mirror frame popping code from inline_return in js_Interpret. There are
-     * some things we just don't want to handle. In those cases, the trace will
-     * MISMATCH_EXIT.
-     */
-    if (fp->hasHookData())
-        return JS_FALSE;
-    if (cx->version != fp->getCallerVersion())
-        return JS_FALSE;
-    if (fp->flags & JSFRAME_CONSTRUCTING)
-        return JS_FALSE;
-    if (fp->hasIMacroPC())
-        return JS_FALSE;
-    if (fp->hasBlockChain())
-        return JS_FALSE;
-
-    fp->putActivationObjects(cx);
-
-    /* Pop the frame and its memory. */
-    cx->stack().popInlineFrame(cx, fp, fp->down);
-
-    /* Update the inline call count. */
-    *state->inlineCallCountp = *state->inlineCallCountp - 1;
-    return JS_TRUE;
-}
-JS_DEFINE_CALLINFO_2(extern, BOOL, js_PopInterpFrame, CONTEXT, TRACERSTATE, 0, ACCSET_STORE_ANY)
-
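For context: on trace this builtin was reachable only through a guarded call, so each JS_FALSE return above simply deoptimized back to the interpreter. The sole call site, from the deleted jsrecursion.cpp later in this diff, read:

    /* Pop the interpreter frame; a zero result takes the MISMATCH_EXIT. */
    LIns* args[] = { lirbuf->state, cx_ins };
    guard(false, lir->insEqI_0(lir->insCall(&js_PopInterpFrame_ci, args)), MISMATCH_EXIT);

With that file gone, the builtin and its CALLINFO entries have no remaining users.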
@@ -595,7 +595,6 @@ JS_DECLARE_CALLINFO(js_HasNamedPropertyInt32)
 JS_DECLARE_CALLINFO(js_TypeOfObject)
 JS_DECLARE_CALLINFO(js_BooleanIntToString)
 JS_DECLARE_CALLINFO(js_NewNullClosure)
-JS_DECLARE_CALLINFO(js_PopInterpFrame)
 
 /* Defined in jsfun.cpp. */
 JS_DECLARE_CALLINFO(js_AllocFlatClosure)
@@ -3624,13 +3624,6 @@ js_EmitFunctionScript(JSContext *cx, JSCodeGenerator *cg, JSParseNode *body)
         if (js_Emit1(cx, cg, JSOP_GENERATOR) < 0)
             return false;
         CG_SWITCH_TO_MAIN(cg);
-    } else {
-        /*
-         * Emit a trace hint opcode only if not in a generator, since generators
-         * are not yet traced and both want to be the first instruction.
-         */
-        if (js_Emit1(cx, cg, JSOP_TRACE) < 0)
-            return false;
     }
 
     if (cg->needsEagerArguments()) {
@@ -6435,11 +6428,6 @@ js_EmitTree(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
         argc = pn->pn_count - 1;
         if (js_Emit3(cx, cg, PN_OP(pn), ARGC_HI(argc), ARGC_LO(argc)) < 0)
             return JS_FALSE;
-        if (PN_OP(pn) == JSOP_CALL) {
-            /* Add a trace hint opcode for recursion. */
-            if (js_Emit1(cx, cg, JSOP_TRACE) < 0)
-                return JS_FALSE;
-        }
         if (PN_OP(pn) == JSOP_EVAL)
             EMIT_UINT16_IMM_OP(JSOP_LINENO, pn->pn_pos.begin.lineno);
         break;
@@ -2322,10 +2322,10 @@ Interpret(JSContext *cx, JSStackFrame *entryFrame, uintN inlineCallCount)
         goto error;                                                           \
     JS_END_MACRO
 
-#define MONITOR_BRANCH(reason)                                                \
+#define MONITOR_BRANCH()                                                      \
     JS_BEGIN_MACRO                                                            \
         if (TRACING_ENABLED(cx)) {                                            \
-            MonitorResult r = MonitorLoopEdge(cx, inlineCallCount, reason);   \
+            MonitorResult r = MonitorLoopEdge(cx, inlineCallCount);           \
             if (r == MONITOR_RECORDING) {                                     \
                 JS_ASSERT(TRACE_RECORDER(cx));                                \
                 MONITOR_BRANCH_TRACEVIS;                                      \
@@ -2341,7 +2341,7 @@ Interpret(JSContext *cx, JSStackFrame *entryFrame, uintN inlineCallCount)
 
 #else /* !JS_TRACER */
 
-#define MONITOR_BRANCH(reason) ((void) 0)
+#define MONITOR_BRANCH() ((void) 0)
 
 #endif /* !JS_TRACER */
 
@@ -2380,13 +2380,13 @@ Interpret(JSContext *cx, JSStackFrame *entryFrame, uintN inlineCallCount)
         CHECK_BRANCH();                                                       \
         if (op == JSOP_NOP) {                                                 \
             if (TRACE_RECORDER(cx)) {                                         \
-                MONITOR_BRANCH(Record_Branch);                                \
+                MONITOR_BRANCH();                                             \
                 op = (JSOp) *regs.pc;                                         \
             } else {                                                          \
                 op = (JSOp) *++regs.pc;                                       \
             }                                                                 \
         } else if (op == JSOP_TRACE) {                                        \
-            MONITOR_BRANCH(Record_Branch);                                    \
+            MONITOR_BRANCH();                                                 \
             op = (JSOp) *regs.pc;                                             \
         }                                                                     \
     }                                                                         \
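With the recursion exits gone, Record_Branch is the only reason that was ever passed (the recordReason plumbing disappears from TraceRecorder and endLoop in the jstracer.cpp hunks below), so MonitorLoopEdge and the macro drop the parameter. Every call site shrinks the same way:

    MONITOR_BRANCH(Record_Branch);   // before
    MONITOR_BRANCH();                // after
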
@@ -2800,14 +2800,7 @@ BEGIN_CASE(JSOP_STOP)
             JS_ASSERT(js_CodeSpec[js_GetOpcode(cx, script, regs.pc)].length
                       == JSOP_CALL_LENGTH);
             TRACE_0(LeaveFrame);
-            if (*(regs.pc + JSOP_CALL_LENGTH) == JSOP_TRACE ||
-                *(regs.pc + JSOP_CALL_LENGTH) == JSOP_NOP) {
-                JS_STATIC_ASSERT(JSOP_TRACE_LENGTH == JSOP_NOP_LENGTH);
-                regs.pc += JSOP_CALL_LENGTH;
-                len = JSOP_TRACE_LENGTH;
-            } else {
-                len = JSOP_CALL_LENGTH;
-            }
+            len = JSOP_CALL_LENGTH;
             DO_NEXT_OP(len);
         }
         goto error;
@@ -4776,20 +4769,7 @@ BEGIN_CASE(JSOP_APPLY)
 
             DTrace::enterJSFun(cx, fp, fun, fp->down, fp->numActualArgs(), fp->argv);
 
-#ifdef JS_TRACER
-            if (TraceRecorder *tr = TRACE_RECORDER(cx)) {
-                AbortableRecordingStatus status = tr->record_EnterFrame(inlineCallCount);
-                RESTORE_INTERP_VARS();
-                if (StatusAbortsRecorderIfActive(status)) {
-                    if (TRACE_RECORDER(cx)) {
-                        JS_ASSERT(TRACE_RECORDER(cx) == tr);
-                        AbortRecording(cx, "record_EnterFrame failed");
-                    }
-                    if (status == ARECORD_ERROR)
-                        goto error;
-                }
-            }
-#endif
+            TRACE_0(EnterFrame);
 
 #ifdef JS_METHODJIT
             /* Try to ensure methods are method JIT'd. */
@@ -4990,20 +4990,6 @@ js_DecompileFunction(JSPrinter *jp)
     endpc = pc + script->length;
     ok = JS_TRUE;
 
-    /* Skip trace hint if it appears here. */
-#if JS_HAS_GENERATORS
-    if (js_GetOpcode(jp->sprinter.context, script, script->code) != JSOP_GENERATOR)
-#endif
-    {
-        JSOp op = js_GetOpcode(jp->sprinter.context, script, pc);
-        if (op == JSOP_TRACE || op == JSOP_NOP) {
-            JS_STATIC_ASSERT(JSOP_TRACE_LENGTH == JSOP_NOP_LENGTH);
-            pc += JSOP_TRACE_LENGTH;
-        } else {
-            JS_ASSERT(op == JSOP_STOP); /* empty script singleton */
-        }
-    }
-
 #if JS_HAS_DESTRUCTURING
     ss.printer = NULL;
     jp->script = script;
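This skip existed because functions used to begin with a trace hint (see the js_EmitFunctionScript hunk above), so the decompiler can now start at the first real opcode. An illustrative sketch of the bytecode for an empty function body, layout assumed:

    // before the patch: 00: JSOP_TRACE   01: JSOP_STOP
    // after the patch:  00: JSOP_STOP
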
@@ -1,851 +0,0 @@
-/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
- * vim: set ts=4 sw=4 et tw=99 ft=cpp:
- *
- * ***** BEGIN LICENSE BLOCK *****
- * Version: MPL 1.1/GPL 2.0/LGPL 2.1
- *
- * The contents of this file are subject to the Mozilla Public License Version
- * 1.1 (the "License"); you may not use this file except in compliance with
- * the License. You may obtain a copy of the License at
- * http://www.mozilla.org/MPL/
- *
- * Software distributed under the License is distributed on an "AS IS" basis,
- * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
- * for the specific language governing rights and limitations under the
- * License.
- *
- * The Original Code is Mozilla SpiderMonkey JavaScript 1.9 code, released
- * June 12, 2009.
- *
- * The Initial Developer of the Original Code is
- *   the Mozilla Corporation.
- *
- * Contributor(s):
- *   David Anderson <danderson@mozilla.com>
- *   Andreas Gal <gal@mozilla.com>
- *
- * Alternatively, the contents of this file may be used under the terms of
- * either of the GNU General Public License Version 2 or later (the "GPL"),
- * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
- * in which case the provisions of the GPL or the LGPL are applicable instead
- * of those above. If you wish to allow use of your version of this file only
- * under the terms of either the GPL or the LGPL, and not to allow others to
- * use your version of this file under the terms of the MPL, indicate your
- * decision by deleting the provisions above and replace them with the notice
- * and other provisions required by the GPL or the LGPL. If you do not delete
- * the provisions above, a recipient may use your version of this file under
- * the terms of any one of the MPL, the GPL or the LGPL.
- *
- * ***** END LICENSE BLOCK ***** */
-
-#include "jsapi.h"
-
-class RecursiveSlotMap : public SlotMap
-{
-  protected:
-    unsigned downPostSlots;
-    LIns *rval_ins;
-
-  public:
-    RecursiveSlotMap(TraceRecorder& rec, unsigned downPostSlots, LIns* rval_ins)
-      : SlotMap(rec), downPostSlots(downPostSlots), rval_ins(rval_ins)
-    {
-    }
-
-    JS_REQUIRES_STACK void
-    adjustTypes()
-    {
-        /* Check if the return value should be promoted. */
-        if (slots[downPostSlots].lastCheck == TypeCheck_Demote)
-            rval_ins = mRecorder.lir->ins1(LIR_i2d, rval_ins);
-        /* Adjust any global variables. */
-        for (unsigned i = downPostSlots + 1; i < slots.length(); i++)
-            adjustType(slots[i]);
-    }
-
-    JS_REQUIRES_STACK void
-    adjustTail(TypeConsensus consensus)
-    {
-        /*
-         * exit->sp_adj = ((downPostSlots + 1) * sizeof(double)) - nativeStackBase
-         *
-         * Store at exit->sp_adj - sizeof(double)
-         */
-        ptrdiff_t retOffset = downPostSlots * sizeof(double) -
-                              mRecorder.tree->nativeStackBase;
-        mRecorder.lir->insStore(mRecorder.addName(rval_ins, "rval_ins"),
-                                mRecorder.lirbuf->sp, retOffset, ACCSET_STACK);
-    }
-};
-
-class UpRecursiveSlotMap : public RecursiveSlotMap
-{
-  public:
-    UpRecursiveSlotMap(TraceRecorder& rec, unsigned downPostSlots, LIns* rval_ins)
-      : RecursiveSlotMap(rec, downPostSlots, rval_ins)
-    {
-    }
-
-    JS_REQUIRES_STACK void
-    adjustTail(TypeConsensus consensus)
-    {
-        LirBuffer* lirbuf = mRecorder.lirbuf;
-        LirWriter* lir = mRecorder.lir;
-
-        /*
-         * The native stack offset of the return value once this frame has
-         * returned, is:
-         *     -tree->nativeStackBase + downPostSlots * sizeof(double)
-         *
-         * Note, not +1, since the offset is 0-based.
-         *
-         * This needs to be adjusted down one frame. The amount to adjust must
-         * be the amount down recursion added, which was just guarded as
-         * |downPostSlots|. So the offset is:
-         *
-         *     -tree->nativeStackBase + downPostSlots * sizeof(double) -
-         *                              downPostSlots * sizeof(double)
-         * Or:
-         *     -tree->nativeStackBase
-         *
-         * This makes sense because this slot is just above the highest sp for
-         * the down frame.
-         */
-        lir->insStore(rval_ins, lirbuf->sp, -mRecorder.tree->nativeStackBase, ACCSET_STACK);
-
-        lirbuf->sp = lir->ins2(LIR_addp, lirbuf->sp,
-                               lir->insImmWord(-int(downPostSlots) * sizeof(double)));
-        lir->insStore(lirbuf->sp, lirbuf->state, offsetof(TracerState, sp), ACCSET_OTHER);
-        lirbuf->rp = lir->ins2(LIR_addp, lirbuf->rp,
-                               lir->insImmWord(-int(sizeof(FrameInfo*))));
-        lir->insStore(lirbuf->rp, lirbuf->state, offsetof(TracerState, rp), ACCSET_OTHER);
-    }
-};
-
-#if defined DEBUG
-JS_REQUIRES_STACK void
-TraceRecorder::assertDownFrameIsConsistent(VMSideExit* anchor, FrameInfo* fi)
-{
-    JS_ASSERT(anchor->recursive_down);
-    JS_ASSERT(anchor->recursive_down->callerHeight == fi->callerHeight);
-
-    unsigned downPostSlots = fi->callerHeight;
-    JSValueType* typeMap = fi->get_typemap();
-
-    captureStackTypes(1, typeMap);
-    const JSValueType* m1 = anchor->recursive_down->get_typemap();
-    for (unsigned i = 0; i < downPostSlots; i++) {
-        if (m1[i] == typeMap[i])
-            continue;
-        if ((typeMap[i] == JSVAL_TYPE_INT32 && m1[i] == JSVAL_TYPE_DOUBLE) ||
-            (typeMap[i] == JSVAL_TYPE_DOUBLE && m1[i] == JSVAL_TYPE_INT32)) {
-            continue;
-        }
-        JS_NOT_REACHED("invalid RECURSIVE_MISMATCH exit");
-    }
-    JS_ASSERT(memcmp(anchor->recursive_down, fi, sizeof(FrameInfo)) == 0);
-}
-#endif
-
-JS_REQUIRES_STACK VMSideExit*
-TraceRecorder::downSnapshot(FrameInfo* downFrame)
-{
-    JS_ASSERT(!pendingSpecializedNative);
-
-    /* Build the typemap the exit will have. Note extra stack slot for return value. */
-    unsigned downPostSlots = downFrame->callerHeight;
-    unsigned ngslots = tree->globalSlots->length();
-    unsigned exitTypeMapLen = downPostSlots + 1 + ngslots;
-    JSValueType* exitTypeMap = (JSValueType*)alloca(sizeof(JSValueType) * exitTypeMapLen);
-    JSValueType* typeMap = downFrame->get_typemap();
-
-
-    /* Add stack slots. */
-    for (unsigned i = 0; i < downPostSlots; i++)
-        exitTypeMap[i] = typeMap[i];
-
-    /* Add the return type. */
-    JS_ASSERT_IF(*cx->regs->pc != JSOP_RETURN, *cx->regs->pc == JSOP_STOP);
-    if (*cx->regs->pc == JSOP_RETURN)
-        exitTypeMap[downPostSlots] = determineSlotType(&stackval(-1));
-    else
-        exitTypeMap[downPostSlots] = JSVAL_TYPE_UNDEFINED;
-
-    /* Add global types. */
-    determineGlobalTypes(&exitTypeMap[downPostSlots + 1]);
-
-    VMSideExit* exit = (VMSideExit*)
-        traceMonitor->traceAlloc->alloc(sizeof(VMSideExit) + sizeof(JSValueType) * exitTypeMapLen);
-
-    PodZero(exit);
-    exit->from = fragment;
-    exit->calldepth = 0;
-    JS_ASSERT(unsigned(exit->calldepth) == callDepth);
-    exit->numGlobalSlots = ngslots;
-    exit->numStackSlots = downPostSlots + 1;
-    exit->numStackSlotsBelowCurrentFrame = cx->fp()->down->argv ?
-        nativeStackOffset(&cx->fp()->argv[-2]) / sizeof(double) : 0;
-    exit->exitType = UNSTABLE_LOOP_EXIT;
-    exit->block = cx->fp()->down->maybeBlockChain();
-    exit->pc = downFrame->pc + JSOP_CALL_LENGTH;
-    exit->imacpc = NULL;
-    exit->sp_adj = ((downPostSlots + 1) * sizeof(double)) - tree->nativeStackBase;
-    exit->rp_adj = exit->calldepth * sizeof(FrameInfo*);
-    exit->nativeCalleeWord = 0;
-    exit->lookupFlags = js_InferFlags(cx, 0);
-    memcpy(exit->fullTypeMap(), exitTypeMap, sizeof(JSValueType) * exitTypeMapLen);
-#if defined JS_JIT_SPEW
-    TreevisLogExit(cx, exit);
-#endif
-    return exit;
-}
-
-static JS_REQUIRES_STACK Value *
-DownFrameSP(JSContext *cx)
-{
-    FrameRegsIter i(cx);
-    ++i;
-    JS_ASSERT(i.fp() == cx->fp()->down);
-    return i.sp();
-}
-
-JS_REQUIRES_STACK AbortableRecordingStatus
-TraceRecorder::upRecursion()
-{
-    JS_ASSERT((JSOp)*cx->fp()->down->savedPC == JSOP_CALL);
-    JS_ASSERT(js_CodeSpec[js_GetOpcode(cx, cx->fp()->down->getScript(),
-              cx->fp()->down->savedPC)].length == JSOP_CALL_LENGTH);
-
-    JS_ASSERT(callDepth == 0);
-
-    /*
-     * If some operation involving interpreter frame slurping failed, go to
-     * that code right away, and don't bother with emitting the up-recursive
-     * guards again.
-     */
-    if (anchor && (anchor->exitType == RECURSIVE_EMPTY_RP_EXIT ||
-                   anchor->exitType == RECURSIVE_SLURP_MISMATCH_EXIT ||
-                   anchor->exitType == RECURSIVE_SLURP_FAIL_EXIT)) {
-        return slurpDownFrames(cx->fp()->down->savedPC);
-    }
-
-    jsbytecode* return_pc = cx->fp()->down->savedPC;
-    jsbytecode* recursive_pc = return_pc + JSOP_CALL_LENGTH;
-
-    /*
-     * It is possible that the down frame isn't the same at runtime. It's not
-     * enough to guard on the PC, since the typemap could be different as well.
-     * To deal with this, guard that the FrameInfo on the callstack is 100%
-     * identical.
-     *
-     * Note that though the counted slots is called "downPostSlots", this is
-     * the number of slots after the CALL instruction has theoretically popped
-     * callee/this/argv, but before the return value is pushed. This is
-     * intended since the FrameInfo pushed by down recursion would not have
-     * the return value yet. Instead, when closing the loop, the return value
-     * becomes the sole stack type that deduces type stability.
-     */
-    unsigned totalSlots = NativeStackSlots(cx, 1);
-    unsigned downPostSlots = totalSlots - NativeStackSlots(cx, 0);
-    FrameInfo* fi = (FrameInfo*)alloca(sizeof(FrameInfo) + totalSlots * sizeof(JSValueType));
-    fi->block = NULL;
-    fi->pc = (jsbytecode*)return_pc;
-    fi->imacpc = NULL;
-
-    /*
-     * Need to compute this from the down frame, since the stack could have
-     * moved on this one.
-     */
-    fi->spdist = DownFrameSP(cx) - cx->fp()->down->slots();
-    JS_ASSERT(cx->fp()->numActualArgs() == cx->fp()->down->numActualArgs());
-    fi->set_argc(uint16(cx->fp()->numActualArgs()), false);
-    fi->callerHeight = downPostSlots;
-    fi->callerArgc = cx->fp()->down->numActualArgs();
-
-    if (anchor && anchor->exitType == RECURSIVE_MISMATCH_EXIT) {
-        /*
-         * Case 0: Anchoring off a RECURSIVE_MISMATCH guard. Guard on this FrameInfo.
-         * This is always safe because this point is only reached on simple "call myself"
-         * recursive functions.
-         */
-#if defined DEBUG
-        assertDownFrameIsConsistent(anchor, fi);
-#endif
-        fi = anchor->recursive_down;
-    } else if (recursive_pc != fragment->root->ip) {
-        /*
-         * Case 1: Guess that down-recursion has to started back out, infer types
-         * from the down frame.
-         */
-        captureStackTypes(1, fi->get_typemap());
-    } else {
-        /* Case 2: Guess that up-recursion is backing out, infer types from our Tree. */
-        JS_ASSERT(tree->nStackTypes == downPostSlots + 1);
-        JSValueType* typeMap = fi->get_typemap();
-        for (unsigned i = 0; i < downPostSlots; i++)
-            typeMap[i] = tree->typeMap[i];
-    }
-
-    fi = traceMonitor->frameCache->memoize(fi);
-
-    /*
-     * Guard that there are more recursive frames. If coming from an anchor
-     * where this was already computed, don't bother doing it again.
-     */
-    if (!anchor || anchor->exitType != RECURSIVE_MISMATCH_EXIT) {
-        VMSideExit* exit = snapshot(RECURSIVE_EMPTY_RP_EXIT);
-
-        /* Guard that rp >= sr + 1 */
-        guard(true,
-              lir->ins2(LIR_gep, lirbuf->rp,
-                        lir->ins2(LIR_addp,
-                                  lir->insLoad(LIR_ldp, lirbuf->state,
-                                               offsetof(TracerState, sor), ACCSET_OTHER),
-                                  INS_CONSTWORD(sizeof(FrameInfo*)))),
-              exit);
-    }
-
-    debug_only_printf(LC_TMRecorder, "guardUpRecursive fragment->root=%p fi=%p\n", (void*)fragment->root, (void*)fi);
-
-    /* Guard that the FrameInfo above is the same FrameInfo pointer. */
-    VMSideExit* exit = snapshot(RECURSIVE_MISMATCH_EXIT);
-    LIns* prev_rp = lir->insLoad(LIR_ldp, lirbuf->rp, -int32_t(sizeof(FrameInfo*)), ACCSET_RSTACK);
-    guard(true, lir->ins2(LIR_eqp, prev_rp, INS_CONSTPTR(fi)), exit);
-
-    /*
-     * Now it's time to try and close the loop. Get a special exit that points
-     * at the down frame, after the return has been propagated up.
-     */
-    exit = downSnapshot(fi);
-
-    LIns* rval_ins;
-    if (*cx->regs->pc == JSOP_RETURN) {
-        JS_ASSERT(!anchor || anchor->exitType != RECURSIVE_SLURP_FAIL_EXIT);
-        rval_ins = get(&stackval(-1));
-        JS_ASSERT(rval_ins);
-    } else {
-        rval_ins = INS_UNDEFINED();
-    }
-
-    JSValueType returnType = exit->stackTypeMap()[downPostSlots];
-    if (returnType == JSVAL_TYPE_INT32) {
-        JS_ASSERT(*cx->regs->pc == JSOP_RETURN);
-        JS_ASSERT(determineSlotType(&stackval(-1)) == JSVAL_TYPE_INT32);
-        JS_ASSERT(isPromoteInt(rval_ins));
-        rval_ins = demote(lir, rval_ins);
-    }
-
-    UpRecursiveSlotMap slotMap(*this, downPostSlots, rval_ins);
-    for (unsigned i = 0; i < downPostSlots; i++)
-        slotMap.addSlot(exit->stackType(i));
-    if (*cx->regs->pc == JSOP_RETURN)
-        slotMap.addSlot(&stackval(-1));
-    else
-        slotMap.addSlot(JSVAL_TYPE_UNDEFINED);
-    VisitGlobalSlots(slotMap, cx, *tree->globalSlots);
-    if (recursive_pc == (jsbytecode*)fragment->root->ip) {
-        debug_only_print0(LC_TMTracer, "Compiling up-recursive loop...\n");
-    } else {
-        debug_only_print0(LC_TMTracer, "Compiling up-recursive branch...\n");
-        exit->exitType = RECURSIVE_UNLINKED_EXIT;
-        exit->recursive_pc = recursive_pc;
-    }
-    JS_ASSERT(tree->recursion != Recursion_Disallowed);
-    if (tree->recursion != Recursion_Detected)
-        tree->recursion = Recursion_Unwinds;
-    return closeLoop(slotMap, exit);
-}
-
-class SlurpInfo
-{
-  public:
-    unsigned curSlot;
-    JSValueType* typeMap;
-    VMSideExit* exit;
-    unsigned slurpFailSlot;
-};
-
-/*
- * The three types of anchors that can cause this type of trace to be built are:
- *   RECURSIVE_SLURP_MISMATCH_EXIT
- *   RECURSIVE_SLURP_FAIL_EXIT
- *   RECURSIVE_EMPTY_RP_EXIT
- *
- * EMPTY_RP means that recursion is unwinding, but there are no more frames.
- * This triggers a "slurp trace" to be built. A slurp trace does three things:
- *   1) Checks to see if cx->fp returns to the same point the recursive trace
- *      is trying to unwind to.
- *   2) Pops the inline frame cx->fp, such that cx->fp is now cx->fp->down.
- *   3) Converts the new top-frame slots/sp into the tracer frame.
- *
- * SLURP_MISMATCH means that while trying to convert an interpreter frame,
- * it is owned by the same script, but does not return to the same pc. At this
- * point the frame has not been popped yet.
- *
- * SLURP_FAIL means that the interpreter frame has been popped, the return
- * value has been written to the native stack, but there was a type mismatch
- * while unboxing the interpreter slots.
- */
-JS_REQUIRES_STACK AbortableRecordingStatus
-TraceRecorder::slurpDownFrames(jsbytecode* return_pc)
-{
-    /* Missing - no go */
-    if (cx->fp()->numActualArgs() != cx->fp()->numFormalArgs())
-        RETURN_STOP_A("argc != nargs");
-
-    LIns* argv_ins;
-    unsigned frameDepth;
-    unsigned downPostSlots;
-
-    FrameRegsIter i(cx);
-    LIns* fp_ins = addName(entryFrameIns(), "fp");
-
-    /*
-     * When first emitting slurp code, do so against the down frame. After
-     * popping the interpreter frame, it is illegal to resume here, as the
-     * down frame has been moved up. So all this code should be skipped if
-     * anchoring off such an exit.
-     */
-    if (!anchor || anchor->exitType != RECURSIVE_SLURP_FAIL_EXIT) {
-        fp_ins = addName(lir->insLoad(LIR_ldp, fp_ins, offsetof(JSStackFrame, down), ACCSET_OTHER),
-                         "downFp");
-        ++i;
-
-        argv_ins = addName(lir->insLoad(LIR_ldp, fp_ins, offsetof(JSStackFrame, argv), ACCSET_OTHER),
-                           "argv");
-
-        /* If recovering from a SLURP_MISMATCH, all of this is unnecessary. */
-        if (!anchor || anchor->exitType != RECURSIVE_SLURP_MISMATCH_EXIT) {
-            /* fp->down should not be NULL. */
-            guard(false, lir->insEqP_0(fp_ins), RECURSIVE_LOOP_EXIT);
-
-            /* fp->down->argv should not be NULL. */
-            guard(false, lir->insEqP_0(argv_ins), RECURSIVE_LOOP_EXIT);
-
-            /*
-             * Guard on the script being the same. This might seem unnecessary,
-             * but it lets the recursive loop end cleanly if it doesn't match.
-             * With only the pc check, it is harder to differentiate between
-             * end-of-recursion and recursion-returns-to-different-pc.
-             */
-            guard(true,
-                  lir->ins2(LIR_eqp,
-                            addName(lir->insLoad(LIR_ldp, fp_ins,
-                                                 JSStackFrame::offsetScript(), ACCSET_OTHER),
-                                    "script"),
-                            INS_CONSTPTR(cx->fp()->down->getScript())),
-                  RECURSIVE_LOOP_EXIT);
-        }
-
-        /* fp->down->savedPC should be == pc. */
-        guard(true,
-              lir->ins2(LIR_eqp,
-                        addName(lir->insLoad(LIR_ldp, fp_ins, offsetof(JSStackFrame, savedPC),
-                                             ACCSET_OTHER),
-                                "savedPC"),
-                        INS_CONSTPTR(return_pc)),
-              RECURSIVE_SLURP_MISMATCH_EXIT);
-
-        /* fp->down->argc should be == argc. */
-        guard(true,
-              lir->ins2(LIR_eqi,
-                        addName(lir->insLoad(LIR_ldi, fp_ins, JSStackFrame::offsetNumActualArgs(),
-                                             ACCSET_OTHER),
-                                "argc"),
-                        INS_CONST(cx->fp()->numActualArgs())),
-              MISMATCH_EXIT);
-
-        /* Pop the interpreter frame. */
-        LIns* args[] = { lirbuf->state, cx_ins };
-        guard(false, lir->insEqI_0(lir->insCall(&js_PopInterpFrame_ci, args)), MISMATCH_EXIT);
-
-        /* Compute slots for the down frame. */
-        downPostSlots = NativeStackSlots(cx, 1) - NativeStackSlots(cx, 0);
-        frameDepth = 1;
-    } else {
-        /* Note: loading argv from fp, not fp->down. */
-        argv_ins = addName(lir->insLoad(LIR_ldp, fp_ins, offsetof(JSStackFrame, argv), ACCSET_OTHER),
-                           "argv");
-
-        /* Slots for this frame, minus the return value. */
-        downPostSlots = NativeStackSlots(cx, 0) - 1;
-        frameDepth = 0;
-    }
-
-    /*
-     * This is a special exit used as a template for the stack-slurping code.
-     * LeaveTree will ignore all but the final slot, which contains the return
-     * value. The slurpSlot variable keeps track of the last slot that has been
-     * unboxed, as to avoid re-unboxing when taking a SLURP_FAIL exit.
-     */
-    unsigned numGlobalSlots = tree->globalSlots->length();
-    unsigned safeSlots = NativeStackSlots(cx, frameDepth) + 1 + numGlobalSlots;
-    jsbytecode* recursive_pc = return_pc + JSOP_CALL_LENGTH;
-    VMSideExit* exit = (VMSideExit*)
-        traceMonitor->traceAlloc->alloc(sizeof(VMSideExit) + sizeof(JSValueType) * safeSlots);
-    PodZero(exit);
-    exit->pc = (jsbytecode*)recursive_pc;
-    exit->from = fragment;
-    exit->exitType = RECURSIVE_SLURP_FAIL_EXIT;
-    exit->numStackSlots = downPostSlots + 1;
-    exit->numGlobalSlots = numGlobalSlots;
-    exit->sp_adj = ((downPostSlots + 1) * sizeof(double)) - tree->nativeStackBase;
-    exit->recursive_pc = recursive_pc;
-
-    /*
-     * Build the exit typemap. This may capture extra types, but they are
-     * thrown away.
-     */
-    JSValueType* typeMap = exit->stackTypeMap();
-    jsbytecode* oldpc = cx->regs->pc;
-    cx->regs->pc = exit->pc;
-    captureStackTypes(frameDepth, typeMap);
-    cx->regs->pc = oldpc;
-    if (!anchor || anchor->exitType != RECURSIVE_SLURP_FAIL_EXIT) {
-        JS_ASSERT_IF(*cx->regs->pc != JSOP_RETURN, *cx->regs->pc == JSOP_STOP);
-        if (*cx->regs->pc == JSOP_RETURN)
-            typeMap[downPostSlots] = determineSlotType(&stackval(-1));
-        else
-            typeMap[downPostSlots] = JSVAL_TYPE_UNDEFINED;
-    } else {
-        typeMap[downPostSlots] = anchor->stackTypeMap()[anchor->numStackSlots - 1];
-    }
-    determineGlobalTypes(&typeMap[exit->numStackSlots]);
-#if defined JS_JIT_SPEW
-    TreevisLogExit(cx, exit);
-#endif
-
-    /*
-     * Return values are tricky because there are two cases. Anchoring off a
-     * slurp failure (the second case) means the return value has already been
-     * moved. However it can still be promoted to link trees together, so we
-     * load it from the new location.
-     *
-     * In all other cases, the return value lives in the tracker and it can be
-     * grabbed safely.
-     */
-    LIns* rval_ins;
-    intptr_t offset = exit->sp_adj - sizeof(double);
-    JSValueType returnType = exit->stackTypeMap()[downPostSlots];
-
-    if (!anchor || anchor->exitType != RECURSIVE_SLURP_FAIL_EXIT) {
-        /*
-         * It is safe to read cx->regs->pc here because the frame hasn't
-         * been popped yet. We're guaranteed to have a return or stop.
-         */
-        JSOp op = JSOp(*cx->regs->pc);
-        JS_ASSERT(op == JSOP_RETURN || op == JSOP_STOP);
-
-        if (op == JSOP_RETURN) {
-            rval_ins = get(&stackval(-1));
-            if (returnType == JSVAL_TYPE_INT32) {
-                JS_ASSERT(determineSlotType(&stackval(-1)) == JSVAL_TYPE_INT32);
-                JS_ASSERT(isPromoteInt(rval_ins));
-                rval_ins = demote(lir, rval_ins);
-            }
-        } else {
-            rval_ins = INS_UNDEFINED();
-        }
-
-        /*
-         * The return value must be written out early, before slurping can fail,
-         * otherwise it will not be available when there's a type mismatch.
-         */
-        lir->insStore(rval_ins, lirbuf->sp, offset, ACCSET_STACK);
-    } else {
-        switch (returnType)
-        {
-          case JSVAL_TYPE_BOOLEAN:
-          case JSVAL_TYPE_UNDEFINED:
-          case JSVAL_TYPE_INT32:
-            rval_ins = lir->insLoad(LIR_ldi, lirbuf->sp, offset, ACCSET_STACK);
-            break;
-          case JSVAL_TYPE_DOUBLE:
-            rval_ins = lir->insLoad(LIR_ldd, lirbuf->sp, offset, ACCSET_STACK);
-            break;
-          case JSVAL_TYPE_FUNOBJ:
-          case JSVAL_TYPE_NONFUNOBJ:
-          case JSVAL_TYPE_STRING:
-          case JSVAL_TYPE_NULL:
-            rval_ins = lir->insLoad(LIR_ldp, lirbuf->sp, offset, ACCSET_STACK);
-            break;
-          default:
-            JS_NOT_REACHED("unknown type");
-            RETURN_STOP_A("unknown type");
-        }
-    }
-
-    /* Slurp */
-    SlurpInfo info;
-    info.curSlot = 0;
-    info.exit = exit;
-    info.typeMap = typeMap;
-    info.slurpFailSlot = (anchor && anchor->exitType == RECURSIVE_SLURP_FAIL_EXIT) ?
-                         anchor->slurpFailSlot : 0;
-
-    JSStackFrame *const fp = i.fp();
-
-    /* callee */
-    slurpSlot(argv_ins, -2 * ptrdiff_t(sizeof(Value)), &fp->argv[-2], &info);
-    /* this */
-    slurpSlot(argv_ins, -1 * ptrdiff_t(sizeof(Value)), &fp->argv[-1], &info);
-    /* args[0..n] */
-    for (unsigned i = 0; i < JS_MAX(fp->numActualArgs(), fp->numFormalArgs()); i++)
-        slurpSlot(argv_ins, i * sizeof(Value), &fp->argv[i], &info);
-    /* argsobj */
-    slurpFrameObjPtrSlot(fp_ins, JSStackFrame::offsetArgsObj(), fp->addressArgsObj(), &info);
-    /* scopeChain */
-    slurpFrameObjPtrSlot(fp_ins, JSStackFrame::offsetScopeChain(), fp->addressScopeChain(), &info);
-    /* vars */
-    LIns* slots_ins = addName(lir->ins2(LIR_addp, fp_ins, INS_CONSTWORD(sizeof(JSStackFrame))),
-                              "slots");
-    for (unsigned i = 0; i < fp->getFixedCount(); i++)
-        slurpSlot(slots_ins, i * sizeof(Value), &fp->slots()[i], &info);
-    /* stack vals */
-    unsigned nfixed = fp->getFixedCount();
-    Value* stack = fp->base();
-    LIns* stack_ins = addName(lir->ins2(LIR_addp,
-                                        slots_ins,
-                                        INS_CONSTWORD(nfixed * sizeof(Value))),
-                              "stackBase");
-
-    size_t limit = size_t(i.sp() - fp->base());
-    if (anchor && anchor->exitType == RECURSIVE_SLURP_FAIL_EXIT)
-        limit--;
-    else
-        limit -= fp->numFormalArgs() + 2;
-    for (size_t i = 0; i < limit; i++)
-        slurpSlot(stack_ins, i * sizeof(Value), &stack[i], &info);
-
-    JS_ASSERT(info.curSlot == downPostSlots);
-
-    /* Jump back to the start */
-    exit = copy(exit);
-    exit->exitType = UNSTABLE_LOOP_EXIT;
-#if defined JS_JIT_SPEW
-    TreevisLogExit(cx, exit);
-#endif
-
-    RecursiveSlotMap slotMap(*this, downPostSlots, rval_ins);
-    for (unsigned i = 0; i < downPostSlots; i++)
-        slotMap.addSlot(typeMap[i]);
-    if (*cx->regs->pc == JSOP_RETURN)
-        slotMap.addSlot(&stackval(-1), typeMap[downPostSlots]);
-    else
-        slotMap.addSlot(JSVAL_TYPE_UNDEFINED);
-    VisitGlobalSlots(slotMap, cx, *tree->globalSlots);
-    debug_only_print0(LC_TMTracer, "Compiling up-recursive slurp...\n");
-    exit = copy(exit);
-    if (exit->recursive_pc == fragment->root->ip)
-        exit->exitType = UNSTABLE_LOOP_EXIT;
-    else
-        exit->exitType = RECURSIVE_UNLINKED_EXIT;
-    debug_only_printf(LC_TMTreeVis, "TREEVIS CHANGEEXIT EXIT=%p TYPE=%s\n", (void*)exit,
-                      getExitName(exit->exitType));
-    JS_ASSERT(tree->recursion >= Recursion_Unwinds);
-    return closeLoop(slotMap, exit);
-}
-
-class ImportFrameSlotsVisitor : public SlotVisitorBase
-{
-    TraceRecorder &mRecorder;
-  public:
-    ImportFrameSlotsVisitor(TraceRecorder &recorder) : mRecorder(recorder)
-    {}
-
-    JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
-    visitStackSlots(Value *vp, size_t count, JSStackFrame* fp) {
-        /* N.B. vp may point to a JSObject*. */
-        for (size_t i = 0; i < count; ++i)
-            mRecorder.get(vp++);
-        return true;
-    }
-
-    JS_REQUIRES_STACK JS_ALWAYS_INLINE bool
-    visitFrameObjPtr(JSObject **p, JSStackFrame* fp) {
-        /* visitStackSlots only uses the address of its argument. */
-        return visitStackSlots((Value *)p, 1, fp);
-    }
-};
-
-JS_REQUIRES_STACK AbortableRecordingStatus
-TraceRecorder::downRecursion()
-{
-    JSStackFrame* fp = cx->fp();
-    JSScript *script = fp->getScript();
-    if ((jsbytecode*)fragment->ip < script->code ||
-        (jsbytecode*)fragment->ip >= script->code + script->length) {
-        RETURN_STOP_A("inner recursive call must compile first");
-    }
-
-    /* Adjust the stack by the budget the down-frame needs. */
-    int slots = NativeStackSlots(cx, 1) - NativeStackSlots(cx, 0);
-    JS_ASSERT(unsigned(slots) ==
-              NativeStackSlots(cx, 1) - fp->numActualArgs() - 2 - fp->getFixedCount() - 2);
-
-    /* Guard that there is enough stack space. */
-    JS_ASSERT(tree->maxNativeStackSlots >= tree->nativeStackBase / sizeof(double));
-    int guardSlots = slots + tree->maxNativeStackSlots -
-                     tree->nativeStackBase / sizeof(double);
-    LIns* sp_top = lir->ins2(LIR_addp, lirbuf->sp, lir->insImmWord(guardSlots * sizeof(double)));
-    guard(true, lir->ins2(LIR_ltp, sp_top, eos_ins), OOM_EXIT);
-
-    /* Guard that there is enough call stack space. */
-    LIns* rp_top = lir->ins2(LIR_addp, lirbuf->rp,
-                             lir->insImmWord((tree->maxCallDepth + 1) * sizeof(FrameInfo*)));
-    guard(true, lir->ins2(LIR_ltp, rp_top, eor_ins), OOM_EXIT);
-
-    /*
-     * For every slot in the new frame that is not in the tracker, create a load
-     * in the tracker. This is necessary because otherwise snapshot() will see
-     * missing imports and use the down frame, rather than the new frame.
-     * This won't affect performance because the loads will be killed if not
-     * used.
-     */
-    ImportFrameSlotsVisitor visitor(*this);
-    VisitStackSlots(visitor, cx, callDepth);
-
-    /* Add space for a new JIT frame. */
-    lirbuf->sp = lir->ins2(LIR_addp, lirbuf->sp, lir->insImmWord(slots * sizeof(double)));
-    lir->insStore(lirbuf->sp, lirbuf->state, offsetof(TracerState, sp), ACCSET_OTHER);
-    lirbuf->rp = lir->ins2(LIR_addp, lirbuf->rp, lir->insImmWord(sizeof(FrameInfo*)));
-    lir->insStore(lirbuf->rp, lirbuf->state, offsetof(TracerState, rp), ACCSET_OTHER);
-    --callDepth;
-    clearCurrentFrameSlotsFromTracker(nativeFrameTracker);
-
-    /*
-     * If the callee and caller have identical call sites, this is a down-
-     * recursive loop. Otherwise something special happened. For example, a
-     * recursive call that is unwinding could nest back down recursively again.
-     * In this case, we build a fragment that ideally we'll never invoke
-     * directly, but link from a down-recursive branch. The UNLINKED_EXIT tells
-     * closeLoop() that the peer trees should match the recursive pc, not the
-     * tree pc.
-     */
-    VMSideExit* exit;
-    if ((jsbytecode*)fragment->root->ip == script->code)
-        exit = snapshot(UNSTABLE_LOOP_EXIT);
-    else
-        exit = snapshot(RECURSIVE_UNLINKED_EXIT);
-    exit->recursive_pc = script->code;
-    debug_only_print0(LC_TMTracer, "Compiling down-recursive function call.\n");
-    JS_ASSERT(tree->recursion != Recursion_Disallowed);
-    tree->recursion = Recursion_Detected;
-    return closeLoop(exit);
-}
-
-#if JS_BITS_PER_WORD == 32
-JS_REQUIRES_STACK inline LIns*
-TraceRecorder::slurpDoubleSlot(LIns* addr_ins, ptrdiff_t offset, VMSideExit* exit)
-{
-    LIns* tag_ins = lir->insLoad(LIR_ldi, addr_ins, offset + sTagOffset, ACCSET_OTHER);
-    return unbox_number_as_double(addr_ins, offset, tag_ins, exit, ACCSET_OTHER);
-}
-
-JS_REQUIRES_STACK LIns*
-TraceRecorder::slurpObjectSlot(LIns* addr_ins, ptrdiff_t offset, JSValueType type, VMSideExit* exit)
-{
-    LIns* tag_ins = lir->insLoad(LIR_ldi, addr_ins, offset + sTagOffset, ACCSET_OTHER);
-    return unbox_object(addr_ins, offset, tag_ins, type, exit, ACCSET_OTHER);
-}
-
-JS_REQUIRES_STACK inline LIns*
-TraceRecorder::slurpNonDoubleObjectSlot(LIns* addr_ins, ptrdiff_t offset, JSValueType type, VMSideExit* exit)
-{
-    LIns* tag_ins = lir->insLoad(LIR_ldi, addr_ins, offset + sTagOffset, ACCSET_OTHER);
-    return unbox_non_double_object(addr_ins, offset, tag_ins, type, exit, ACCSET_OTHER);
-}
-#elif JS_BITS_PER_WORD == 64
-JS_REQUIRES_STACK inline LIns*
-TraceRecorder::slurpDoubleSlot(LIns* addr_ins, ptrdiff_t offset, VMSideExit* exit)
-{
-    LIns* v_ins = lir->insLoad(LIR_ldq, addr_ins, offset, ACCSET_OTHER);
-    return unbox_number_as_double(v_ins, exit);
-}
-
-JS_REQUIRES_STACK LIns*
-TraceRecorder::slurpObjectSlot(LIns* addr_ins, ptrdiff_t offset, JSValueType type, VMSideExit* exit)
-{
-    LIns* v_ins = lir->insLoad(LIR_ldq, addr_ins, offset, ACCSET_OTHER);
-    return unbox_object(v_ins, type, exit);
-}
-
-JS_REQUIRES_STACK inline LIns*
-TraceRecorder::slurpNonDoubleObjectSlot(LIns* addr_ins, ptrdiff_t offset, JSValueType type, VMSideExit* exit)
-{
-    LIns* v_ins = lir->insLoad(LIR_ldq, addr_ins, offset, ACCSET_OTHER);
-    return unbox_non_double_object(v_ins, type, exit);
-}
-#endif
-
-JS_REQUIRES_STACK inline LIns*
-TraceRecorder::slurpSlot(LIns* addr_ins, ptrdiff_t offset, Value* vp, VMSideExit* exit)
-{
-    if (exit->slurpType == JSVAL_TYPE_DOUBLE)
-        return slurpDoubleSlot(addr_ins, offset, exit);
-    if (exit->slurpType == JSVAL_TYPE_FUNOBJ || exit->slurpType == JSVAL_TYPE_NONFUNOBJ)
-        return slurpObjectSlot(addr_ins, offset, exit->slurpType, exit);
-    JSValueType type = exit->slurpType;
-    return slurpNonDoubleObjectSlot(addr_ins, offset, type, exit);
-}
-
-JS_REQUIRES_STACK void
-TraceRecorder::slurpSlot(LIns* addr_ins, ptrdiff_t offset, Value* vp, SlurpInfo* info)
-{
-    /* Don't re-read slots that aren't needed. */
-    if (info->curSlot < info->slurpFailSlot) {
-        info->curSlot++;
-        return;
-    }
-    VMSideExit* exit = copy(info->exit);
-    exit->slurpFailSlot = info->curSlot;
-    exit->slurpType = info->typeMap[info->curSlot];
-
-    /* Make sure that we don't try and record infinity branches */
-    JS_ASSERT_IF(anchor && anchor->exitType == RECURSIVE_SLURP_FAIL_EXIT &&
-                 info->curSlot == info->slurpFailSlot,
-                 anchor->slurpType != exit->slurpType);
-
-    LIns* val = slurpSlot(addr_ins, offset, vp, exit);
-    lir->insStore(val,
-                  lirbuf->sp,
-                  -tree->nativeStackBase + ptrdiff_t(info->curSlot) * sizeof(double),
-                  ACCSET_STACK);
-    info->curSlot++;
-}
-
-JS_REQUIRES_STACK void
-TraceRecorder::slurpFrameObjPtrSlot(LIns* addr_ins, ptrdiff_t offset, JSObject** p, SlurpInfo* info)
-{
-    /* Don't re-read slots that aren't needed. */
-    if (info->curSlot < info->slurpFailSlot) {
-        info->curSlot++;
-        return;
-    }
-    VMSideExit* exit = copy(info->exit);
-    exit->slurpFailSlot = info->curSlot;
-    exit->slurpType = info->typeMap[info->curSlot];
-
-    /* Make sure that we don't try and record infinity branches */
-    JS_ASSERT_IF(anchor && anchor->exitType == RECURSIVE_SLURP_FAIL_EXIT &&
-                 info->curSlot == info->slurpFailSlot,
-                 anchor->slurpType != exit->slurpType);
-
-    LIns *val;
-    LIns *ptr_val = lir->insLoad(LIR_ldp, addr_ins, offset, ACCSET_OTHER);
-    LIns *ptr_is_null_ins = lir->insEqP_0(ptr_val);
-    if (exit->slurpType == JSVAL_TYPE_NULL) {
-        guard(true, ptr_is_null_ins, exit);
-        val = INS_NULL();
-    } else {
-        JS_ASSERT(exit->slurpType == JSVAL_TYPE_NONFUNOBJ);
-        guard(false, ptr_is_null_ins, exit);
-        val = ptr_val;
-    }
-
-    lir->insStore(val,
-                  lirbuf->sp,
-                  -tree->nativeStackBase + ptrdiff_t(info->curSlot) * sizeof(double),
-                  ACCSET_STACK);
-    info->curSlot++;
-}
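The offset derivation in UpRecursiveSlotMap::adjustTail above is easy to sanity-check with concrete numbers. A minimal standalone sketch, assuming downPostSlots = 3 and a 64-byte nativeStackBase (both values hypothetical):

    #include <cstdio>

    int main() {
        const long downPostSlots = 3;      // assumed guard value
        const long nativeStackBase = 64;   // assumed, in bytes
        const long slotBytes = long(downPostSlots * sizeof(double));   // 24
        // Return-value offset before the frame pops:
        long before = -nativeStackBase + slotBytes;                    // -40
        // Adjusting down one frame subtracts exactly what down recursion added:
        long after = before - slotBytes;                               // -64
        std::printf("%ld == %ld\n", after, -nativeStackBase);          // -64 == -64
        return 0;
    }

The two slot terms cancel, which is why the store lands at -tree->nativeStackBase, just above the down frame's highest sp.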
@@ -1011,16 +1011,10 @@ js_NewScriptFromCG(JSContext *cx, JSCodeGenerator *cg)
     if (prologLength + mainLength <= 3) {
         /*
          * Check very short scripts to see whether they are "empty" and return
-         * the const empty-script singleton if so. We are deliberately flexible
-         * about whether JSOP_TRACE is in the prolog.
+         * the const empty-script singleton if so.
         */
         jsbytecode *pc = prologLength ? CG_PROLOG_BASE(cg) : CG_BASE(cg);
 
-        if (JSOp(*pc) == JSOP_TRACE) {
-            ++pc;
-            if (pc == CG_PROLOG_BASE(cg) + prologLength)
-                pc = CG_BASE(cg);
-        }
         if ((cg->flags & TCF_NO_SCRIPT_RVAL) && JSOp(*pc) == JSOP_FALSE)
             ++pc;
 
@@ -76,8 +76,6 @@ JSScript::isEmpty() const
     if (length <= 3) {
         jsbytecode *pc = code;
 
-        if (JSOp(*pc) == JSOP_TRACE)
-            ++pc;
         if (noScriptRval && JSOp(*pc) == JSOP_FALSE)
             ++pc;
         if (JSOp(*pc) == JSOP_STOP)
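A minimal sketch of the post-patch predicate, paraphrasing the hunk above (jsbytecode/JSOp and the opcode constants come from the surrounding SpiderMonkey headers; the helper name is hypothetical):

    static bool isEmptyLike(jsbytecode *pc, size_t length, bool noScriptRval) {
        if (length > 3)
            return false;
        // No leading JSOP_TRACE to skip any more.
        if (noScriptRval && JSOp(*pc) == JSOP_FALSE)
            ++pc;                          // empty script that returns false
        return JSOp(*pc) == JSOP_STOP;     // e.g. JSOP_STOP, or JSOP_FALSE JSOP_STOP
    }
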
@@ -315,9 +315,6 @@ ValueToTypeChar(const Value &v)
 /* Maximum number of peer trees allowed. */
 #define MAXPEERS 9
 
-/* Max number of hits to a RECURSIVE_UNLINKED exit before we trash the tree. */
-#define MAX_RECURSIVE_UNLINK_HITS 64
-
 /* Max call depths for inlining. */
 #define MAX_CALLDEPTH 10
 
@@ -1388,14 +1385,8 @@ static void
 Blacklist(jsbytecode* pc)
 {
     AUDIT(blacklisted);
-    JS_ASSERT(*pc == JSOP_TRACE || *pc == JSOP_NOP || *pc == JSOP_CALL);
-    if (*pc == JSOP_CALL) {
-        JS_ASSERT(*(pc + JSOP_CALL_LENGTH) == JSOP_TRACE ||
-                  *(pc + JSOP_CALL_LENGTH) == JSOP_NOP);
-        *(pc + JSOP_CALL_LENGTH) = JSOP_NOP;
-    } else if (*pc == JSOP_TRACE) {
-        *pc = JSOP_NOP;
-    }
+    JS_ASSERT(*pc == JSOP_TRACE || *pc == JSOP_NOP);
+    *pc = JSOP_NOP;
 }
 
 static bool
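Without the JSOP_CALL special case for recursion hints, blacklisting and its undo become a symmetric opcode toggle on the loop header. An illustrative sequence (pc hypothetical):

    Blacklist(pc);       // *pc: JSOP_TRACE -> JSOP_NOP, loop stops triggering recording
    // ...later, AttemptCompilation (two hunks below) revives the site:
    //     *pc = JSOP_TRACE;
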
@@ -1547,7 +1538,6 @@ TreeFragment::initialize(JSContext* cx, SlotList *globalSlots, bool speculate)
     this->treePCOffset = FramePCOffset(cx, cx->fp());
 #endif
     this->script = cx->fp()->getScript();
-    this->recursion = Recursion_None;
     this->gcthings.clear();
     this->sprops.clear();
     this->unstableExits = NULL;
@@ -1604,9 +1594,8 @@ AttemptCompilation(JSContext *cx, JSObject* globalObj, jsbytecode* pc, uint32 ar
     TraceMonitor *tm = &JS_TRACE_MONITOR(cx);
 
     /* If we already permanently blacklisted the location, undo that. */
-    JS_ASSERT(*pc == JSOP_NOP || *pc == JSOP_TRACE || *pc == JSOP_CALL);
-    if (*pc == JSOP_NOP)
-        *pc = JSOP_TRACE;
+    JS_ASSERT(*pc == JSOP_NOP || *pc == JSOP_TRACE);
+    *pc = JSOP_TRACE;
     ResetRecordingAttempts(cx, pc);
 
     /* Breathe new life into all peer fragments at the designated loop header. */
@@ -2225,13 +2214,12 @@ JS_REQUIRES_STACK
 TraceRecorder::TraceRecorder(JSContext* cx, VMSideExit* anchor, VMFragment* fragment,
                              unsigned stackSlots, unsigned ngslots, JSValueType* typeMap,
                              VMSideExit* innermost, jsbytecode* outer, uint32 outerArgc,
-                             RecordReason recordReason, bool speculate)
+                             bool speculate)
   : cx(cx),
     traceMonitor(&JS_TRACE_MONITOR(cx)),
     oracle(speculate ? JS_TRACE_MONITOR(cx).oracle : NULL),
     fragment(fragment),
     tree(fragment->root),
-    recordReason(recordReason),
    globalObj(tree->globalObj),
     outer(outer),
     outerArgc(outerArgc),
@@ -2404,10 +2392,6 @@ TraceRecorder::TraceRecorder(JSContext* cx, VMSideExit* anchor, VMFragment* frag
     /* read into registers all values on the stack and all globals we know so far */
     import(tree, lirbuf->sp, stackSlots, ngslots, callDepth, typeMap);
 
-    /* Finish handling RECURSIVE_SLURP_FAIL_EXIT in startRecorder. */
-    if (anchor && anchor->exitType == RECURSIVE_SLURP_FAIL_EXIT)
-        return;
-
     if (fragment == fragment->root) {
         /*
          * We poll the operation callback request flag. It is updated asynchronously whenever
@@ -3042,19 +3026,16 @@ class FlushNativeStackFrameVisitor : public SlotVisitorBase
     const JSValueType *mTypeMap;
     double *mStack;
     Value *mStop;
-    unsigned mIgnoreSlots;
   public:
     FlushNativeStackFrameVisitor(JSContext *cx,
                                  const JSValueType *typeMap,
                                  double *stack,
-                                 Value *stop,
-                                 unsigned ignoreSlots) :
+                                 Value *stop) :
         mCx(cx),
         mInitTypeMap(typeMap),
         mTypeMap(typeMap),
         mStack(stack),
-        mStop(stop),
-        mIgnoreSlots(ignoreSlots)
+        mStop(stop)
     {}
 
     const JSValueType* getTypeMap()
@@ -3069,8 +3050,7 @@ public:
         if (vp == mStop)
             return false;
         debug_only_printf(LC_TMTracer, "%s%u=", stackSlotKind(), unsigned(i));
-        if (unsigned(mTypeMap - mInitTypeMap) >= mIgnoreSlots)
-            NativeToValue(mCx, *vp, *mTypeMap, mStack);
+        NativeToValue(mCx, *vp, *mTypeMap, mStack);
         vp++;
         mTypeMap++;
         mStack++;
@@ -3084,22 +3064,20 @@ public:
         if ((Value *)p == mStop)
             return false;
         debug_only_printf(LC_TMTracer, "%s%u=", stackSlotKind(), 0);
-        if (unsigned(mTypeMap - mInitTypeMap) >= mIgnoreSlots) {
-            *p = *(JSObject **)mStack;
+        *p = *(JSObject **)mStack;
 #ifdef DEBUG
-            JSValueType type = *mTypeMap;
-            if (type == JSVAL_TYPE_NULL) {
-                debug_only_print0(LC_TMTracer, "null ");
-            } else {
-                JS_ASSERT(type == JSVAL_TYPE_NONFUNOBJ);
-                JS_ASSERT(!(*p)->isFunction());
-                debug_only_printf(LC_TMTracer,
-                                  "object<%p:%s> ",
-                                  (void*) *p,
-                                  (*p)->getClass()->name);
-            }
-#endif
-        }
+        JSValueType type = *mTypeMap;
+        if (type == JSVAL_TYPE_NULL) {
+            debug_only_print0(LC_TMTracer, "null ");
+        } else {
+            JS_ASSERT(type == JSVAL_TYPE_NONFUNOBJ);
+            JS_ASSERT(!(*p)->isFunction());
+            debug_only_printf(LC_TMTracer,
+                              "object<%p:%s> ",
+                              (void*) *p,
+                              (*p)->getClass()->name);
+        }
+#endif
         mTypeMap++;
         mStack++;
         return true;
@@ -3436,12 +3414,12 @@ GetClosureVar(JSContext* cx, JSObject* callee, const ClosureVarInfo* cv, double*
  */
 static JS_REQUIRES_STACK int
 FlushNativeStackFrame(JSContext* cx, unsigned callDepth, const JSValueType* mp, double* np,
-                      JSStackFrame* stopFrame, unsigned ignoreSlots)
+                      JSStackFrame* stopFrame)
 {
     Value* stopAt = stopFrame ? &stopFrame->argv[-2] : NULL;
 
     /* Root all string and object references first (we don't need to call the GC for this). */
-    FlushNativeStackFrameVisitor visitor(cx, mp, np, stopAt, ignoreSlots);
+    FlushNativeStackFrameVisitor visitor(cx, mp, np, stopAt);
     VisitStackSlots(visitor, cx, callDepth);
 
     // Restore thisv from the now-restored argv[-1] in each pending frame.
@@ -3657,11 +3635,8 @@ TraceRecorder::import(TreeFragment* tree, LIns* sp, unsigned stackSlots, unsigne
      * Check whether there are any values on the stack we have to unbox and do
      * that first before we waste any time fetching the state from the stack.
      */
-    if (!anchor || anchor->exitType != RECURSIVE_SLURP_FAIL_EXIT) {
-        ImportBoxedStackSlotVisitor boxedStackVisitor(*this, sp, -tree->nativeStackBase, typeMap);
-        VisitStackSlots(boxedStackVisitor, cx, callDepth);
-    }
+    ImportBoxedStackSlotVisitor boxedStackVisitor(*this, sp, -tree->nativeStackBase, typeMap);
+    VisitStackSlots(boxedStackVisitor, cx, callDepth);
 
     /*
      * Remember the import type map so we can lazily import later whatever
@@ -4882,8 +4857,7 @@ TraceRecorder::closeLoop(SlotMap& slotMap, VMSideExit* exit)
      * to be in an imacro here and the opcode should be either JSOP_TRACE or, in
      * case this loop was blacklisted in the meantime, JSOP_NOP.
      */
-    JS_ASSERT((*cx->regs->pc == JSOP_TRACE || *cx->regs->pc == JSOP_NOP ||
-               *cx->regs->pc == JSOP_RETURN || *cx->regs->pc == JSOP_STOP) &&
+    JS_ASSERT((*cx->regs->pc == JSOP_TRACE || *cx->regs->pc == JSOP_NOP) &&
               !cx->fp()->hasIMacroPC());
 
     if (callDepth != 0) {
@@ -4896,9 +4870,6 @@ TraceRecorder::closeLoop(SlotMap& slotMap, VMSideExit* exit)
 
     JS_ASSERT_IF(exit->exitType == UNSTABLE_LOOP_EXIT,
                  exit->numStackSlots == tree->nStackTypes);
-    JS_ASSERT_IF(exit->exitType != UNSTABLE_LOOP_EXIT, exit->exitType == RECURSIVE_UNLINKED_EXIT);
-    JS_ASSERT_IF(exit->exitType == RECURSIVE_UNLINKED_EXIT,
-                 exit->recursive_pc != tree->ip);
 
     JS_ASSERT(fragment->root == tree);
 
@@ -4909,9 +4880,7 @@ TraceRecorder::closeLoop(SlotMap& slotMap, VMSideExit* exit)
     if (exit->exitType == UNSTABLE_LOOP_EXIT)
         consensus = selfTypeStability(slotMap);
     if (consensus != TypeConsensus_Okay) {
-        const void* ip = exit->exitType == RECURSIVE_UNLINKED_EXIT ?
-                         exit->recursive_pc : tree->ip;
-        TypeConsensus peerConsensus = peerTypeStability(slotMap, ip, &peer);
+        TypeConsensus peerConsensus = peerTypeStability(slotMap, tree->ip, &peer);
         /* If there was a semblance of a stable peer (even if not linkable), keep the result. */
         if (peerConsensus != TypeConsensus_Bad)
             consensus = peerConsensus;
@@ -5087,11 +5056,6 @@ TraceRecorder::joinEdgesToEntry(TreeFragment* peer_root)
                 continue;
             UnstableExit* uexit = peer->unstableExits;
             while (uexit != NULL) {
-                /* :TODO: these exits go somewhere else. */
-                if (uexit->exit->exitType == RECURSIVE_UNLINKED_EXIT) {
-                    uexit = uexit->next;
-                    continue;
-                }
                 /* Build the full typemap for this unstable exit */
                 FullMapFromExit(typeMap, uexit->exit);
                 /* Check its compatibility against this tree */
@@ -5151,9 +5115,6 @@ TraceRecorder::endLoop(VMSideExit* exit)
         return ARECORD_STOP;
     }
 
-    if (recordReason != Record_Branch)
-        RETURN_STOP_A("control flow should have been recursive");
-
     fragment->lastIns =
         lir->insGuard(LIR_x, NULL, createGuardRecord(exit));
 
@@ -5613,33 +5574,20 @@ bool JS_REQUIRES_STACK
 TraceRecorder::startRecorder(JSContext* cx, VMSideExit* anchor, VMFragment* f,
                              unsigned stackSlots, unsigned ngslots,
                              JSValueType* typeMap, VMSideExit* expectedInnerExit,
-                             jsbytecode* outer, uint32 outerArgc, RecordReason recordReason,
-                             bool speculate)
+                             jsbytecode* outer, uint32 outerArgc, bool speculate)
 {
     TraceMonitor *tm = &JS_TRACE_MONITOR(cx);
     JS_ASSERT(!tm->needFlush);
     JS_ASSERT_IF(cx->fp()->hasIMacroPC(), f->root != f);
 
     tm->recorder = new TraceRecorder(cx, anchor, f, stackSlots, ngslots, typeMap,
-                                     expectedInnerExit, outer, outerArgc, recordReason,
-                                     speculate);
+                                     expectedInnerExit, outer, outerArgc, speculate);
 
     if (!tm->recorder || tm->outOfMemory() || OverfullJITCache(tm)) {
         ResetJIT(cx, FR_OOM);
         return false;
     }
 
-    /*
-     * If slurping failed, there's no reason to start recording again. Emit LIR
-     * to capture the rest of the slots, then immediately compile and finish.
-     */
-    if (anchor && anchor->exitType == RECURSIVE_SLURP_FAIL_EXIT) {
-        tm->recorder->slurpDownFrames((jsbytecode*)anchor->recursive_pc - JSOP_CALL_LENGTH);
-        if (tm->recorder)
-            tm->recorder->finishAbort("Failed to slurp down frames");
-        return false;
-    }
-
     return true;
 }
 
@@ -5813,7 +5761,7 @@ SynthesizeSlowNativeFrame(TracerState& state, JSContext *cx, VMSideExit *exit)
 
 static JS_REQUIRES_STACK bool
 RecordTree(JSContext* cx, TreeFragment* first, jsbytecode* outer,
-           uint32 outerArgc, SlotList* globalSlots, RecordReason reason)
+           uint32 outerArgc, SlotList* globalSlots)
 {
     TraceMonitor* tm = &JS_TRACE_MONITOR(cx);
 
@@ -5875,8 +5823,7 @@ RecordTree(JSContext* cx, TreeFragment* first, jsbytecode* outer,
     return TraceRecorder::startRecorder(cx, NULL, f, f->nStackTypes,
                                         f->globalSlots->length(),
                                         f->typeMap.data(), NULL,
-                                        outer, outerArgc, reason,
-                                        speculate);
+                                        outer, outerArgc, speculate);
 }
 
 static JS_REQUIRES_STACK TypeConsensus
@@ -5895,12 +5842,6 @@ FindLoopEdgeTarget(JSContext* cx, VMSideExit* exit, TreeFragment** peerp)
     uint16* gslots = from->globalSlots->data();
     for (unsigned i = 0; i < typeMap.length(); i++) {
         if (typeMap[i] == JSVAL_TYPE_DOUBLE) {
-            if (exit->exitType == RECURSIVE_UNLINKED_EXIT) {
-                if (i < exit->numStackSlots)
-                    oracle->markStackSlotUndemotable(cx, i, exit->recursive_pc);
-                else
-                    oracle->markGlobalSlotUndemotable(cx, gslots[i - exit->numStackSlots]);
-            }
             if (i < from->nStackTypes)
                 oracle->markStackSlotUndemotable(cx, i, from->ip);
             else if (i >= exit->numStackSlots)
@@ -5908,16 +5849,9 @@ FindLoopEdgeTarget(JSContext* cx, VMSideExit* exit, TreeFragment** peerp)
         }
     }
 
-    JS_ASSERT(exit->exitType == UNSTABLE_LOOP_EXIT ||
-              (exit->exitType == RECURSIVE_UNLINKED_EXIT && exit->recursive_pc));
+    JS_ASSERT(exit->exitType == UNSTABLE_LOOP_EXIT);
 
-    TreeFragment* firstPeer = NULL;
-    if (exit->exitType == UNSTABLE_LOOP_EXIT || exit->recursive_pc == from->ip) {
-        firstPeer = from->first;
-    } else {
-        firstPeer = LookupLoop(&JS_TRACE_MONITOR(cx), exit->recursive_pc, from->globalObj,
-                               from->globalShape, from->argc);
-    }
+    TreeFragment* firstPeer = from->first;
 
     for (TreeFragment* peer = firstPeer; peer; peer = peer->peer) {
         if (!peer->code())
@@ -5973,31 +5907,15 @@ AttemptToStabilizeTree(JSContext* cx, JSObject* globalObj, VMSideExit* exit, jsb
         return false;
     }
 
-    uint32 globalShape = from->globalShape;
     SlotList *globalSlots = from->globalSlots;
 
-    /* Don't bother recording if the exit doesn't expect this PC */
-    if (exit->exitType == RECURSIVE_UNLINKED_EXIT) {
-        if (++exit->hitcount >= MAX_RECURSIVE_UNLINK_HITS) {
-            Blacklist((jsbytecode*)from->ip);
-            TrashTree(cx, from);
-            return false;
-        }
-        if (exit->recursive_pc != cx->regs->pc)
-            return false;
-        from = LookupLoop(tm, exit->recursive_pc, globalObj, globalShape, numActualArgs(cx->fp()));
-        if (!from)
-            return false;
-        /* use stale TI for RecordTree - since from might not have one anymore. */
-    }
-
     JS_ASSERT(from == from->root);
 
     /* If this tree has been blacklisted, don't try to record a new one. */
     if (*(jsbytecode*)from->ip == JSOP_NOP)
         return false;
 
-    return RecordTree(cx, from->first, outer, outerArgc, globalSlots, Record_Branch);
+    return RecordTree(cx, from->first, outer, outerArgc, globalSlots);
 }
 
 static JS_REQUIRES_STACK VMFragment*
@@ -6113,7 +6031,7 @@ AttemptToExtendTree(JSContext* cx, VMSideExit* anchor, VMSideExit* exitedFrom, j
     JS_ASSERT(ngslots >= anchor->numGlobalSlots);
     bool rv = TraceRecorder::startRecorder(cx, anchor, c, stackSlots, ngslots, typeMap,
                                            exitedFrom, outer, numActualArgs(cx->fp()),
-                                           Record_Branch, hits < maxHits);
+                                           hits < maxHits);
 #ifdef MOZ_TRACEVIS
     if (!rv && tvso)
         tvso->r = R_FAIL_EXTEND_START;
@@ -6192,7 +6110,7 @@ TraceRecorder::recordLoopEdge(JSContext* cx, TraceRecorder* r, uintN& inlineCall
             JS_ASSERT(numActualArgs(cx->fp()) == first->argc);
             AbortRecording(cx, "No compatible inner tree");
 
-            return RecordingIfTrue(RecordTree(cx, first, outer, outerArgc, globalSlots, Record_Branch));
+            return RecordingIfTrue(RecordTree(cx, first, outer, outerArgc, globalSlots));
         }
 
         AbortableRecordingStatus status = r->attemptTreeCall(f, inlineCallCount);
@ -6210,25 +6128,6 @@ TraceRecorder::recordLoopEdge(JSContext* cx, TraceRecorder* r, uintN& inlineCall
 JS_REQUIRES_STACK AbortableRecordingStatus
 TraceRecorder::attemptTreeCall(TreeFragment* f, uintN& inlineCallCount)
 {
-    /*
-     * It is absolutely forbidden to have recursive loops tree call themselves
-     * because it could accidentally pop frames owned by the parent call, and
-     * there is no way to deal with this yet. We could have to set a "start of
-     * poppable rp stack" variable, and if that unequals "real start of rp stack",
-     * it would be illegal to pop frames.
-     * --
-     * In the interim, just do tree calls knowing that they won't go into
-     * recursive trees that can pop parent frames.
-     */
-    if (f->script == cx->fp()->getScript()) {
-        if (f->recursion >= Recursion_Unwinds) {
-            Blacklist(cx->fp()->getScript()->code);
-            AbortRecording(cx, "Inner tree is an unsupported type of recursion");
-            return ARECORD_ABORTED;
-        }
-        f->recursion = Recursion_Disallowed;
-    }
-
     adjustCallerTypes(f);
     prepareTreeCall(f);
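The guard being deleted above can be restated compactly: an inner tree compiled from the same script as the current frame must never be able to unwind (pop) the caller's frames, so recording either aborts or pins the tree as non-recursive. A self-contained restatement under hypothetical Toy* names, not the engine's actual types:

    enum ToyRecursion { Toy_None, Toy_Disallowed, Toy_Unwinds, Toy_Detected };

    struct ToyTree {
        const void* script;
        ToyRecursion recursion;
    };

    // Returns false when the tree call must be aborted outright.
    static bool ToyMayTreeCall(ToyTree* inner, const void* callerScript) {
        if (inner->script != callerScript)
            return true;                    // different script: always safe
        if (inner->recursion >= Toy_Unwinds)
            return false;                   // could pop caller frames: abort
        inner->recursion = Toy_Disallowed;  // pin: this tree must stay flat
        return true;
    }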
@ -6273,7 +6172,6 @@ TraceRecorder::attemptTreeCall(TreeFragment* f, uintN& inlineCallCount)
     TreeFragment* outerFragment = tree;
     jsbytecode* outer = (jsbytecode*) outerFragment->ip;
     switch (lr->exitType) {
-      case RECURSIVE_LOOP_EXIT:
       case LOOP_EXIT:
         /* If the inner tree exited on an unknown loop exit, grow the tree around it. */
         if (innermostNestedGuard) {

@ -6301,10 +6199,6 @@ TraceRecorder::attemptTreeCall(TreeFragment* f, uintN& inlineCallCount)
       case OVERFLOW_EXIT:
         traceMonitor->oracle->markInstructionUndemotable(cx->regs->pc);
         /* FALL THROUGH */
-      case RECURSIVE_SLURP_FAIL_EXIT:
-      case RECURSIVE_SLURP_MISMATCH_EXIT:
-      case RECURSIVE_MISMATCH_EXIT:
-      case RECURSIVE_EMPTY_RP_EXIT:
       case BRANCH_EXIT:
       case CASE_EXIT: {
         /* Abort recording the outer tree, extend the inner tree. */

@ -6949,20 +6843,6 @@ LeaveTree(TraceMonitor *tm, TracerState& state, VMSideExit* lr)
         return;
     }

-    /* Save the innermost FrameInfo for guardUpRecursion */
-    if (innermost->exitType == RECURSIVE_MISMATCH_EXIT) {
-        /* There should never be a static calldepth for a recursive mismatch. */
-        JS_ASSERT(innermost->calldepth == 0);
-        /* There must be at least one item on the rp stack. */
-        JS_ASSERT(callstack < rp);
-        /* :TODO: don't be all squirrelin' this in here */
-        innermost->recursive_down = *(rp - 1);
-    }
-
-    /* Slurp failure should have no frames */
-    JS_ASSERT_IF(innermost->exitType == RECURSIVE_SLURP_FAIL_EXIT,
-                 innermost->calldepth == 0 && callstack == rp);
-
     while (callstack < rp) {
         FrameInfo* fi = *callstack;
         /* Peek at the callee native slot in the not-yet-synthesized down frame. */
@ -6974,7 +6854,7 @@ LeaveTree(TraceMonitor *tm, TracerState& state, VMSideExit* lr)
         */
        SynthesizeFrame(cx, *fi, callee);
        int slots = FlushNativeStackFrame(cx, 1 /* callDepth */, (*callstack)->get_typemap(),
-                                         stack, cx->fp(), 0);
+                                         stack, cx->fp());
 #ifdef DEBUG
        JSStackFrame* fp = cx->fp();
        debug_only_printf(LC_TMTracer,

@ -7102,15 +6982,12 @@ LeaveTree(TraceMonitor *tm, TracerState& state, VMSideExit* lr)
         globalTypeMap = typeMap.data();
     }

     /* Write back the topmost native stack frame. */
-    unsigned ignoreSlots = innermost->exitType == RECURSIVE_SLURP_FAIL_EXIT ?
-                           innermost->numStackSlots - 1 : 0;
 #ifdef DEBUG
     int slots =
 #endif
         FlushNativeStackFrame(cx, innermost->calldepth,
                               innermost->stackTypeMap(),
-                              stack, NULL, ignoreSlots);
+                              stack, NULL);
     JS_ASSERT(unsigned(slots) == innermost->numStackSlots);

     if (innermost->nativeCalleeWord)
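A note on the FlushNativeStackFrame changes in the two hunks above: the dropped trailing argument (ignoreSlots) was only ever nonzero for RECURSIVE_SLURP_FAIL exits, as the deleted ternary shows, so with recursion gone the parameter disappears from the signature instead of being passed as a constant 0. Shape of the change, with the parameter list reconstructed from the call sites above (treat the exact parameter names as assumptions):

    // before: static int FlushNativeStackFrame(JSContext* cx, unsigned callDepth,
    //                                          const JSValueType* mp, double* stack,
    //                                          JSStackFrame* stopFrame, unsigned ignoreSlots);
    // after:  static int FlushNativeStackFrame(JSContext* cx, unsigned callDepth,
    //                                          const JSValueType* mp, double* stack,
    //                                          JSStackFrame* stopFrame);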
@ -7150,7 +7027,7 @@ ApplyBlacklistHeuristics(JSContext *cx, TreeFragment *tree)
 }

 JS_REQUIRES_STACK MonitorResult
-MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount, RecordReason reason)
+MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount)
 {
 #ifdef MOZ_TRACEVIS
     TraceVisStateObj tvso(cx, S_MONITOR);

@ -7242,7 +7119,7 @@ MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount, RecordReason reason)
      * it will walk the peer list and find us a free slot or allocate a new
      * tree if needed.
      */
-    bool rv = RecordTree(cx, f->first, NULL, 0, globalSlots, reason);
+    bool rv = RecordTree(cx, f->first, NULL, 0, globalSlots);
 #ifdef MOZ_TRACEVIS
     if (!rv)
         tvso.r = R_FAIL_RECORD_TREE;

@ -7273,14 +7150,6 @@ MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount, RecordReason reason)
         return MONITOR_NOT_RECORDING;
     }

-    /*
-     * Trees that only unwind recursive frames usually won't do much work, and
-     * most time will be spent entering and exiting ExecuteTree(). There's no
-     * benefit to doing this until the down-recursive side completes.
-     */
-    if (match->recursion == Recursion_Unwinds)
-        return MONITOR_NOT_RECORDING;
-
     VMSideExit* lr = NULL;
     VMSideExit* innermostNestedGuard = NULL;
@ -7304,7 +7173,6 @@ MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount, RecordReason reason)
      */
     bool rv;
     switch (lr->exitType) {
-      case RECURSIVE_UNLINKED_EXIT:
       case UNSTABLE_LOOP_EXIT:
         rv = AttemptToStabilizeTree(cx, globalObj, lr, NULL, 0);
 #ifdef MOZ_TRACEVIS

@ -7316,10 +7184,6 @@ MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount, RecordReason reason)
       case OVERFLOW_EXIT:
         tm->oracle->markInstructionUndemotable(cx->regs->pc);
         /* FALL THROUGH */
-      case RECURSIVE_SLURP_FAIL_EXIT:
-      case RECURSIVE_SLURP_MISMATCH_EXIT:
-      case RECURSIVE_EMPTY_RP_EXIT:
-      case RECURSIVE_MISMATCH_EXIT:
       case BRANCH_EXIT:
       case CASE_EXIT:
         rv = AttemptToExtendTree(cx, lr, NULL, NULL

@ -7329,7 +7193,6 @@ MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount, RecordReason reason)
                                  );
         return RecordingIfTrue(rv);

-      case RECURSIVE_LOOP_EXIT:
       case LOOP_EXIT:
         if (innermostNestedGuard) {
             rv = AttemptToExtendTree(cx, innermostNestedGuard, lr, NULL
@ -10357,34 +10220,8 @@ TraceRecorder::putActivationObjects()
     }
 }

-static JS_REQUIRES_STACK inline bool
-IsTraceableRecursion(JSContext *cx)
-{
-    JSStackFrame *fp = cx->fp();
-    JSStackFrame *down = fp->down;
-    if (!down)
-        return false;
-    if (down->maybeScript() != fp->maybeScript())
-        return false;
-    if (down->isEvalFrame())
-        return false;
-    if (down->numActualArgs() != fp->numActualArgs())
-        return false;
-    if (fp->numActualArgs() != fp->numFormalArgs())
-        return false;
-    if (fp->hasIMacroPC() || down->hasIMacroPC())
-        return false;
-    if ((fp->flags & JSFRAME_CONSTRUCTING) || (down->flags & JSFRAME_CONSTRUCTING))
-        return false;
-    if (fp->hasBlockChain() || down->hasBlockChain())
-        return false;
-    if (*fp->getScript()->code != JSOP_TRACE)
-        return false;
-    return !fp->getFunction()->isHeavyweight();
-}
-
 JS_REQUIRES_STACK AbortableRecordingStatus
-TraceRecorder::record_EnterFrame(uintN& inlineCallCount)
+TraceRecorder::record_EnterFrame()
 {
     JSStackFrame* const fp = cx->fp();

@ -10460,71 +10297,12 @@ TraceRecorder::record_EnterFrame(uintN& inlineCallCount)
         setFrameObjPtr(fp->addressScopeChain(), scopeChain_ins);
     }

-#if 0
-    /*
-     * Check for recursion. This is a special check for recursive cases that can be
-     * a trace-tree, just like a loop. If recursion acts weird, for example
-     * differing argc or existence of an imacpc, it's not something this code is
-     * concerned about. That should pass through below to not regress pre-recursion
-     * functionality.
-     */
-    if (IsTraceableRecursion(cx) && tree->script == cx->fp()->getScript()) {
-        if (tree->recursion == Recursion_Disallowed)
-            RETURN_STOP_A("recursion not allowed in this tree");
-        if (tree->script != cx->fp()->getScript())
-            RETURN_STOP_A("recursion does not match original tree");
-        return InjectStatus(downRecursion());
-    }
-#endif
-
     /* Try inlining one level in case this recursion doesn't go too deep. */
     if (fp->getScript() == fp->down->getScript() &&
         fp->down->down && fp->down->down->getScript() == fp->getScript()) {
         RETURN_STOP_A("recursion started inlining");
     }

-#if 0
-    TreeFragment* first = LookupLoop(&JS_TRACE_MONITOR(cx), cx->regs->pc, tree->globalObj,
-                                     tree->globalShape, fp->numActualArgs());
-    if (!first)
-        return ARECORD_CONTINUE;
-    TreeFragment* f = findNestedCompatiblePeer(first);
-    if (!f) {
-        /*
-         * If there were no compatible peers, but there were peers at all, then it is probable that
-         * an inner recursive function is type mismatching. Start a new recorder that must be
-         * recursive.
-         */
-        for (f = first; f; f = f->peer) {
-            if (f->code() && f->recursion == Recursion_Detected) {
-                /* Since this recorder is about to die, save its values. */
-                if (++first->hits() <= HOTLOOP)
-                    return ARECORD_STOP;
-                if (IsBlacklisted((jsbytecode*)f->ip))
-                    RETURN_STOP_A("inner recursive tree is blacklisted");
-                JSContext* _cx = cx;
-                SlotList* globalSlots = tree->globalSlots;
-                AbortRecording(cx, "trying to compile inner recursive tree");
-                JS_ASSERT(_cx->fp()->numActualArgs() == first->argc);
-                RecordTree(_cx, first, NULL, 0, globalSlots, Record_EnterFrame);
-                break;
-            }
-        }
-        return ARECORD_CONTINUE;
-    } else if (f) {
-        /*
-         * Make sure the shape of the global object still matches (this might
-         * flush the JIT cache).
-         */
-        JSObject* globalObj = cx->fp()->getScopeChain()->getGlobal();
-        uint32 globalShape = -1;
-        SlotList* globalSlots = NULL;
-        if (!CheckGlobalObjectShape(cx, traceMonitor, globalObj, &globalShape, &globalSlots))
-            return ARECORD_ABORTED;
-        return attemptTreeCall(f, inlineCallCount);
-    }
-#endif
-
     return ARECORD_CONTINUE;
 }
@ -10605,16 +10383,8 @@ TraceRecorder::record_JSOP_RETURN()
 {
     /* A return from callDepth 0 terminates the current loop, except for recursion. */
     if (callDepth == 0) {
-#if 0
-        if (IsTraceableRecursion(cx) && tree->recursion != Recursion_Disallowed &&
-            tree->script == cx->fp()->getScript()) {
-            return InjectStatus(upRecursion());
-        } else
-#endif
-        {
-            AUDIT(returnLoopExits);
-            return endLoop();
-        }
+        AUDIT(returnLoopExits);
+        return endLoop();
     }

     putActivationObjects();

@ -15740,13 +15510,6 @@ TraceRecorder::record_JSOP_CALLELEM()
 JS_REQUIRES_STACK AbortableRecordingStatus
 TraceRecorder::record_JSOP_STOP()
 {
-#if 0
-    if (callDepth == 0 && IsTraceableRecursion(cx) &&
-        tree->recursion != Recursion_Disallowed &&
-        tree->script == cx->fp()->getScript()) {
-        return InjectStatus(upRecursion());
-    }
-#endif
     JSStackFrame *fp = cx->fp();

     if (fp->hasIMacroPC()) {
@ -16435,8 +16198,6 @@ MonitorTracePoint(JSContext* cx, uintN& inlineCallCount, bool& blacklist)
     uintN count;
     TreeFragment* match = FindVMCompatiblePeer(cx, globalObj, tree, count);
     if (match) {
-        JS_ASSERT(match->recursion < Recursion_Unwinds);
-
         VMSideExit* lr = NULL;
         VMSideExit* innermostNestedGuard = NULL;

@ -16492,7 +16253,7 @@ MonitorTracePoint(JSContext* cx, uintN& inlineCallCount, bool& blacklist)
         return TPA_Nothing;
     if (!ScopeChainCheck(cx, tree))
         return TPA_Nothing;
-    if (!RecordTree(cx, tree->first, NULL, 0, globalSlots, Record_Branch))
+    if (!RecordTree(cx, tree->first, NULL, 0, globalSlots))
         return TPA_Nothing;

 interpret:

@ -16510,6 +16271,4 @@ MonitorTracePoint(JSContext* cx, uintN& inlineCallCount, bool& blacklist)

 #endif

-#include "jsrecursion.cpp"
-
 } /* namespace js */
@ -388,19 +388,7 @@ public:
     _(UNSTABLE_LOOP)                                                          \
     _(TIMEOUT)                                                                \
     _(DEEP_BAIL)                                                              \
-    _(STATUS)                                                                 \
-    /* Exit is almost recursive and wants a peer at recursive_pc */           \
-    _(RECURSIVE_UNLINKED)                                                     \
-    /* Exit is recursive, and there are no more frames */                     \
-    _(RECURSIVE_LOOP)                                                         \
-    /* Exit is recursive, but type-mismatched guarding on a down frame */     \
-    _(RECURSIVE_MISMATCH)                                                     \
-    /* Exit is recursive, and the JIT wants to try slurping interp frames */  \
-    _(RECURSIVE_EMPTY_RP)                                                     \
-    /* Slurping interp frames in up-recursion failed */                       \
-    _(RECURSIVE_SLURP_FAIL)                                                   \
-    /* Tried to slurp an interp frame, but the pc or argc mismatched */       \
-    _(RECURSIVE_SLURP_MISMATCH)
+    _(STATUS)

 enum ExitType {
 #define MAKE_EXIT_CODE(x) x##_EXIT,
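How the list above feeds ExitType: each _(x) entry expands through MAKE_EXIT_CODE into an x##_EXIT enumerator, so after this patch the visible tail of the enum reads

    ..., UNSTABLE_LOOP_EXIT, TIMEOUT_EXIT, DEEP_BAIL_EXIT, STATUS_EXIT

and every RECURSIVE_*_EXIT value disappears. (Entries ahead of UNSTABLE_LOOP sit outside this hunk and are elided here.)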
@ -424,11 +412,7 @@ struct VMSideExit : public nanojit::SideExit
     uint32 numStackSlotsBelowCurrentFrame;
     ExitType exitType;
     uintN lookupFlags;
-    jsbytecode* recursive_pc;
-    FrameInfo* recursive_down;
     unsigned hitcount;
-    unsigned slurpFailSlot;
-    JSValueType slurpType;

     /*
      * Ordinarily 0. If a slow native function is atop the stack, the 1 bit is

@ -615,21 +599,6 @@ struct UnstableExit
     UnstableExit* next;
 };

-enum RecordReason
-{
-    Record_Branch,
-    Record_EnterFrame,
-    Record_LeaveFrame
-};
-
-enum RecursionStatus
-{
-    Recursion_None,       /* No recursion has been compiled yet. */
-    Recursion_Disallowed, /* This tree cannot be recursive. */
-    Recursion_Unwinds,    /* Tree is up-recursive only. */
-    Recursion_Detected    /* Tree has down recursion and maybe up recursion. */
-};
-
 struct LinkableFragment : public VMFragment
 {
     LinkableFragment(const void* _ip, nanojit::Allocator* alloc

@ -687,7 +656,6 @@ struct TreeFragment : public LinkableFragment
     uintN treePCOffset;
 #endif
     JSScript* script;
-    RecursionStatus recursion;
     UnstableExit* unstableExits;
     Queue<VMSideExit*> sideExits;
     ptrdiff_t nativeStackBase;

@ -910,9 +878,6 @@ class TraceRecorder
     /* The root fragment representing the tree. */
     TreeFragment* const tree;

-    /* The reason we started recording. */
-    RecordReason const recordReason;
-
     /* The global object from the start of recording until now. */
     JSObject* const globalObj;

@ -1082,18 +1047,6 @@ class TraceRecorder
     JS_REQUIRES_STACK void guard(bool expected, nanojit::LIns* cond, VMSideExit* exit);
     JS_REQUIRES_STACK nanojit::LIns* guard_xov(nanojit::LOpcode op, nanojit::LIns* d0,
                                                nanojit::LIns* d1, VMSideExit* exit);
-    JS_REQUIRES_STACK nanojit::LIns* slurpNonDoubleObjectSlot(nanojit::LIns* val_ins, ptrdiff_t offset,
-                                                              JSValueType type, VMSideExit* exit);
-    JS_REQUIRES_STACK nanojit::LIns* slurpObjectSlot(nanojit::LIns* val_ins, ptrdiff_t offset,
-                                                     JSValueType type, VMSideExit* exit);
-    JS_REQUIRES_STACK nanojit::LIns* slurpDoubleSlot(nanojit::LIns* val_ins, ptrdiff_t offset,
-                                                     VMSideExit* exit);
-    JS_REQUIRES_STACK nanojit::LIns* slurpSlot(nanojit::LIns* val_ins, ptrdiff_t offset, Value* vp, VMSideExit* exit);
-    JS_REQUIRES_STACK void slurpSlot(nanojit::LIns* val_ins, ptrdiff_t offset, Value* vp, SlurpInfo* info);
-    JS_REQUIRES_STACK void slurpFrameObjPtrSlot(nanojit::LIns* val_ins, ptrdiff_t offset, JSObject** p, SlurpInfo* info);
-    JS_REQUIRES_STACK AbortableRecordingStatus slurpDownFrames(jsbytecode* return_pc);
-    JS_REQUIRES_STACK AbortableRecordingStatus upRecursion();
-    JS_REQUIRES_STACK AbortableRecordingStatus downRecursion();

     nanojit::LIns* addName(nanojit::LIns* ins, const char* name);

@ -1448,7 +1401,7 @@ class TraceRecorder
     TraceRecorder(JSContext* cx, VMSideExit*, VMFragment*,
                   unsigned stackSlots, unsigned ngslots, JSValueType* typeMap,
                   VMSideExit* expectedInnerExit, jsbytecode* outerTree,
-                  uint32 outerArgc, RecordReason reason, bool speculate);
+                  uint32 outerArgc, bool speculate);

     /* The destructor should only be called through finish*, not directly. */
     ~TraceRecorder();

@ -1467,7 +1420,7 @@ class TraceRecorder
     friend class DetermineTypesVisitor;
     friend class RecursiveSlotMap;
     friend class UpRecursiveSlotMap;
-    friend MonitorResult MonitorLoopEdge(JSContext*, uintN&, RecordReason);
+    friend MonitorResult MonitorLoopEdge(JSContext*, uintN&);
     friend TracePointAction MonitorTracePoint(JSContext*, uintN &inlineCallCount,
                                               bool &blacklist);
     friend void AbortRecording(JSContext*, const char*);

@ -1477,8 +1430,7 @@ public:
     startRecorder(JSContext*, VMSideExit*, VMFragment*,
                   unsigned stackSlots, unsigned ngslots, JSValueType* typeMap,
                   VMSideExit* expectedInnerExit, jsbytecode* outerTree,
-                  uint32 outerArgc, RecordReason reason,
-                  bool speculate);
+                  uint32 outerArgc, bool speculate);

     /* Accessors. */
     VMFragment* getFragment() const { return fragment; }

@ -1488,7 +1440,7 @@ public:

     /* Entry points / callbacks from the interpreter. */
     JS_REQUIRES_STACK AbortableRecordingStatus monitorRecording(JSOp op);
-    JS_REQUIRES_STACK AbortableRecordingStatus record_EnterFrame(uintN& inlineCallCount);
+    JS_REQUIRES_STACK AbortableRecordingStatus record_EnterFrame();
     JS_REQUIRES_STACK AbortableRecordingStatus record_LeaveFrame();
     JS_REQUIRES_STACK AbortableRecordingStatus record_SetPropHit(PropertyCacheEntry* entry,
                                                                  JSScopeProperty* sprop);

@ -1550,7 +1502,7 @@ public:
 #define TRACE_2(x,a,b) TRACE_ARGS(x, (a, b))

 extern JS_REQUIRES_STACK MonitorResult
-MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount, RecordReason reason);
+MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount);

 extern JS_REQUIRES_STACK TracePointAction
 MonitorTracePoint(JSContext*, uintN& inlineCallCount, bool& blacklist);
@ -5,5 +5,5 @@ function caller(obj) {
   assertJit();
   return x;
 }
-trap(caller, 9, "var x = 'success'; nop()");
+trap(caller, 7, "var x = 'success'; nop()");
 assertEq(caller(this), "success");
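/*
 * Why every trap()/untrap() offset in these test updates shrinks: JSOP_TRACE is
 * a one-byte opcode that this patch stops emitting at function entry and after
 * calls, so each bytecode that used to follow one or more trace hints now sits
 * that many bytes earlier in the script. Read that way, the 9 -> 7 change in
 * the hunk just above is consistent with two hints (the entry hint plus one
 * after the assertJit() call) vanishing before the trapped instruction. This is
 * a hedged reading of the tests, not something the patch states explicitly.
 */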
@ -6,5 +6,5 @@ function caller(obj) {
   var x = ({ dana : "zuul" });
   return x;
 }
-trap(caller, 22, "x = 'success'; nop()");
+trap(caller, 20, "x = 'success'; nop()");
 assertEq(caller(this), "success");

@ -6,5 +6,5 @@ function caller(obj) {
   var x = "failure";
   return x;
 }
-trap(caller, 16, "x = 'success'; nop()");
+trap(caller, 14, "x = 'success'; nop()");
 assertEq(caller(this), "success");

@ -4,7 +4,7 @@ function main() { x = "failure"; }
 function success() { x = "success"; }

 /* The JSOP_STOP in a. */
-trap(main, 8, "success()");
+trap(main, 6, "success()");
 main();

 assertEq(x, "success");

@ -3,5 +3,5 @@ function main() {
   return "failure";
 }
 /* JSOP_RETURN in main. */
-trap(main, 4, "'success'");
+trap(main, 3, "'success'");
 assertEq(main(), "success");

@ -3,5 +3,5 @@ function main() {
   return 1;
 }
 /* JSOP_RETURN in main. */
-trap(main, 2, "0");
+trap(main, 1, "0");
 assertEq(main(), 0);

@ -3,7 +3,7 @@ x = "notset";
 function myparent(nested) {
   if (nested) {
     /* myparent call in myparent. */
-    trap(myparent, 40, "failure()");
+    trap(myparent, 37, "failure()");
   } else {
     x = "success";
     myparent(true);

@ -4,14 +4,14 @@ x = "notset";
 function child() {
   x = "failure1";
   /* JSOP_STOP in parent. */
-  trap(parent, 11, "success()");
+  trap(parent, 10, "success()");
 }

 function parent() {
   x = "failure2";
 }
 /* First op in parent. */
-trap(parent, 1, "child()");
+trap(parent, 0, "child()");

 function success() {
   x = "success";

@ -2,7 +2,7 @@ setDebug(true);
 x = "notset";
 function child() {
   /* JSOP_STOP in parent. */
-  trap(parent, 19, "success()");
+  trap(parent, 17, "success()");
 }
 function parent() {
   child();

@ -6,14 +6,14 @@ function doNothing() { }
 function myparent(nested) {
   if (nested) {
     /* JSOP_CALL to doNothing in myparent with nested = true. */
-    trap(myparent, 26, "success()");
+    trap(myparent, 24, "success()");
     doNothing();
   } else {
     doNothing();
   }
 }
 /* JSOP_CALL to doNothing in myparent with nested = false. */
-trap(myparent, 37, "myparent(true)");
+trap(myparent, 34, "myparent(true)");

 function success() {
   x = "success";

@ -2,7 +2,7 @@ setDebug(true);
 x = "notset";
 function main() {
   /* The JSOP_STOP in a. */
-  trap(main, 27, "success()");
+  trap(main, 25, "success()");
   x = "failure";
 }
 function success() { x = "success"; }

@ -2,14 +2,14 @@ setDebug(true);
 x = "notset";
 function child() {
   /* JSOP_STOP in parent */
-  untrap(parent, 11);
+  untrap(parent, 10);
   x = "success";
 }
 function parent() {
   x = "failure";
 }
 /* JSOP_STOP in parent */
-trap(parent, 11, "child()");
+trap(parent, 10, "child()");

 parent();
 assertEq(x, "success");