Cleaned up and refactored AttemptToStabilizeTree (bug 509089, r=gal).

This commit is contained in:
David Anderson 2009-08-10 22:15:52 -07:00
Parent 63c66f15ac
Commit 6dd7b0c70a
2 changed files with 186 additions and 159 deletions

View file

@@ -712,6 +712,16 @@ MarkSlotUndemotable(JSContext* cx, TreeInfo* ti, unsigned slot)
     oracle.markGlobalSlotUndemotable(cx, gslots[slot - ti->nStackTypes]);
 }
 
+static JS_REQUIRES_STACK inline bool
+IsSlotUndemotable(JSContext* cx, TreeInfo* ti, unsigned slot)
+{
+    if (slot < ti->nStackTypes)
+        return oracle.isStackSlotUndemotable(cx, slot);
+
+    uint16* gslots = ti->globalSlots->data();
+    return oracle.isGlobalSlotUndemotable(cx, gslots[slot - ti->nStackTypes]);
+}
+
 struct PCHashEntry : public JSDHashEntryStub {
     size_t count;
 };
@@ -807,6 +817,9 @@ struct VMFragment : public Fragment
         globalShape(_globalShape),
         argc(_argc)
     {}
+    inline TreeInfo* getTreeInfo() {
+        return (TreeInfo*)vmprivate;
+    }
     VMFragment* next;
     JSObject* globalObj;
     uint32 globalShape;
@@ -1601,6 +1614,15 @@ TypeMap::matches(TypeMap& other) const
     return !memcmp(data(), other.data(), length());
 }
 
+void
+TypeMap::fromRaw(JSTraceType* other, unsigned numSlots)
+{
+    unsigned oldLength = length();
+    setLength(length() + numSlots);
+    for (unsigned i = 0; i < numSlots; i++)
+        get(oldLength + i) = other[i];
+}
+
 /*
  * Use the provided storage area to create a new type map that contains the
  * partial type map with the rest of it filled up from the complete type
@@ -1640,24 +1662,6 @@ SpecializeTreesToMissingGlobals(JSContext* cx, JSObject* globalObj, TreeInfo* ro
         }
     }
 }
 
-static inline JSTraceType*
-GetStackTypeMap(nanojit::SideExit* exit)
-{
-    return (JSTraceType*)(((VMSideExit*)exit) + 1);
-}
-
-static inline JSTraceType*
-GetGlobalTypeMap(nanojit::SideExit* exit)
-{
-    return GetStackTypeMap(exit) + ((VMSideExit*)exit)->numStackSlots;
-}
-
-static inline JSTraceType*
-GetFullTypeMap(nanojit::SideExit* exit)
-{
-    return GetStackTypeMap(exit);
-}
-
 static void
 TrashTree(JSContext* cx, Fragment* f);
@@ -3184,7 +3188,7 @@ TraceRecorder::snapshot(ExitType exitType)
         VMSideExit* e = exits[n];
         if (e->pc == pc && e->imacpc == fp->imacpc &&
             ngslots == e->numGlobalSlots &&
-            !memcmp(GetFullTypeMap(exits[n]), typemap, typemap_size)) {
+            !memcmp(exits[n]->fullTypeMap(), typemap, typemap_size)) {
             AUDIT(mergedLoopExits);
             JS_ARENA_RELEASE(&cx->tempPool, mark);
             return e;
@@ -3229,7 +3233,7 @@ TraceRecorder::snapshot(ExitType exitType)
     exit->rp_adj = exit->calldepth * sizeof(FrameInfo*);
     exit->nativeCalleeWord = 0;
     exit->lookupFlags = js_InferFlags(cx, 0);
-    memcpy(GetFullTypeMap(exit), typemap, typemap_size);
+    memcpy(exit->fullTypeMap(), typemap, typemap_size);
 
     JS_ARENA_RELEASE(&cx->tempPool, mark);
     return exit;
@@ -3429,6 +3433,19 @@ TraceRecorder::compile(JSTraceMonitor* tm)
     AUDIT(traceCompleted);
 }
 
+static void
+JoinPeers(Assembler* assm, VMSideExit* exit, VMFragment* target)
+{
+    exit->target = target;
+    assm->patch(exit);
+
+    if (exit->root() == target)
+        return;
+
+    target->getTreeInfo()->dependentTrees.addUnique(exit->root());
+    exit->root()->getTreeInfo()->linkedTrees.addUnique(target);
+}
+
 static bool
 JoinPeersIfCompatible(Assembler* assm, Fragment* stableFrag, TreeInfo* stableTree,
                       VMSideExit* exit)
@@ -3437,15 +3454,11 @@ JoinPeersIfCompatible(Assembler* assm, Fragment* stableFrag, TreeInfo* stableTre
     /* Must have a matching type unstable exit. */
     if ((exit->numGlobalSlots + exit->numStackSlots != stableTree->typeMap.length()) ||
-        memcmp(GetFullTypeMap(exit), stableTree->typeMap.data(), stableTree->typeMap.length())) {
+        memcmp(exit->fullTypeMap(), stableTree->typeMap.data(), stableTree->typeMap.length())) {
         return false;
     }
 
-    exit->target = stableFrag;
-    assm->patch(exit);
-
-    stableTree->dependentTrees.addUnique(exit->from->root);
-    ((TreeInfo*)exit->from->root->vmprivate)->linkedTrees.addUnique(stableFrag);
+    JoinPeers(assm, exit, (VMFragment*)stableFrag);
 
     return true;
 }
@@ -3801,6 +3814,38 @@ TraceRecorder::closeLoop(SlotMap& slotMap, VMSideExit* exit, TypeConsensus& cons
     return true;
 }
 
+static void
+FullMapFromExit(TypeMap& typeMap, VMSideExit* exit)
+{
+    typeMap.setLength(0);
+    typeMap.fromRaw(exit->stackTypeMap(), exit->numStackSlots);
+    typeMap.fromRaw(exit->globalTypeMap(), exit->numGlobalSlots);
+    /* Include globals that were later specialized at the root of the tree. */
+    if (exit->numGlobalSlots < exit->root()->getTreeInfo()->nGlobalTypes()) {
+        typeMap.fromRaw(exit->root()->getTreeInfo()->globalTypeMap() + exit->numGlobalSlots,
+                        exit->root()->getTreeInfo()->nGlobalTypes() - exit->numGlobalSlots);
+    }
+}
+
+static TypeConsensus
+TypeMapLinkability(JSContext* cx, const TypeMap& typeMap, VMFragment* peer)
+{
+    const TypeMap& peerMap = peer->getTreeInfo()->typeMap;
+    unsigned minSlots = JS_MIN(typeMap.length(), peerMap.length());
+    TypeConsensus consensus = TypeConsensus_Okay;
+    for (unsigned i = 0; i < minSlots; i++) {
+        if (typeMap[i] == peerMap[i])
+            continue;
+        if (typeMap[i] == TT_INT32 && peerMap[i] == TT_DOUBLE &&
+            IsSlotUndemotable(cx, peer->getTreeInfo(), i)) {
+            consensus = TypeConsensus_Undemotes;
+        } else {
+            return TypeConsensus_Bad;
+        }
+    }
+    return consensus;
+}
+
 JS_REQUIRES_STACK void
 TraceRecorder::joinEdgesToEntry(Fragmento* fragmento, VMFragment* peer_root)
 {
@@ -3841,7 +3886,7 @@ TraceRecorder::joinEdgesToEntry(Fragmento* fragmento, VMFragment* peer_root)
             unsigned stackCount = 0;
             unsigned globalCount = 0;
             t1 = treeInfo->stackTypeMap();
-            t2 = GetStackTypeMap(uexit->exit);
+            t2 = uexit->exit->stackTypeMap();
             for (unsigned i = 0; i < uexit->exit->numStackSlots; i++) {
                 if (t2[i] == TT_INT32 && t1[i] == TT_DOUBLE) {
                     stackDemotes[stackCount++] = i;
@@ -3851,7 +3896,7 @@ TraceRecorder::joinEdgesToEntry(Fragmento* fragmento, VMFragment* peer_root)
                 }
             }
             t1 = treeInfo->globalTypeMap();
-            t2 = GetGlobalTypeMap(uexit->exit);
+            t2 = uexit->exit->globalTypeMap();
             for (unsigned i = 0; i < uexit->exit->numGlobalSlots; i++) {
                 if (t2[i] == TT_INT32 && t1[i] == TT_DOUBLE) {
                     globalDemotes[globalCount++] = i;
@@ -4015,10 +4060,10 @@ TraceRecorder::emitTreeCall(Fragment* inner, VMSideExit* exit)
 #ifdef DEBUG
     JSTraceType* map;
     size_t i;
-    map = GetGlobalTypeMap(exit);
+    map = exit->globalTypeMap();
     for (i = 0; i < exit->numGlobalSlots; i++)
         JS_ASSERT(map[i] != TT_JSVAL);
-    map = GetStackTypeMap(exit);
+    map = exit->stackTypeMap();
     for (i = 0; i < exit->numStackSlots; i++)
         JS_ASSERT(map[i] != TT_JSVAL);
 #endif
@@ -4027,9 +4072,9 @@ TraceRecorder::emitTreeCall(Fragment* inner, VMSideExit* exit)
      * first extending from the inner. Make a new typemap here.
      */
     TypeMap fullMap;
-    fullMap.add(GetStackTypeMap(exit), exit->numStackSlots);
-    fullMap.add(GetGlobalTypeMap(exit), exit->numGlobalSlots);
-    TreeInfo* innerTree = (TreeInfo*)exit->from->root->vmprivate;
+    fullMap.add(exit->stackTypeMap(), exit->numStackSlots);
+    fullMap.add(exit->globalTypeMap(), exit->numGlobalSlots);
+    TreeInfo* innerTree = exit->root()->getTreeInfo();
     if (exit->numGlobalSlots < innerTree->nGlobalTypes()) {
         fullMap.add(innerTree->globalTypeMap() + exit->numGlobalSlots,
                     innerTree->nGlobalTypes() - exit->numGlobalSlots);
@@ -4651,14 +4696,58 @@ RecordTree(JSContext* cx, JSTraceMonitor* tm, Fragment* f, jsbytecode* outer,
     return true;
 }
 
-static JS_REQUIRES_STACK inline bool
-IsSlotUndemotable(JSContext* cx, TreeInfo* ti, unsigned slot)
-{
-    if (slot < ti->nStackTypes)
-        return oracle.isStackSlotUndemotable(cx, slot);
-
-    uint16* gslots = ti->globalSlots->data();
-    return oracle.isGlobalSlotUndemotable(cx, gslots[slot - ti->nStackTypes]);
-}
+static TypeConsensus
+FindLoopEdgeTarget(JSContext* cx, VMSideExit* exit, VMFragment** peerp)
+{
+    VMFragment* from = exit->root();
+    TreeInfo* from_ti = from->getTreeInfo();
+
+    JS_ASSERT(from->code());
+
+    TypeMap typeMap;
+    FullMapFromExit(typeMap, exit);
+    JS_ASSERT(typeMap.length() - exit->numStackSlots == from_ti->nGlobalTypes());
+
+    /* Mark all double slots as undemotable */
+    for (unsigned i = 0; i < typeMap.length(); i++) {
+        if (typeMap[i] == TT_DOUBLE)
+            MarkSlotUndemotable(cx, from_ti, i);
+    }
+
+    VMFragment* firstPeer = (VMFragment*)from->first;
+    for (VMFragment* peer = firstPeer; peer; peer = (VMFragment*)peer->peer) {
+        TreeInfo* peer_ti = peer->getTreeInfo();
+        if (!peer_ti)
+            continue;
+        JS_ASSERT(peer->argc == from->argc);
+        JS_ASSERT(exit->numStackSlots == peer_ti->nStackTypes);
+        TypeConsensus consensus = TypeMapLinkability(cx, typeMap, peer);
+        if (consensus == TypeConsensus_Okay) {
+            *peerp = peer;
+            return consensus;
+        } else if (consensus == TypeConsensus_Undemotes) {
+            return consensus;
+        }
+    }
+
+    return TypeConsensus_Bad;
+}
+
+UnstableExit*
+TreeInfo::removeUnstableExit(VMSideExit* exit)
+{
+    /* Now erase this exit from the unstable exit list. */
+    UnstableExit** tail = &this->unstableExits;
+    for (UnstableExit* uexit = this->unstableExits; uexit != NULL; uexit = uexit->next) {
+        if (uexit->exit == exit) {
+            *tail = uexit->next;
+            delete uexit;
+            return *tail;
+        }
+        tail = &uexit->next;
+    }
+    JS_NOT_REACHED("exit not in unstable exit list");
+    return NULL;
+}
 
 static JS_REQUIRES_STACK bool
@@ -4671,117 +4760,36 @@ AttemptToStabilizeTree(JSContext* cx, JSObject* globalObj, VMSideExit* exit, jsb
         return false;
     }
 
-    VMFragment* from = (VMFragment*)exit->from->root;
-    TreeInfo* from_ti = (TreeInfo*)from->vmprivate;
-
-    JS_ASSERT(exit->from->root->code());
-
-    /*
-     * The loop edge exit might not know about all types since the tree could
-     * have been further specialized since it was recorded. Fill in the missing
-     * types from the entry type map.
-     */
-    JSTraceType* m = GetFullTypeMap(exit);
-    unsigned ngslots = exit->numGlobalSlots;
-    if (ngslots < from_ti->nGlobalTypes()) {
-        uint32 partial = exit->numStackSlots + exit->numGlobalSlots;
-        m = (JSTraceType*)alloca(from_ti->typeMap.length() * sizeof(JSTraceType));
-        memcpy(m, GetFullTypeMap(exit), partial);
-        memcpy(m + partial, from_ti->globalTypeMap() + exit->numGlobalSlots,
-               from_ti->nGlobalTypes() - exit->numGlobalSlots);
-        ngslots = from_ti->nGlobalTypes();
-    }
-    JS_ASSERT(exit->numStackSlots + ngslots == from_ti->typeMap.length());
-
-    /*
-     * If we see any doubles along the loop edge, mark those slots undemotable
-     * since we know now for a fact that they can contain doubles.
-     */
-    for (unsigned i = 0; i < from_ti->typeMap.length(); i++) {
-        if (m[i] == TT_DOUBLE)
-            MarkSlotUndemotable(cx, from_ti, i);
-    }
-
-    bool bound = false;
-    for (Fragment* f = from->first; f != NULL; f = f->peer) {
-        if (!f->code())
-            continue;
-        TreeInfo* ti = (TreeInfo*)f->vmprivate;
-        JS_ASSERT(exit->numStackSlots == ti->nStackTypes);
-
-        /* Check the minimum number of slots that need to be compared. */
-        unsigned checkSlots = JS_MIN(from_ti->typeMap.length(), ti->typeMap.length());
-        JSTraceType* m2 = ti->typeMap.data();
-
-        /* Analyze the exit typemap against the peer typemap.
-         * Two conditions are important:
-         * 1) Typemaps are identical: these peers can be attached.
-         * 2) Typemaps do not match, but only contain I->D mismatches.
-         *    In this case, the original tree must be trashed because it
-         *    will never connect to any peer.
-         */
-        bool matched = true;
-        bool undemote = false;
-        for (uint32 i = 0; i < checkSlots; i++) {
-            /* If the types are equal we're okay. */
-            if (m[i] == m2[i])
-                continue;
-            matched = false;
-
-            /*
-             * If there's an I->D that cannot be resolved, flag it.
-             * Otherwise, break and go to the next peer.
-             */
-            if (m[i] == TT_INT32 && m2[i] == TT_DOUBLE && IsSlotUndemotable(cx, ti, i)) {
-                undemote = true;
-            } else {
-                undemote = false;
-                break;
-            }
-        }
-        if (matched) {
-            JS_ASSERT(from_ti->globalSlots == ti->globalSlots);
-            JS_ASSERT(from_ti->nStackTypes == ti->nStackTypes);
-
-            /* Capture missing globals on both trees and link the fragments together. */
-            if (from != f) {
-                ti->dependentTrees.addUnique(from);
-                from_ti->linkedTrees.addUnique(f);
-            }
-            if (ti->nGlobalTypes() < ti->globalSlots->length())
-                SpecializeTreesToMissingGlobals(cx, globalObj, ti);
-            exit->target = f;
-            Assembler *assm = JS_TRACE_MONITOR(cx).assembler;
-            assm->patch(exit);
-
-            /* Now erase this exit from the unstable exit list. */
-            UnstableExit** tail = &from_ti->unstableExits;
-            for (UnstableExit* uexit = from_ti->unstableExits; uexit != NULL; uexit = uexit->next) {
-                if (uexit->exit == exit) {
-                    *tail = uexit->next;
-                    delete uexit;
-                    bound = true;
-                    break;
-                }
-                tail = &uexit->next;
-            }
-            JS_ASSERT(bound);
-            debug_only_stmt( DumpPeerStability(tm, f->ip, from->globalObj, from->globalShape, from->argc); )
-            break;
-        } else if (undemote) {
-            /* The original tree is unconnectable, so trash it. */
-            TrashTree(cx, f);
-
-            /* We shouldn't attempt to record now, since we'll hit a duplicate. */
-            return false;
-        }
-    }
-    if (bound)
-        return false;
-
-    VMFragment* root = (VMFragment*)from->root;
-    return RecordTree(cx, tm, from->first, outer, outerArgc, root->globalObj,
-                      root->globalShape, from_ti->globalSlots, cx->fp->argc);
+    VMFragment* from = exit->root();
+    TreeInfo* from_ti = from->getTreeInfo();
+
+    VMFragment* peer;
+    TypeConsensus consensus = FindLoopEdgeTarget(cx, exit, &peer);
+    if (consensus == TypeConsensus_Okay) {
+        TreeInfo* peer_ti = peer->getTreeInfo();
+        JS_ASSERT(from_ti->globalSlots == peer_ti->globalSlots);
+        JS_ASSERT(from_ti->nStackTypes == peer_ti->nStackTypes);
+        /* Patch this exit to its peer */
+        JoinPeers(tm->assembler, exit, peer);
+        /*
+         * Update peer global types. The |from| fragment should already be
+         * updated because it is on the execution path, and somehow connected
+         * to the entry trace.
+         */
+        if (peer_ti->nGlobalTypes() < peer_ti->globalSlots->length())
+            SpecializeTreesToMissingGlobals(cx, globalObj, peer_ti);
+        JS_ASSERT(from_ti->nGlobalTypes() == from_ti->globalSlots->length());
+        /* This exit is no longer unstable, so remove it. */
+        from_ti->removeUnstableExit(exit);
+        debug_only_stmt(DumpPeerStability(tm, peer->ip, from->globalObj, from->globalShape, from->argc);)
+        return false;
+    } else if (consensus == TypeConsensus_Undemotes) {
+        /* The original tree is unconnectable, so trash it. */
+        TrashTree(cx, peer);
+        return false;
+    }
+
+    return RecordTree(cx, tm, from->first, outer, outerArgc, from->globalObj,
+                      from->globalShape, from_ti->globalSlots, cx->fp->argc);
 }
 
 static JS_REQUIRES_STACK bool
@@ -4800,7 +4808,7 @@ AttemptToExtendTree(JSContext* cx, VMSideExit* anchor, VMSideExit* exitedFrom, j
         return false;
     }
 
-    Fragment* f = anchor->from->root;
+    Fragment* f = anchor->root();
     JS_ASSERT(f->vmprivate);
     TreeInfo* ti = (TreeInfo*)f->vmprivate;
@@ -4850,7 +4858,7 @@ AttemptToExtendTree(JSContext* cx, VMSideExit* anchor, VMSideExit* exitedFrom, j
         */
        ngslots = anchor->numGlobalSlots;
        stackSlots = anchor->numStackSlots;
-       typeMap = GetFullTypeMap(anchor);
+       typeMap = anchor->fullTypeMap();
    } else {
        /*
         * If we side-exited on a loop exit and continue on a nesting
@@ -4861,10 +4869,10 @@ AttemptToExtendTree(JSContext* cx, VMSideExit* anchor, VMSideExit* exitedFrom, j
         */
        VMSideExit* e1 = anchor;
        VMSideExit* e2 = exitedFrom;
-       fullMap.add(GetStackTypeMap(e1), e1->numStackSlotsBelowCurrentFrame);
-       fullMap.add(GetStackTypeMap(e2), e2->numStackSlots);
+       fullMap.add(e1->stackTypeMap(), e1->numStackSlotsBelowCurrentFrame);
+       fullMap.add(e2->stackTypeMap(), e2->numStackSlots);
        stackSlots = fullMap.length();
-       fullMap.add(GetGlobalTypeMap(e2), e2->numGlobalSlots);
+       fullMap.add(e2->globalTypeMap(), e2->numGlobalSlots);
        if (e2->numGlobalSlots < e1->numGlobalSlots) {
            /*
             * Watch out for an extremely rare case (bug 502714). The sequence of events is:
@@ -4880,7 +4888,7 @@ AttemptToExtendTree(JSContext* cx, VMSideExit* anchor, VMSideExit* exitedFrom, j
             * typemap entry for X. The correct entry is in the inner guard's TreeInfo,
             * analogous to the solution for bug 476653.
             */
-           TreeInfo* innerTree = (TreeInfo*)e2->from->root->vmprivate;
+           TreeInfo* innerTree = e2->root()->getTreeInfo();
            unsigned slots = e2->numGlobalSlots;
            if (innerTree->nGlobalTypes() > slots) {
                unsigned addSlots = JS_MIN(innerTree->nGlobalTypes() - slots,
@@ -4889,7 +4897,7 @@ AttemptToExtendTree(JSContext* cx, VMSideExit* anchor, VMSideExit* exitedFrom, j
                slots += addSlots;
            }
            if (slots < e1->numGlobalSlots)
-               fullMap.add(GetGlobalTypeMap(e1) + slots, e1->numGlobalSlots - slots);
+               fullMap.add(e1->globalTypeMap() + slots, e1->numGlobalSlots - slots);
            JS_ASSERT(slots == e1->numGlobalSlots);
        }
        ngslots = e1->numGlobalSlots;
@@ -5574,7 +5582,7 @@ LeaveTree(InterpState& state, VMSideExit* lr)
         * (Some opcodes, like JSOP_CALLELEM, produce two values, hence the
         * loop.)
         */
-       JSTraceType* typeMap = GetStackTypeMap(innermost);
+       JSTraceType* typeMap = innermost->stackTypeMap();
        for (int i = 1; i <= cs.ndefs; i++) {
            NativeToValue(cx,
                          regs->sp[-i],
@@ -5693,7 +5701,7 @@ LeaveTree(InterpState& state, VMSideExit* lr)
    /* Are there enough globals? */
    if (innermost->numGlobalSlots == ngslots) {
        /* Yes. This is the ideal fast path. */
-       globalTypeMap = GetGlobalTypeMap(innermost);
+       globalTypeMap = innermost->globalTypeMap();
    } else {
        /*
         * No. Merge the typemap of the innermost entry and exit together. This
@@ -5702,11 +5710,11 @@ LeaveTree(InterpState& state, VMSideExit* lr)
         * is lazily added into a tree, all dependent and linked trees are
         * immediately specialized (see bug 476653).
         */
-       TreeInfo* ti = (TreeInfo*)innermost->from->root->vmprivate;
+       TreeInfo* ti = innermost->root()->getTreeInfo();
        JS_ASSERT(ti->nGlobalTypes() == ngslots);
        JS_ASSERT(ti->nGlobalTypes() > innermost->numGlobalSlots);
        globalTypeMap = (JSTraceType*)alloca(ngslots * sizeof(JSTraceType));
-       memcpy(globalTypeMap, GetGlobalTypeMap(innermost), innermost->numGlobalSlots);
+       memcpy(globalTypeMap, innermost->globalTypeMap(), innermost->numGlobalSlots);
        memcpy(globalTypeMap + innermost->numGlobalSlots,
               ti->globalTypeMap() + innermost->numGlobalSlots,
               ti->nGlobalTypes() - innermost->numGlobalSlots);
@@ -5717,7 +5725,7 @@ LeaveTree(InterpState& state, VMSideExit* lr)
    int slots =
#endif
        FlushNativeStackFrame(cx, innermost->calldepth,
-                             GetStackTypeMap(innermost),
+                             innermost->stackTypeMap(),
                              stack, NULL);
    JS_ASSERT(unsigned(slots) == innermost->numStackSlots);
@@ -12764,7 +12772,7 @@ DumpPeerStability(JSTraceMonitor* tm, const void* ip, JSObject* globalObj, uint3
        UnstableExit* uexit = ti->unstableExits;
        while (uexit != NULL) {
            debug_only_print0(LC_TMRecorder, "EXIT ");
-           JSTraceType* m = GetFullTypeMap(uexit->exit);
+           JSTraceType* m = uexit->exit->fullTypeMap();
            debug_only_print0(LC_TMRecorder, "STACK=");
            for (unsigned i = 0; i < uexit->exit->numStackSlots; i++)
                debug_only_printf(LC_TMRecorder, "%c", typeChar[m[i]]);

View file

@@ -313,6 +313,7 @@ public:
     JS_REQUIRES_STACK void captureMissingGlobalTypes(JSContext* cx, JSObject* globalObj, SlotList& slots,
                                                      unsigned stackSlots);
     bool matches(TypeMap& other) const;
+    void fromRaw(JSTraceType* other, unsigned numSlots);
 };
 
 #define JS_TM_EXITCODES(_) \
@@ -390,6 +391,22 @@ struct VMSideExit : public nanojit::SideExit
     void setNativeCallee(JSObject *callee, bool constructing) {
         nativeCalleeWord = uintptr_t(callee) | (constructing ? 1 : 0);
     }
+
+    inline JSTraceType* stackTypeMap() {
+        return (JSTraceType*)(this + 1);
+    }
+
+    inline JSTraceType* globalTypeMap() {
+        return (JSTraceType*)(this + 1) + this->numStackSlots;
+    }
+
+    inline JSTraceType* fullTypeMap() {
+        return stackTypeMap();
+    }
+
+    inline VMFragment* root() {
+        return (VMFragment*)from->root;
+    }
 };
 
 struct VMAllocator : public nanojit::Allocator
@@ -508,6 +525,8 @@ public:
     inline JSTraceType* stackTypeMap() {
         return typeMap.data();
     }
+
+    UnstableExit* removeUnstableExit(VMSideExit* exit);
 };
 
 #if defined(JS_JIT_SPEW) && (defined(NANOJIT_IA32) || (defined(NANOJIT_AMD64) && defined(__GNUC__)))