Implemented multiple type specialized trees per entry point (bug 450833, r=gal)

This commit is contained in:
David Anderson 2008-10-27 20:42:49 -07:00
Родитель b1d2219a45
Коммит 610531b27c
8 изменённых файлов: 918 добавлений и 292 удалений

Просмотреть файл

@ -49,3 +49,4 @@ JITSTAT(unstableLoopVariable)
JITSTAT(breakLoopExits) JITSTAT(breakLoopExits)
JITSTAT(returnLoopExits) JITSTAT(returnLoopExits)
JITSTAT(mergedLoopExits) JITSTAT(mergedLoopExits)
JITSTAT(noCompatInnerTrees)

Просмотреть файл

@ -108,18 +108,6 @@ class TypeMap;
# define CLS(T) void* # define CLS(T) void*
#endif #endif
/*
 * Fragment quick cache entry: associates a bytecode address with a compiled
 * trace fragment. An array of these (JSTraceMonitor::fcache) forms a small
 * fixed-size lookup table keyed by pc.
 */
typedef struct JSFragmentCacheEntry {
jsbytecode* pc;                  /* bytecode address used as the cache key */
CLS(nanojit::Fragment) fragment; /* cached fragment; CLS(T) degrades to void* outside C++ */
} JSFragmentCacheEntry;
/* Cache geometry: SIZE = 2^LOG2 entries, MASK = SIZE-1 for index wrapping. */
#define JS_FRAGMENT_CACHE_LOG2 2
#define JS_FRAGMENT_CACHE_SIZE JS_BIT(JS_FRAGMENT_CACHE_LOG2)
#define JS_FRAGMENT_CACHE_MASK JS_BITMASK(JS_FRAGMENT_CACHE_LOG2)
/* /*
* Trace monitor. Every JSThread (if JS_THREADSAFE) or JSRuntime (if not * Trace monitor. Every JSThread (if JS_THREADSAFE) or JSRuntime (if not
* JS_THREADSAFE) has an associated trace monitor that keeps track of loop * JS_THREADSAFE) has an associated trace monitor that keeps track of loop
@ -138,7 +126,6 @@ typedef struct JSTraceMonitor {
uint32 globalShape; uint32 globalShape;
CLS(SlotList) globalSlots; CLS(SlotList) globalSlots;
CLS(TypeMap) globalTypeMap; CLS(TypeMap) globalTypeMap;
JSFragmentCacheEntry fcache[JS_FRAGMENT_CACHE_SIZE];
jsval *recoveryDoublePool; jsval *recoveryDoublePool;
jsval *recoveryDoublePoolPtr; jsval *recoveryDoublePoolPtr;
uint32 jitCacheGen; uint32 jitCacheGen;

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -168,6 +168,13 @@ public:
bool matches(TypeMap& other) const; bool matches(TypeMap& other) const;
}; };
/*
 * Record of a side exit whose types did not stabilize against any existing
 * tree at recording time. Nodes form a singly linked list headed by
 * TreeInfo::unstableExits so these exits can be revisited later (e.g. to be
 * joined to a compatible peer tree once one exists).
 */
struct UnstableExit
{
nanojit::Fragment* fragment; /* fragment associated with this exit -- NOTE(review): presumably the tree it exits from; confirm at use sites */
nanojit::SideExit* exit;     /* the type-unstable side exit itself */
UnstableExit* next;          /* next node in the per-tree list (NULL terminates) */
};
class TreeInfo MMGC_SUBCLASS_DECL { class TreeInfo MMGC_SUBCLASS_DECL {
nanojit::Fragment* fragment; nanojit::Fragment* fragment;
public: public:
@ -176,14 +183,15 @@ public:
ptrdiff_t nativeStackBase; ptrdiff_t nativeStackBase;
unsigned maxCallDepth; unsigned maxCallDepth;
TypeMap stackTypeMap; TypeMap stackTypeMap;
unsigned mismatchCount;
Queue<nanojit::Fragment*> dependentTrees; Queue<nanojit::Fragment*> dependentTrees;
unsigned branchCount; unsigned branchCount;
Queue<nanojit::SideExit*> sideExits; Queue<nanojit::SideExit*> sideExits;
UnstableExit* unstableExits;
TreeInfo(nanojit::Fragment* _fragment) { TreeInfo(nanojit::Fragment* _fragment) : unstableExits(NULL) {
fragment = _fragment; fragment = _fragment;
} }
~TreeInfo();
}; };
struct FrameInfo { struct FrameInfo {
@ -235,6 +243,9 @@ class TraceRecorder : public GCObject {
jsval* global_dslots; jsval* global_dslots;
JSTraceableNative* pendingTraceableNative; JSTraceableNative* pendingTraceableNative;
bool terminate; bool terminate;
intptr_t terminate_ip_adj;
nanojit::Fragment* outerToBlacklist;
nanojit::Fragment* promotedPeer;
bool isRootFragment; bool isRootFragment;
bool isGlobal(jsval* p) const; bool isGlobal(jsval* p) const;
@ -255,8 +266,10 @@ class TraceRecorder : public GCObject {
nanojit::LIns* writeBack(nanojit::LIns* i, nanojit::LIns* base, ptrdiff_t offset); nanojit::LIns* writeBack(nanojit::LIns* i, nanojit::LIns* base, ptrdiff_t offset);
void set(jsval* p, nanojit::LIns* l, bool initializing = false); void set(jsval* p, nanojit::LIns* l, bool initializing = false);
bool checkType(jsval& v, uint8 type, bool& recompile); bool checkType(jsval& v, uint8 t, jsval*& stage_val, nanojit::LIns*& stage_ins,
bool verifyTypeStability(); unsigned& stage_count);
bool deduceTypeStability(nanojit::Fragment* root_peer, nanojit::Fragment** stable_peer,
unsigned* demotes);
jsval& argval(unsigned n) const; jsval& argval(unsigned n) const;
jsval& varval(unsigned n) const; jsval& varval(unsigned n) const;
@ -340,7 +353,7 @@ public:
TraceRecorder(JSContext* cx, nanojit::SideExit*, nanojit::Fragment*, TreeInfo*, TraceRecorder(JSContext* cx, nanojit::SideExit*, nanojit::Fragment*, TreeInfo*,
unsigned ngslots, uint8* globalTypeMap, uint8* stackTypeMap, unsigned ngslots, uint8* globalTypeMap, uint8* stackTypeMap,
nanojit::SideExit* expectedInnerExit); nanojit::SideExit* expectedInnerExit, nanojit::Fragment* outerToBlacklist);
~TraceRecorder(); ~TraceRecorder();
uint8 determineSlotType(jsval* vp) const; uint8 determineSlotType(jsval* vp) const;
@ -348,11 +361,12 @@ public:
nanojit::Fragment* getFragment() const { return fragment; } nanojit::Fragment* getFragment() const { return fragment; }
bool isLoopHeader(JSContext* cx) const; bool isLoopHeader(JSContext* cx) const;
void compile(nanojit::Fragmento* fragmento); void compile(nanojit::Fragmento* fragmento);
void closeLoop(nanojit::Fragmento* fragmento); bool closeLoop(nanojit::Fragmento* fragmento, bool& demote, unsigned *demotes);
void endLoop(nanojit::Fragmento* fragmento); void endLoop(nanojit::Fragmento* fragmento);
void joinEdgesToEntry(nanojit::Fragmento* fragmento, nanojit::Fragment* peer_root);
void blacklist() { fragment->blacklist(); } void blacklist() { fragment->blacklist(); }
bool adjustCallerTypes(nanojit::Fragment* f); bool adjustCallerTypes(nanojit::Fragment* f, unsigned* demote_slots, bool& trash);
bool selectCallablePeerFragment(nanojit::Fragment** first); nanojit::Fragment* findNestedCompatiblePeer(nanojit::Fragment* f, nanojit::Fragment** empty);
void prepareTreeCall(nanojit::Fragment* inner); void prepareTreeCall(nanojit::Fragment* inner);
void emitTreeCall(nanojit::Fragment* inner, nanojit::SideExit* exit); void emitTreeCall(nanojit::Fragment* inner, nanojit::SideExit* exit);
unsigned getCallDepth() const; unsigned getCallDepth() const;
@ -365,9 +379,12 @@ public:
bool record_DefLocalFunSetSlot(uint32 slot, JSObject* obj); bool record_DefLocalFunSetSlot(uint32 slot, JSObject* obj);
bool record_FastNativeCallComplete(); bool record_FastNativeCallComplete();
nanojit::Fragment* getOuterToBlacklist() { return outerToBlacklist; }
void deepAbort() { deepAborted = true; } void deepAbort() { deepAborted = true; }
bool wasDeepAborted() { return deepAborted; } bool wasDeepAborted() { return deepAborted; }
bool walkedOutOfLoop() { return terminate; } bool walkedOutOfLoop() { return terminate; }
void setPromotedPeer(nanojit::Fragment* peer) { promotedPeer = peer; }
TreeInfo* getTreeInfo() { return treeInfo; }
#define OPDEF(op,val,name,token,length,nuses,ndefs,prec,format) \ #define OPDEF(op,val,name,token,length,nuses,ndefs,prec,format) \
bool record_##op(); bool record_##op();

Просмотреть файл

@ -204,7 +204,7 @@ namespace nanojit
} }
f->releaseTreeMem(this); f->releaseTreeMem(this);
delete f; delete f;
} }
verbose_only( enterCounts->clear();) verbose_only( enterCounts->clear();)
verbose_only( mergeCounts->clear();) verbose_only( mergeCounts->clear();)
@ -511,6 +511,12 @@ namespace nanojit
NanoAssert(_pages == 0); NanoAssert(_pages == 0);
} }
/*
 * Give this fragment a fresh chance to become hot: zero its hit counter and
 * halve (decay) its accumulated blacklist level rather than clearing it, so
 * repeatedly troublesome fragments stay partially penalized.
 */
void Fragment::resetHits()
{
    _hits = 0;
    blacklistLevel >>= 1;
}
void Fragment::blacklist() void Fragment::blacklist()
{ {
blacklistLevel++; blacklistLevel++;
@ -522,6 +528,7 @@ namespace nanojit
GC *gc = _core->gc; GC *gc = _core->gc;
Fragment *f = new (gc) Fragment(ip); Fragment *f = new (gc) Fragment(ip);
f->blacklistLevel = 5; f->blacklistLevel = 5;
f->recordAttempts = 0;
return f; return f;
} }

Просмотреть файл

@ -176,6 +176,7 @@ namespace nanojit
void setCode(NIns* codee, Page* pages) { _code = codee; _pages = pages; } void setCode(NIns* codee, Page* pages) { _code = codee; _pages = pages; }
GuardRecord* links() { return _links; } GuardRecord* links() { return _links; }
int32_t& hits() { return _hits; } int32_t& hits() { return _hits; }
void resetHits();
void blacklist(); void blacklist();
bool isBlacklisted() { return _hits < 0; } bool isBlacklisted() { return _hits < 0; }
debug_only( bool hasOnlyTreeLinks(); ) debug_only( bool hasOnlyTreeLinks(); )
@ -215,6 +216,7 @@ namespace nanojit
const void* ip; const void* ip;
uint32_t guardCount; uint32_t guardCount;
uint32_t xjumpCount; uint32_t xjumpCount;
uint32_t recordAttempts;
int32_t blacklistLevel; int32_t blacklistLevel;
NIns* fragEntry; NIns* fragEntry;
int32_t calldepth; int32_t calldepth;

Просмотреть файл

@ -172,8 +172,11 @@ namespace nanojit
NESTED_EXIT, NESTED_EXIT,
MISMATCH_EXIT, MISMATCH_EXIT,
OOM_EXIT, OOM_EXIT,
OVERFLOW_EXIT OVERFLOW_EXIT,
UNSTABLE_LOOP_EXIT
}; };
struct GuardRecord;
class LIns; class LIns;

Просмотреть файл

@ -1548,9 +1548,9 @@ function testNestedExitStackOuter() {
} }
testNestedExitStackOuter.expected = 81; testNestedExitStackOuter.expected = 81;
testNestedExitStackOuter.jitstats = { testNestedExitStackOuter.jitstats = {
recorderStarted: 3, recorderStarted: 5,
recorderAborted: 0, recorderAborted: 2,
traceTriggered: 7 traceTriggered: 9
}; };
test(testNestedExitStackOuter); test(testNestedExitStackOuter);