Merge tracemonkey to mozilla-central.

Robert Sayre 2009-05-13 23:37:16 -04:00
Parent 52d9f24ccf 292efeccc2
Commit d9910c9a12
8 changed files with 130 additions and 73 deletions

View file

@@ -142,24 +142,26 @@ struct JSTraceMonitor {
jsval *reservedDoublePool;
jsval *reservedDoublePoolPtr;
struct GlobalState globalStates[MONITOR_N_GLOBAL_STATES];
struct VMFragment* vmfragments[FRAGMENT_TABLE_SIZE];
struct GlobalState globalStates[MONITOR_N_GLOBAL_STATES];
struct VMFragment* vmfragments[FRAGMENT_TABLE_SIZE];
JSDHashTable recordAttempts;
/*
* If nonzero, do not flush the JIT cache after a deep bail. That would
* free JITted code pages that we will later return to. Instead, set
* the needFlush flag so that it can be flushed later.
*/
uintN prohibitFlush;
JSPackedBool needFlush;
/*
* Maximum size of the code cache before we start flushing. 1/16 of this
* size is used as threshold for the regular expression code cache.
*/
uint32 maxCodeCacheBytes;
/*
* If nonzero, do not flush the JIT cache after a deep bail. That would
* free JITted code pages that we will later return to. Instead, set the
* needFlush flag so that it can be flushed later.
*
* NB: needFlush and useReservedObjects are packed together.
*/
uintN prohibitFlush;
JSPackedBool needFlush;
/*
* reservedObjects is a linked list (via fslots[0]) of preallocated JSObjects.
* The JIT uses this to ensure that leaving a trace tree can't fail.
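
The prohibitFlush/needFlush pair described in the comments above is a deferred-flush protocol: while JITted code is still on the stack (during a deep bail) its code pages must not be freed, so a flush request is only recorded and carried out once the last such activation unwinds. A minimal standalone sketch of that idea, using hypothetical RequestFlush/LeaveDeepBail/ReallyFlush helpers rather than the real SpiderMonkey entry points:

struct Monitor {
    unsigned prohibitFlush;   /* > 0 while JITted code may still return here */
    bool needFlush;           /* a flush was requested but had to be deferred */
};

void ReallyFlush(Monitor* m);             /* hypothetical: frees the code cache */

void RequestFlush(Monitor* m) {
    if (m->prohibitFlush)
        m->needFlush = true;              /* code pages still in use: defer */
    else
        ReallyFlush(m);                   /* safe to free them right away */
}

void LeaveDeepBail(Monitor* m) {
    if (--m->prohibitFlush == 0 && m->needFlush) {
        m->needFlush = false;
        ReallyFlush(m);                   /* perform the deferred flush now */
    }
}
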
@@ -236,11 +238,6 @@ struct JSThreadData {
/* Property cache for faster call/get/set invocation. */
JSPropertyCache propertyCache;
/*
* N.B. JS_ON_TRACE(cx) is true if JIT code is on the stack in the current
* thread, regardless of whether cx is the context in which that trace is
* executing. cx must be a context on the current thread.
*/
#ifdef JS_TRACER
/* Trace-tree JIT recorder/interpreter state. */
JSTraceMonitor traceMonitor;
@@ -261,7 +258,7 @@ struct JSThreadData {
* that can be accessed without a global lock.
*/
struct JSThread {
/* Linked list of all contexts active on this thread. */
/* Linked list of all contexts in use on this thread. */
JSCList contextList;
/* Opaque thread-id, from NSPR's PR_GetCurrentThread(). */
@@ -276,6 +273,7 @@ struct JSThread {
/* Indicates that the thread is waiting in ClaimTitle from jslock.cpp. */
JSTitle *titleToShare;
/* Factored out of JSThread for !JS_THREADSAFE embedding in JSRuntime. */
JSThreadData data;
};
@@ -348,6 +346,20 @@ struct JSRuntime {
/* Context create/destroy callback. */
JSContextCallback cxCallback;
/*
* Shape regenerated whenever a prototype implicated by an "add property"
* property cache fill and induced trace guard has a readonly property or a
* setter defined on it. This number proxies for the shapes of all objects
* along the prototype chain of all objects in the runtime on which such an
* add-property result has been cached/traced.
*
* See bug 492355 for more details.
*
* This comes early in JSRuntime to minimize the immediate format used by
* trace-JITted code that reads it.
*/
uint32 protoHazardShape;
/* Garbage collector state, used by jsgc.c. */
JSGCChunkInfo *gcChunkList;
JSGCArenaList gcArenaList[GC_NUM_FREELISTS];
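
protoHazardShape, added above, is effectively one runtime-wide version number standing in for the shapes of every object that can act as a prototype: it moves whenever any such object gains a readonly property or a setter, so a single 32-bit compare can tell a cached or traced add-property result whether its assumptions still hold. A rough sketch of that contract, with illustrative names rather than the real JSRuntime API:

#include <stdint.h>

struct Runtime { uint32_t protoHazardShape; };   /* stand-in for JSRuntime */

/* Writer: any readonly/setter definition on a (potential) prototype. */
void NoteProtoHazard(Runtime* rt, uint32_t freshShape) {
    rt->protoHazardShape = freshShape;           /* one store invalidates all */
}

/* Reader: an add-property fast path captured the value it saw when it was
   created and only stays usable while that value has not changed since. */
bool AddFastPathStillValid(const Runtime* rt, uint32_t capturedShape) {
    return capturedShape == rt->protoHazardShape;
}
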

View file

@@ -629,8 +629,14 @@ js_GetCallObject(JSContext *cx, JSStackFrame *fp)
if (!env)
return NULL;
/* Root env. */
/* Root env before js_DefineNativeProperty (-> JSClass.addProperty). */
fp->scopeChain = env;
if (!js_DefineNativeProperty(cx, fp->scopeChain, ATOM_TO_JSID(lambdaName),
OBJECT_TO_JSVAL(fp->callee), NULL, NULL,
JSPROP_PERMANENT | JSPROP_READONLY,
0, 0, NULL)) {
return NULL;
}
}
callobj = js_NewObjectWithGivenProto(cx, &js_CallClass, NULL,
@@ -641,14 +647,6 @@ js_GetCallObject(JSContext *cx, JSStackFrame *fp)
JS_SetPrivate(cx, callobj, fp);
JS_ASSERT(fp->fun == GET_FUNCTION_PRIVATE(cx, fp->callee));
STOBJ_SET_SLOT(callobj, JSSLOT_CALLEE, OBJECT_TO_JSVAL(fp->callee));
if (lambdaName &&
!js_DefineNativeProperty(cx, fp->scopeChain, ATOM_TO_JSID(lambdaName),
OBJECT_TO_JSVAL(fp->callee), NULL, NULL,
JSPROP_PERMANENT | JSPROP_READONLY,
0, 0, NULL)) {
return NULL;
}
fp->callobj = callobj;
/*

View file

@@ -3467,8 +3467,13 @@ js_GC(JSContext *cx, JSGCInvocationKind gckind)
JS_ASSERT(!rt->gcUntracedArenaStackTop);
JS_ASSERT(rt->gcTraceLaterCount == 0);
/* Reset the property cache's type id generator so we can compress ids. */
/*
* Reset the property cache's type id generator so we can compress ids.
* Same for the protoHazardShape proxy-shape standing in for all object
* prototypes having readonly or setter properties.
*/
rt->shapeGen = 0;
rt->protoHazardShape = 0;
/*
* Mark phase.

View file

@@ -110,12 +110,12 @@ js_GenerateShape(JSContext *cx, JSBool gcLocked)
JS_REQUIRES_STACK JSPropCacheEntry *
js_FillPropertyCache(JSContext *cx, JSObject *obj,
uintN scopeIndex, uintN protoIndex, JSObject *pobj,
JSScopeProperty *sprop, JSBool addedSprop)
JSScopeProperty *sprop, JSBool adding)
{
JSPropertyCache *cache;
jsbytecode *pc;
JSScope *scope;
jsuword kshape, khash;
jsuword kshape, vshape, khash;
JSOp op;
const JSCodeSpec *cs;
jsuword vword;
@@ -157,12 +157,15 @@ js_FillPropertyCache(JSContext *cx, JSObject *obj,
* but vcap vs. scope shape tests ensure nothing malfunctions.
*/
JS_ASSERT_IF(scopeIndex == 0 && protoIndex == 0, obj == pobj);
if (protoIndex != 0) {
JSObject *tmp;
JS_ASSERT(pobj != obj);
if (protoIndex != 0) {
JSObject *tmp = obj;
for (uintN i = 0; i != scopeIndex; i++)
tmp = OBJ_GET_PARENT(cx, tmp);
JS_ASSERT(tmp != pobj);
protoIndex = 1;
tmp = obj;
for (;;) {
tmp = OBJ_GET_PROTO(cx, tmp);
@@ -180,6 +183,7 @@ js_FillPropertyCache(JSContext *cx, JSObject *obj,
++protoIndex;
}
}
if (scopeIndex > PCVCAP_SCOPEMASK || protoIndex > PCVCAP_PROTOMASK) {
PCMETER(cache->longchains++);
return JS_NO_PROP_CACHE_FILL;
@@ -256,7 +260,7 @@ js_FillPropertyCache(JSContext *cx, JSObject *obj,
} else {
/* Best we can do is to cache sprop (still a nice speedup). */
vword = SPROP_TO_PCVAL(sprop);
if (addedSprop &&
if (adding &&
sprop == scope->lastProp &&
scope->shape == sprop->shape) {
/*
@@ -294,12 +298,21 @@ js_FillPropertyCache(JSContext *cx, JSObject *obj,
if (proto && OBJ_IS_NATIVE(proto))
kshape = OBJ_SHAPE(proto);
}
/*
* When adding we predict no prototype object will later gain a
* readonly property or setter.
*/
vshape = cx->runtime->protoHazardShape;
}
}
} while (0);
if (kshape == 0)
if (kshape == 0) {
kshape = OBJ_SHAPE(obj);
vshape = scope->shape;
}
khash = PROPERTY_CACHE_HASH_PC(pc, kshape);
if (obj == pobj) {
JS_ASSERT(scopeIndex == 0 && protoIndex == 0);
@@ -312,8 +325,14 @@ js_FillPropertyCache(JSContext *cx, JSObject *obj,
pcoff = (JOF_TYPE(cs->format) == JOF_SLOTATOM) ? SLOTNO_LEN : 0;
GET_ATOM_FROM_BYTECODE(cx->fp->script, pc, pcoff, atom);
}
JS_ASSERT_IF(scopeIndex == 0,
protoIndex != 1 || OBJ_GET_PROTO(cx, obj) == pobj);
#ifdef DEBUG
if (scopeIndex == 0) {
JS_ASSERT(protoIndex != 0);
JS_ASSERT((protoIndex == 1) == (OBJ_GET_PROTO(cx, obj) == pobj));
}
#endif
if (scopeIndex != 0 || protoIndex != 1) {
khash = PROPERTY_CACHE_HASH_ATOM(atom, obj, pobj);
PCMETER(if (PCVCAP_TAG(cache->table[khash].vcap) <= 1)
@@ -339,7 +358,7 @@ js_FillPropertyCache(JSContext *cx, JSObject *obj,
PCMETER(PCVAL_IS_NULL(entry->vword) || cache->recycles++);
entry->kpc = pc;
entry->kshape = kshape;
entry->vcap = PCVCAP_MAKE(scope->shape, scopeIndex, protoIndex);
entry->vcap = PCVCAP_MAKE(vshape, scopeIndex, protoIndex);
entry->vword = vword;
cache->empty = JS_FALSE;
@@ -4590,11 +4609,13 @@ js_Interpret(JSContext *cx)
PCMETER(cache->pctestentry = entry);
PCMETER(cache->tests++);
PCMETER(cache->settests++);
if (entry->kpc == regs.pc && entry->kshape == kshape) {
JSScope *scope;
if (entry->kpc == regs.pc &&
entry->kshape == kshape &&
PCVCAP_SHAPE(entry->vcap) == rt->protoHazardShape) {
JS_ASSERT(PCVCAP_TAG(entry->vcap) == 0);
JS_LOCK_OBJ(cx, obj);
scope = OBJ_SCOPE(obj);
JSScope *scope = OBJ_SCOPE(obj);
if (scope->shape == kshape) {
JS_ASSERT(PCVAL_IS_SPROP(entry->vword));
sprop = PCVAL_TO_SPROP(entry->vword);
@@ -6303,7 +6324,11 @@ js_Interpret(JSContext *cx)
PCMETER(cache->tests++);
PCMETER(cache->initests++);
if (entry->kpc == regs.pc && entry->kshape == kshape) {
if (entry->kpc == regs.pc &&
entry->kshape == kshape &&
PCVCAP_SHAPE(entry->vcap) == rt->protoHazardShape) {
JS_ASSERT(PCVCAP_TAG(entry->vcap) == 0);
PCMETER(cache->pchits++);
PCMETER(cache->inipchits++);
@@ -6341,7 +6366,6 @@ js_Interpret(JSContext *cx)
* obj, not a proto-property, and there cannot have been
* any deletions of prior properties.
*/
JS_ASSERT(PCVCAP_MAKE(sprop->shape, 0, 0) == entry->vcap);
JS_ASSERT(!SCOPE_HAD_MIDDLE_DELETE(scope));
JS_ASSERT(!scope->table ||
!SCOPE_HAS_PROPERTY(scope, sprop));
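
Taken together, the fill-side and hit-test changes above mean an add-property cache entry now carries two shapes: kshape keys the object as it looked before the add, while vshape (packed into vcap) records the protoHazardShape value, i.e. the prediction that no prototype will later gain a readonly property or setter. A compressed sketch of both sides, with plain fields standing in for the PCVCAP_MAKE/PCVCAP_SHAPE packing:

#include <stdint.h>

struct Entry   { const void* kpc; uint32_t kshape; uint32_t vshape; };
struct Runtime { uint32_t protoHazardShape; };

/* Fill: remember the prediction alongside the key shape. */
void Fill(Entry* e, const void* pc, uint32_t kshape, const Runtime* rt) {
    e->kpc = pc;
    e->kshape = kshape;
    e->vshape = rt->protoHazardShape;   /* "no proto grows readonly/setter" */
}

/* Hit: the fast path is taken only if all three still match. */
bool Hit(const Entry* e, const void* pc, uint32_t objShape, const Runtime* rt) {
    return e->kpc == pc &&
           e->kshape == objShape &&
           e->vshape == rt->protoHazardShape;
}
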

View file

@@ -235,7 +235,7 @@ typedef struct JSInlineFrame {
#define PCVCAP_TAGMASK JS_BITMASK(PCVCAP_TAGBITS)
#define PCVCAP_TAG(t) ((t) & PCVCAP_TAGMASK)
#define PCVCAP_MAKE(t,s,p) (((t) << PCVCAP_TAGBITS) | \
#define PCVCAP_MAKE(t,s,p) ((uint32(t) << PCVCAP_TAGBITS) | \
((s) << PCVCAP_PROTOBITS) | \
(p))
#define PCVCAP_SHAPE(t) ((t) >> PCVCAP_TAGBITS)
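
PCVCAP_MAKE packs a shape together with the scope and proto indexes into one 32-bit word, and the change above casts t to uint32 so the shift is performed in unsigned 32-bit arithmetic whatever type the argument has. A toy version with placeholder bit widths (the real PCVCAP_TAGBITS/PCVCAP_PROTOBITS definitions sit earlier in jsinterp.h and are not part of this hunk):

#include <stdint.h>

enum { PROTOBITS = 4, SCOPEBITS = 4, TAGBITS = PROTOBITS + SCOPEBITS };  /* placeholders */

static inline uint32_t MakeVcap(uint32_t shape, uint32_t scopeIndex, uint32_t protoIndex) {
    return (shape << TAGBITS) | (scopeIndex << PROTOBITS) | protoIndex;
}

static inline uint32_t VcapShape(uint32_t vcap) { return vcap >> TAGBITS; }
static inline uint32_t VcapTag(uint32_t vcap)   { return vcap & ((1u << TAGBITS) - 1); }
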
@@ -351,7 +351,7 @@ typedef struct JSPropertyCache {
extern JS_REQUIRES_STACK JSPropCacheEntry *
js_FillPropertyCache(JSContext *cx, JSObject *obj,
uintN scopeIndex, uintN protoIndex, JSObject *pobj,
JSScopeProperty *sprop, JSBool addedSprop);
JSScopeProperty *sprop, JSBool adding);
/*
* Property cache lookup macros. PROPERTY_CACHE_TEST is designed to inline the

View file

@@ -286,7 +286,7 @@ js_SetProtoOrParent(JSContext *cx, JSObject *obj, uint32 slot, JSObject *pobj,
JS_ASSERT_IF(!checkForCycles, obj != pobj);
if (slot == JSSLOT_PROTO) {
JS_UNLOCK_OBJ(cx, obj);
JS_LOCK_OBJ(cx, obj);
bool ok = !!js_GetMutableScope(cx, obj);
JS_UNLOCK_OBJ(cx, obj);
if (!ok)
@@ -3746,17 +3746,19 @@ js_DefineNativeProperty(JSContext *cx, JSObject *obj, jsid id, jsval value,
#endif /* JS_HAS_GETTER_SETTER */
/*
* Purge the property cache of now-shadowed id in obj's scope chain.
* Do this early, before locking obj to avoid nesting locks.
*
* But first, purge the entire cache if obj is a prototype (we approximate
* this via OBJ_IS_DELEGATE) and we are defining a non-shadowable property
* on it (see bug 452189).
* Purge the property cache of any properties named by id that are about to
* be shadowed in obj's scope chain. We do this before locking obj to avoid
* nesting locks.
*/
js_PurgeScopeChain(cx, obj, id);
/*
* Check whether a readonly property or setter is being defined on a known
* prototype object. See the comment in jscntxt.h before protoHazardShape's
* member declaration.
*/
if (OBJ_IS_DELEGATE(cx, obj) && (attrs & (JSPROP_READONLY | JSPROP_SETTER)))
js_PurgePropertyCache(cx, &JS_PROPERTY_CACHE(cx));
else
js_PurgeScopeChain(cx, obj, id);
cx->runtime->protoHazardShape = js_GenerateShape(cx, false);
/* Lock if object locking is required by this implementation. */
JS_LOCK_OBJ(cx, obj);
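
Where the old code purged the entire property cache whenever a readonly property or setter landed on a delegate, the new code always purges just the about-to-be-shadowed id and instead bumps protoHazardShape, letting cached and traced add-property fills invalidate themselves on their next check. A rough outline of that sequence; the types and helpers below are stand-ins, not the real js_PurgeScopeChain/js_GenerateShape machinery:

#include <stdint.h>

struct Runtime { uint32_t protoHazardShape; };
struct Object;
typedef int Id;

enum { ATTR_READONLY = 1, ATTR_SETTER = 2 };     /* stand-ins for JSPROP_* */

bool     IsDelegate(const Object* obj);          /* obj sits on some proto/scope chain */
void     PurgeShadowedId(Object* obj, Id id);    /* drop cache entries naming id */
uint32_t GenerateShape(Runtime* rt);             /* fresh, never-reused number */

void BeforeDefineNativeProperty(Runtime* rt, Object* obj, Id id, unsigned attrs) {
    /* Entries caching a property named id along obj's chain are now stale. */
    PurgeShadowedId(obj, id);

    /* A readonly/setter on a prototype invalidates every cached or traced
       add-property result via the protoHazardShape comparison. */
    if (IsDelegate(obj) && (attrs & (ATTR_READONLY | ATTR_SETTER)))
        rt->protoHazardShape = GenerateShape(rt);
}
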

View file

@@ -336,13 +336,15 @@ struct JSScopeProperty {
#define SPROP_HAS_STUB_SETTER(sprop) (!(sprop)->setter)
static inline void
js_MakeScopeShapeUnique(JSContext* cx, JSScope* scope) {
js_MakeScopeShapeUnique(JSContext *cx, JSScope *scope)
{
js_LeaveTraceIfGlobalObject(cx, scope->object);
scope->shape = js_GenerateShape(cx, JS_FALSE);
}
static inline void
js_ExtendScopeShape(JSContext *cx, JSScope *scope, JSScopeProperty *sprop) {
js_ExtendScopeShape(JSContext *cx, JSScope *scope, JSScopeProperty *sprop)
{
js_LeaveTraceIfGlobalObject(cx, scope->object);
if (!scope->lastProp ||
scope->shape == scope->lastProp->shape) {

View file

@@ -3413,13 +3413,13 @@ js_StartRecorder(JSContext* cx, VMSideExit* anchor, Fragment* f, TreeInfo* ti,
VMSideExit* expectedInnerExit, jsbytecode* outer, uint32 outerArgc)
{
JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
JS_ASSERT(f->root != f || !cx->fp->imacpc);
if (JS_TRACE_MONITOR(cx).needFlush) {
FlushJITCache(cx);
return false;
}
JS_ASSERT(f->root != f || !cx->fp->imacpc);
/* start recording if no exception during construction */
tm->recorder = new (&gc) TraceRecorder(cx, anchor, f, ti,
stackSlots, ngslots, typeMap,
@@ -3776,7 +3776,11 @@ JS_REQUIRES_STACK static bool
js_AttemptToStabilizeTree(JSContext* cx, VMSideExit* exit, jsbytecode* outer, uint32 outerArgc)
{
JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
JS_ASSERT(!tm->needFlush);
if (tm->needFlush) {
FlushJITCache(cx);
return false;
}
VMFragment* from = (VMFragment*)exit->from->root;
TreeInfo* from_ti = (TreeInfo*)from->vmprivate;
@@ -3883,7 +3887,12 @@ js_AttemptToStabilizeTree(JSContext* cx, VMSideExit* exit, jsbytecode* outer, ui
static JS_REQUIRES_STACK bool
js_AttemptToExtendTree(JSContext* cx, VMSideExit* anchor, VMSideExit* exitedFrom, jsbytecode* outer)
{
JS_ASSERT(!JS_TRACE_MONITOR(cx).needFlush);
JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
if (tm->needFlush) {
FlushJITCache(cx);
return false;
}
Fragment* f = anchor->from->root;
JS_ASSERT(f->vmprivate);
TreeInfo* ti = (TreeInfo*)f->vmprivate;
@@ -4598,10 +4607,10 @@ LeaveTree(InterpState& state, VMSideExit* lr)
stack, NULL);
JS_ASSERT(unsigned(slots) == innermost->numStackSlots);
if (innermost->nativeCalleeWord) {
if (innermost->nativeCalleeWord)
SynthesizeSlowNativeFrame(cx, innermost);
cx->nativeVp = NULL;
}
cx->nativeVp = NULL;
#ifdef DEBUG
// Verify that our state restoration worked.
@@ -5282,15 +5291,16 @@ js_PurgeScriptFragments(JSContext* cx, JSScript* script)
JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
for (size_t i = 0; i < FRAGMENT_TABLE_SIZE; ++i) {
for (VMFragment **f = &(tm->vmfragments[i]); *f; ) {
VMFragment* frag = *f;
/* Disable future use of any script-associated VMFragment.*/
if (JS_UPTRDIFF((*f)->ip, script->code) < script->length) {
if (JS_UPTRDIFF(frag->ip, script->code) < script->length) {
JS_ASSERT(frag->root == frag);
debug_only_v(printf("Disconnecting VMFragment %p "
"with ip %p, in range [%p,%p).\n",
(void*)(*f), (*f)->ip, script->code,
(void*)frag, frag->ip, script->code,
script->code + script->length));
VMFragment* next = (*f)->next;
if (tm->fragmento)
tm->fragmento->clearFragment(*f);
VMFragment* next = frag->next;
js_TrashTree(cx, frag);
*f = next;
} else {
f = &((*f)->next);
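
The rewritten loop above also shows the unlink-while-iterating idiom: f points at the link that currently points to the fragment, so removal is *f = next and advancing is f = &((*f)->next), with no separate "previous" pointer. A tiny generic sketch of that idiom (Node and the matching test are made up for illustration):

struct Node { int key; Node* next; };

/* Remove every node whose key matches, using a pointer to the incoming link. */
void RemoveMatching(Node** head, int key) {
    for (Node** link = head; *link; ) {
        Node* n = *link;
        if (n->key == key) {
            *link = n->next;      /* unlink: the incoming link now skips n */
            delete n;
        } else {
            link = &n->next;      /* advance to the next incoming link */
        }
    }
}
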
@@ -7956,11 +7966,8 @@ TraceRecorder::record_SetPropHit(JSPropCacheEntry* entry, JSScopeProperty* sprop
LIns* obj_ins = get(&l);
JSScope* scope = OBJ_SCOPE(obj);
#ifdef DEBUG
JS_ASSERT(scope->object == obj);
JS_ASSERT(scope->shape == PCVCAP_SHAPE(entry->vcap));
JS_ASSERT(SCOPE_HAS_PROPERTY(scope, sprop));
#endif
if (!isValidSlot(scope, sprop))
return JSRS_STOP;
@@ -7996,10 +8003,17 @@ TraceRecorder::record_SetPropHit(JSPropCacheEntry* entry, JSScopeProperty* sprop
ABORT_TRACE("non-native map");
LIns* shape_ins = addName(lir->insLoad(LIR_ld, map_ins, offsetof(JSScope, shape)), "shape");
guard(true, addName(lir->ins2i(LIR_eq, shape_ins, entry->kshape), "guard(shape)"),
guard(true, addName(lir->ins2i(LIR_eq, shape_ins, entry->kshape), "guard(kshape)"),
BRANCH_EXIT);
if (entry->kshape != PCVCAP_SHAPE(entry->vcap)) {
uint32 vshape = PCVCAP_SHAPE(entry->vcap);
if (entry->kshape != vshape) {
LIns *vshape_ins = lir->insLoad(LIR_ld,
lir->insLoad(LIR_ldp, cx_ins, offsetof(JSContext, runtime)),
offsetof(JSRuntime, protoHazardShape));
guard(true, addName(lir->ins2i(LIR_eq, vshape_ins, vshape), "guard(vshape)"),
MISMATCH_EXIT);
LIns* args[] = { INS_CONSTPTR(sprop), obj_ins, cx_ins };
LIns* ok_ins = lir->insCall(&js_AddProperty_ci, args);
guard(false, lir->ins_eq0(ok_ins), OOM_EXIT);
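
On trace, the same two-level check becomes guards: one on the object's own shape against kshape and, when kshape differs from vshape as it does for add-property fills, a second on the runtime's protoHazardShape against the recorded vshape. A C++ rendering of the condition the emitted guards test at run time (this is the condition only, not the LIR the recorder actually emits):

#include <stdint.h>

bool SetPropGuardsHold(uint32_t objShape, uint32_t runtimeProtoHazardShape,
                       uint32_t kshape, uint32_t vshape) {
    if (objShape != kshape)                       /* guard(kshape) -> BRANCH_EXIT */
        return false;
    if (kshape != vshape &&                       /* only adds record a distinct vshape */
        runtimeProtoHazardShape != vshape)        /* guard(vshape) -> MISMATCH_EXIT */
        return false;
    return true;
}
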