Merge tracemonkey to mozilla-central.

Robert Sayre 2009-10-07 01:39:01 -04:00
Parents: 3d8f3ca274 6d65673d6c
Commit 1ed03f8f31
3449 changed files with 11814 additions and 5627 deletions

View file

@ -66,6 +66,10 @@ DIRS += jsapi-tests
endif
endif
ifdef ENABLE_TESTS
DIRS += tests
endif
MODULE = js
LIBRARY_NAME = mozjs
STATIC_LIBRARY_NAME = js_static
@ -231,6 +235,7 @@ INSTALLED_HEADERS += \
Native$(NANOJIT_ARCH).h \
RegAlloc.h \
nanojit.h \
VMPI.h \
$(NULL)
CPPSRCS += \
@ -245,6 +250,7 @@ CPPSRCS += \
avmplus.cpp \
Native$(NANOJIT_ARCH).cpp \
jsbuiltins.cpp \
VMPI.cpp \
$(NULL)
ifdef WINCE

View file

@ -5183,6 +5183,7 @@ MAKEFILES="
shell/Makefile
lirasm/Makefile
jsapi-tests/Makefile
tests/Makefile
config/Makefile
config/autoconf.mk
config/mkdepend/Makefile

View file

@ -2603,12 +2603,10 @@ JS_RemoveExternalStringFinalizer(JSStringFinalizeOp finalizer)
JS_PUBLIC_API(JSString *)
JS_NewExternalString(JSContext *cx, jschar *chars, size_t length, intN type)
{
JSString *str;
CHECK_REQUEST(cx);
JS_ASSERT((uintN) type < (uintN) (GCX_NTYPES - GCX_EXTERNAL_STRING));
JS_ASSERT(uintN(type) < JS_EXTERNAL_STRING_LIMIT);
str = js_NewGCString(cx, (uintN) type + GCX_EXTERNAL_STRING);
JSString *str = js_NewGCExternalString(cx, uintN(type));
if (!str)
return NULL;
str->initFlat(chars, length);
@ -4869,7 +4867,7 @@ JS_CompileUCFunctionForPrincipals(JSContext *cx, JSObject *obj,
#endif
out:
cx->weakRoots.newborn[JSTRACE_OBJECT] = FUN_OBJECT(fun);
cx->weakRoots.newbornObject = FUN_OBJECT(fun);
JS_POP_TEMP_ROOT(cx, &tvr);
out2:
@ -5369,6 +5367,18 @@ JS_GetStringLength(JSString *str)
return str->length();
}
JS_PUBLIC_API(const char *)
JS_GetStringBytesZ(JSContext *cx, JSString *str)
{
return js_GetStringBytes(cx, str);
}
JS_PUBLIC_API(const jschar *)
JS_GetStringCharsZ(JSContext *cx, JSString *str)
{
return js_UndependString(cx, str);
}
JS_PUBLIC_API(intN)
JS_CompareStrings(JSString *str1, JSString *str2)
{
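
The two Z-suffixed accessors added in this file take a JSContext so that failure (for example OOM while flattening or deflating the string) can be reported against it. A minimal usage sketch under that assumption; PrintString is a hypothetical helper, not part of the patch:

#include <stdio.h>
#include "jsapi.h"

/* Hypothetical helper: cx and str are assumed valid and in a request. */
static void
PrintString(JSContext *cx, JSString *str)
{
    const char *bytes = JS_GetStringBytesZ(cx, str);  /* NUL-terminated */
    if (!bytes)
        return;  /* the error was already reported on cx */
    printf("%s (%u chars)\n", bytes, (unsigned) JS_GetStringLength(str));
}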

View file

@ -584,7 +584,9 @@ JS_END_EXTERN_C
class JSAutoRequest {
public:
JSAutoRequest(JSContext *cx) : mContext(cx), mSaveDepth(0) {
JSAutoRequest(JSContext *cx JS_GUARD_OBJECT_NOTIFIER_PARAM)
: mContext(cx), mSaveDepth(0) {
JS_GUARD_OBJECT_NOTIFIER_INIT;
JS_BeginRequest(mContext);
}
~JSAutoRequest() {
@ -601,6 +603,7 @@ class JSAutoRequest {
protected:
JSContext *mContext;
jsrefcount mSaveDepth;
JS_DECL_USE_GUARD_OBJECT_NOTIFIER
#if 0
private:
@ -611,7 +614,9 @@ class JSAutoRequest {
class JSAutoSuspendRequest {
public:
JSAutoSuspendRequest(JSContext *cx) : mContext(cx), mSaveDepth(0) {
JSAutoSuspendRequest(JSContext *cx JS_GUARD_OBJECT_NOTIFIER_PARAM)
: mContext(cx), mSaveDepth(0) {
JS_GUARD_OBJECT_NOTIFIER_INIT;
if (mContext) {
mSaveDepth = JS_SuspendRequest(mContext);
}
@ -630,6 +635,7 @@ class JSAutoSuspendRequest {
protected:
JSContext *mContext;
jsrefcount mSaveDepth;
JS_DECL_USE_GUARD_OBJECT_NOTIFIER
#if 0
private:
@ -989,7 +995,9 @@ JS_END_EXTERN_C
class JSAutoLocalRootScope {
public:
JSAutoLocalRootScope(JSContext *cx) : mContext(cx) {
JSAutoLocalRootScope(JSContext *cx JS_GUARD_OBJECT_NOTIFIER_PARAM)
: mContext(cx) {
JS_GUARD_OBJECT_NOTIFIER_INIT;
JS_EnterLocalRootScope(mContext);
}
~JSAutoLocalRootScope() {
@ -1002,6 +1010,7 @@ class JSAutoLocalRootScope {
protected:
JSContext *mContext;
JS_DECL_USE_GUARD_OBJECT_NOTIFIER
#if 0
private:
@ -2418,6 +2427,12 @@ JS_GetStringChars(JSString *str);
extern JS_PUBLIC_API(size_t)
JS_GetStringLength(JSString *str);
extern JS_PUBLIC_API(const char *)
JS_GetStringBytesZ(JSContext *cx, JSString *str);
extern JS_PUBLIC_API(const jschar *)
JS_GetStringCharsZ(JSContext *cx, JSString *str);
extern JS_PUBLIC_API(intN)
JS_CompareStrings(JSString *str1, JSString *str2);
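
The JS_GUARD_OBJECT_NOTIFIER_* lines threaded through JSAutoRequest and the other RAII classes in this header are debug-only instrumentation that, as I understand it, asserts when such a guard is constructed as an unnamed temporary, since the request would then end at the semicolon rather than at the end of the enclosing scope. A stand-alone sketch of the misuse being caught, using a plain guard class rather than the real macros:

/* Sketch only: a bare RAII request guard with no notifier machinery. */
class AutoRequest {
  public:
    explicit AutoRequest(JSContext *cx) : mContext(cx) {
        JS_BeginRequest(mContext);
    }
    ~AutoRequest() { JS_EndRequest(mContext); }
  private:
    JSContext *mContext;
};

static void
Correct(JSContext *cx)
{
    AutoRequest ar(cx);      /* named local: request spans the whole body */
    /* ... JSAPI calls ... */
}

static void
Broken(JSContext *cx)
{
    (void) AutoRequest(cx);  /* temporary: the request begins and ends inside
                                this one statement; the notifier's DEBUG check
                                exists to catch exactly this pattern */
    /* ... JSAPI calls now run outside any request ... */
}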

View file

@ -327,7 +327,7 @@ ResizeSlots(JSContext *cx, JSObject *obj, uint32 oldlen, uint32 newlen)
}
slots = obj->dslots ? obj->dslots - 1 : NULL;
newslots = (jsval *) cx->realloc(slots, (newlen + 1) * sizeof(jsval));
newslots = (jsval *) cx->realloc(slots, (size_t(newlen) + 1) * sizeof(jsval));
if (!newslots)
return JS_FALSE;
@ -3391,7 +3391,7 @@ js_NewEmptyArray(JSContext* cx, JSObject* proto)
{
JS_ASSERT(OBJ_IS_ARRAY(cx, proto));
JSObject* obj = js_NewGCObject(cx, GCX_OBJECT);
JSObject* obj = js_NewGCObject(cx);
if (!obj)
return NULL;
@ -3463,7 +3463,7 @@ js_NewArrayObject(JSContext *cx, jsuint length, jsval *vector, JSBool holey)
JS_POP_TEMP_ROOT(cx, &tvr);
/* Set/clear newborn root, in case we lost it. */
cx->weakRoots.newborn[GCX_OBJECT] = obj;
cx->weakRoots.newbornObject = obj;
return obj;
}
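
The size_t cast in ResizeSlots matters when newlen is near UINT32_MAX: computed in uint32, newlen + 1 wraps to 0 before the multiply, so cx->realloc would be asked for 0 bytes instead of failing cleanly. A small demonstration, assuming a 64-bit size_t:

#include <stdint.h>
#include <stdio.h>

int
main(void)
{
    uint32_t newlen = UINT32_MAX;
    size_t wrapped = (newlen + 1) * sizeof(void *);           /* wraps to 0 */
    size_t widened = ((size_t) newlen + 1) * sizeof(void *);  /* huge, as intended */
    printf("%zu vs %zu\n", wrapped, widened);
    return 0;
}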

View file

@ -454,8 +454,7 @@ js_string_uninterner(JSDHashTable *table, JSDHashEntryHdr *hdr,
JS_ASSERT(entry->keyAndFlags != 0);
str = (JSString *)ATOM_ENTRY_KEY(entry);
/* Pass null as context. */
js_FinalizeStringRT(rt, str, js_GetExternalStringGCType(str), NULL);
js_FinalizeStringRT(rt, str);
return JS_DHASH_NEXT;
}

View file

@ -1,5 +1,5 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4; -*-
* vim: set ts=8 sw=4 et tw=99:
* vim: set ts=4 sw=4 et tw=99:
*
* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
@ -151,6 +151,24 @@ js_UnboxInt32(jsval v)
}
JS_DEFINE_CALLINFO_1(extern, INT32, js_UnboxInt32, JSVAL, 1, 1)
JSBool FASTCALL
js_TryUnboxInt32(jsval v, int32* i32p)
{
if (JS_LIKELY(JSVAL_IS_INT(v))) {
*i32p = JSVAL_TO_INT(v);
return JS_TRUE;
}
if (!JSVAL_IS_DOUBLE(v))
return JS_FALSE;
int32 i;
jsdouble d = *JSVAL_TO_DOUBLE(v);
if (!JSDOUBLE_IS_INT(d, i))
return JS_FALSE;
*i32p = i;
return JS_TRUE;
}
JS_DEFINE_CALLINFO_2(extern, BOOL, js_TryUnboxInt32, JSVAL, INT32PTR, 1, 1)
int32 FASTCALL
js_DoubleToInt32(jsdouble d)
{
@ -389,7 +407,7 @@ js_NewNullClosure(JSContext* cx, JSObject* funobj, JSObject* proto, JSObject* pa
JSFunction *fun = (JSFunction*) funobj;
JS_ASSERT(GET_FUNCTION_PRIVATE(cx, funobj) == fun);
JSObject* closure = js_NewGCObject(cx, GCX_OBJECT);
JSObject* closure = js_NewGCObject(cx);
if (!closure)
return NULL;
@ -406,6 +424,44 @@ js_NewNullClosure(JSContext* cx, JSObject* funobj, JSObject* proto, JSObject* pa
}
JS_DEFINE_CALLINFO_4(extern, OBJECT, js_NewNullClosure, CONTEXT, OBJECT, OBJECT, OBJECT, 0, 0)
JS_REQUIRES_STACK JSBool FASTCALL
js_PopInterpFrame(JSContext* cx, InterpState* state)
{
JS_ASSERT(cx->fp && cx->fp->down);
JSInlineFrame* ifp = (JSInlineFrame*)cx->fp;
/*
* Mirror frame popping code from inline_return in js_Interpret. There are
* some things we just don't want to handle. In those cases, the trace will
* MISMATCH_EXIT.
*/
if (ifp->hookData)
return JS_FALSE;
if (cx->version != ifp->callerVersion)
return JS_FALSE;
if (cx->fp->flags & JSFRAME_CONSTRUCTING)
return JS_FALSE;
if (cx->fp->imacpc)
return JS_FALSE;
/* Update display table. */
if (cx->fp->script->staticLevel < JS_DISPLAY_SIZE)
cx->display[cx->fp->script->staticLevel] = cx->fp->displaySave;
/* Pop the frame and its memory. */
cx->fp = cx->fp->down;
JS_ASSERT(cx->fp->regs == &ifp->callerRegs);
cx->fp->regs = ifp->frame.regs;
/* Don't release |ifp->mark| yet, since ExecuteTree uses |cx->stackPool|. */
state->stackMark = ifp->mark;
/* Update the inline call count. */
*state->inlineCallCountp = *state->inlineCallCountp - 1;
return JS_TRUE;
}
JS_DEFINE_CALLINFO_2(extern, BOOL, js_PopInterpFrame, CONTEXT, INTERPSTATE, 0, 0)
JSString* FASTCALL
js_ConcatN(JSContext *cx, JSString **strArray, uint32 size)
{
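
js_TryUnboxInt32 above is a fallible counterpart to js_UnboxInt32: it succeeds for int-tagged jsvals and for doubles exactly representable as an int32, and returns false otherwise instead of asserting. A self-contained sketch of the same decision logic over a toy tagged value (an assumption for illustration; SpiderMonkey's jsval is bit-packed, and JSDOUBLE_IS_INT hides the double test):

#include <math.h>
#include <stdio.h>

/* Toy tagged value standing in for jsval. */
typedef struct {
    int isInt;  /* nonzero: use i; zero: use d */
    int i;
    double d;
} Value;

static int
TryUnboxInt32(Value v, int *i32p)
{
    if (v.isInt) {
        *i32p = v.i;
        return 1;
    }
    double d = v.d;
    /* Reject NaN and out-of-int32-range values before casting. */
    if (!(d >= -2147483648.0 && d <= 2147483647.0))
        return 0;
    int i = (int) d;
    /* Reject fractions and -0.0, mirroring JSDOUBLE_IS_INT. */
    if ((double) i != d || (d == 0 && signbit(d)))
        return 0;
    *i32p = i;
    return 1;
}

int
main(void)
{
    Value a = {0, 0, 7.0}, b = {0, 0, 7.5};
    int n;
    printf("%d %d\n", TryUnboxInt32(a, &n), TryUnboxInt32(b, &n));  /* 1 0 */
    return 0;
}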

View file

@ -203,6 +203,7 @@ struct ClosureVarInfo;
#define _JS_CTYPE_INT32 _JS_CTYPE(int32, _JS_I32, "","i", INFALLIBLE)
#define _JS_CTYPE_INT32_RETRY _JS_CTYPE(int32, _JS_I32, --, --, FAIL_NEG)
#define _JS_CTYPE_INT32_FAIL _JS_CTYPE(int32, _JS_I32, --, --, FAIL_STATUS)
#define _JS_CTYPE_INT32PTR _JS_CTYPE(int32 *, _JS_PTR, --, --, INFALLIBLE)
#define _JS_CTYPE_UINT32 _JS_CTYPE(uint32, _JS_I32, "","i", INFALLIBLE)
#define _JS_CTYPE_UINT32_RETRY _JS_CTYPE(uint32, _JS_I32, --, --, FAIL_NEG)
#define _JS_CTYPE_UINT32_FAIL _JS_CTYPE(uint32, _JS_I32, --, --, FAIL_STATUS)
@ -227,6 +228,7 @@ struct ClosureVarInfo;
#define _JS_CTYPE_CHARPTR _JS_CTYPE(char *, _JS_PTR, --, --, INFALLIBLE)
#define _JS_CTYPE_APNPTR _JS_CTYPE(js_ArgsPrivateNative *, _JS_PTR, --, --, INFALLIBLE)
#define _JS_CTYPE_CVIPTR _JS_CTYPE(const ClosureVarInfo *, _JS_PTR, --, --, INFALLIBLE)
#define _JS_CTYPE_FRAMEINFO _JS_CTYPE(FrameInfo *, _JS_PTR, --, --, INFALLIBLE)
#define _JS_EXPAND(tokens) tokens
@ -502,6 +504,7 @@ JS_DECLARE_CALLINFO(js_BoxDouble)
JS_DECLARE_CALLINFO(js_BoxInt32)
JS_DECLARE_CALLINFO(js_UnboxDouble)
JS_DECLARE_CALLINFO(js_UnboxInt32)
JS_DECLARE_CALLINFO(js_TryUnboxInt32)
JS_DECLARE_CALLINFO(js_dmod)
JS_DECLARE_CALLINFO(js_imod)
JS_DECLARE_CALLINFO(js_DoubleToInt32)
@ -521,5 +524,6 @@ JS_DECLARE_CALLINFO(js_BooleanOrUndefinedToString)
JS_DECLARE_CALLINFO(js_Arguments)
JS_DECLARE_CALLINFO(js_NewNullClosure)
JS_DECLARE_CALLINFO(js_ConcatN)
JS_DECLARE_CALLINFO(js_PopInterpFrame)
#endif /* jsbuiltins_h___ */

View file

@ -116,7 +116,6 @@ PurgeThreadData(JSContext *cx, JSThreadData *data)
# ifdef JS_TRACER
JSTraceMonitor *tm = &data->traceMonitor;
tm->reservedDoublePoolPtr = tm->reservedDoublePool;
/*
* If we are about to regenerate shapes, we have to flush the JIT cache,
@ -126,12 +125,14 @@ PurgeThreadData(JSContext *cx, JSThreadData *data)
tm->needFlush = JS_TRUE;
/*
* We want to keep tm->reservedObjects after the GC. So, unless we are
* shutting down, we don't purge them here and rather mark them during
* We want to keep reserved doubles and objects after the GC. So, unless we
* are shutting down, we don't purge them here and rather mark them during
* the GC, see MarkReservedObjects in jsgc.cpp.
*/
if (cx->runtime->state == JSRTS_LANDING)
if (cx->runtime->state == JSRTS_LANDING) {
tm->reservedDoublePoolPtr = tm->reservedDoublePool;
tm->reservedObjects = NULL;
}
# endif
/* Destroy eval'ed scripts. */
@ -719,22 +720,10 @@ js_DestroyContext(JSContext *cx, JSDestroyContextMode mode)
#endif
if (last) {
/* Clear builtin functions, which are recreated on demand. */
memset(rt->builtinFunctions, 0, sizeof rt->builtinFunctions);
js_GC(cx, GC_LAST_CONTEXT);
DUMP_EVAL_CACHE_METER(cx);
DUMP_FUNCTION_METER(cx);
/*
* Free the script filename table if it exists and is empty. Do this
* after the last GC to avoid finalizers tripping on free memory.
*/
if (rt->scriptFilenameTable &&
rt->scriptFilenameTable->nentries == 0) {
js_FinishRuntimeScriptState(rt);
}
/* Take the runtime down, now that it has no contexts or atoms. */
JS_LOCK_GC(rt);
rt->state = JSRTS_DOWN;

View file

@ -126,6 +126,7 @@ struct VMFragment;
#ifdef __cplusplus
struct REHashKey;
struct REHashFn;
class FrameInfoCache;
typedef nanojit::HashMap<REHashKey, nanojit::Fragment*, REHashFn> REHashMap;
#endif
@ -154,12 +155,30 @@ struct JSTraceMonitor {
*/
JSContext *tracecx;
CLS(VMAllocator) dataAlloc; /* A chunk allocator for LIR. */
/*
* There are 3 allocators here. This might seem like overkill, but they
* have different lifecycles, and by keeping them separate we keep the
* amount of retained memory down significantly.
*
* The dataAlloc has the lifecycle of the monitor. It's flushed only
* when the monitor is flushed.
*
* The traceAlloc has the same flush lifecycle as the dataAlloc, but
* it is also *marked* when a recording starts and rewinds to the mark
* point if recording aborts. So you can put things in it that are only
* reachable on a successful record/compile cycle.
*
* The tempAlloc is flushed after each recording, successful or not.
*/
CLS(VMAllocator) dataAlloc; /* A chunk allocator for fragments. */
CLS(VMAllocator) traceAlloc; /* An allocator for trace metadata. */
CLS(VMAllocator) tempAlloc; /* A temporary chunk allocator. */
CLS(nanojit::CodeAlloc) codeAlloc; /* An allocator for native code. */
CLS(nanojit::Assembler) assembler;
CLS(nanojit::LirBuffer) lirbuf;
CLS(nanojit::LirBuffer) reLirBuf;
CLS(FrameInfoCache) frameCache;
#ifdef DEBUG
CLS(nanojit::LabelMap) labels;
#endif
@ -344,7 +363,7 @@ struct JSThread {
/* Indicates that the thread is waiting in ClaimTitle from jslock.cpp. */
JSTitle *titleToShare;
JSGCThing *gcFreeLists[GC_NUM_FREELISTS];
JSGCThing *gcFreeLists[FINALIZE_LIMIT];
/* Factored out of JSThread for !JS_THREADSAFE embedding in JSRuntime. */
JSThreadData data;
@ -431,7 +450,7 @@ struct JSRuntime {
/* Garbage collector state, used by jsgc.c. */
JSGCChunkInfo *gcChunkList;
JSGCArenaList gcArenaList[GC_NUM_FREELISTS];
JSGCArenaList gcArenaList[FINALIZE_LIMIT];
JSGCDoubleArenaList gcDoubleArenaList;
JSDHashTable gcRootsHash;
JSDHashTable *gcLocksHash;
@ -1246,20 +1265,28 @@ FrameAtomBase(JSContext *cx, JSStackFrame *fp)
class JSAutoTempValueRooter
{
public:
JSAutoTempValueRooter(JSContext *cx, size_t len, jsval *vec)
JSAutoTempValueRooter(JSContext *cx, size_t len, jsval *vec
JS_GUARD_OBJECT_NOTIFIER_PARAM)
: mContext(cx) {
JS_GUARD_OBJECT_NOTIFIER_INIT;
JS_PUSH_TEMP_ROOT(mContext, len, vec, &mTvr);
}
explicit JSAutoTempValueRooter(JSContext *cx, jsval v = JSVAL_NULL)
explicit JSAutoTempValueRooter(JSContext *cx, jsval v = JSVAL_NULL
JS_GUARD_OBJECT_NOTIFIER_PARAM)
: mContext(cx) {
JS_GUARD_OBJECT_NOTIFIER_INIT;
JS_PUSH_SINGLE_TEMP_ROOT(mContext, v, &mTvr);
}
JSAutoTempValueRooter(JSContext *cx, JSString *str)
JSAutoTempValueRooter(JSContext *cx, JSString *str
JS_GUARD_OBJECT_NOTIFIER_PARAM)
: mContext(cx) {
JS_GUARD_OBJECT_NOTIFIER_INIT;
JS_PUSH_TEMP_ROOT_STRING(mContext, str, &mTvr);
}
JSAutoTempValueRooter(JSContext *cx, JSObject *obj)
JSAutoTempValueRooter(JSContext *cx, JSObject *obj
JS_GUARD_OBJECT_NOTIFIER_PARAM)
: mContext(cx) {
JS_GUARD_OBJECT_NOTIFIER_INIT;
JS_PUSH_TEMP_ROOT_OBJECT(mContext, obj, &mTvr);
}
@ -1280,13 +1307,16 @@ class JSAutoTempValueRooter
#endif
JSTempValueRooter mTvr;
JS_DECL_USE_GUARD_OBJECT_NOTIFIER
};
class JSAutoTempIdRooter
{
public:
explicit JSAutoTempIdRooter(JSContext *cx, jsid id = INT_TO_JSID(0))
explicit JSAutoTempIdRooter(JSContext *cx, jsid id = INT_TO_JSID(0)
JS_GUARD_OBJECT_NOTIFIER_PARAM)
: mContext(cx) {
JS_GUARD_OBJECT_NOTIFIER_INIT;
JS_PUSH_SINGLE_TEMP_ROOT(mContext, ID_TO_VALUE(id), &mTvr);
}
@ -1300,11 +1330,15 @@ class JSAutoTempIdRooter
private:
JSContext *mContext;
JSTempValueRooter mTvr;
JS_DECL_USE_GUARD_OBJECT_NOTIFIER
};
class JSAutoIdArray {
public:
JSAutoIdArray(JSContext *cx, JSIdArray *ida) : cx(cx), idArray(ida) {
JSAutoIdArray(JSContext *cx, JSIdArray *ida
JS_GUARD_OBJECT_NOTIFIER_PARAM)
: cx(cx), idArray(ida) {
JS_GUARD_OBJECT_NOTIFIER_INIT;
if (ida)
JS_PUSH_TEMP_ROOT(cx, ida->length, ida->vector, &tvr);
}
@ -1329,15 +1363,18 @@ class JSAutoIdArray {
JSContext * const cx;
JSIdArray * const idArray;
JSTempValueRooter tvr;
JS_DECL_USE_GUARD_OBJECT_NOTIFIER
};
/* The auto-root for enumeration object and its state. */
class JSAutoEnumStateRooter : public JSTempValueRooter
{
public:
JSAutoEnumStateRooter(JSContext *cx, JSObject *obj, jsval *statep)
JSAutoEnumStateRooter(JSContext *cx, JSObject *obj, jsval *statep
JS_GUARD_OBJECT_NOTIFIER_PARAM)
: mContext(cx), mStatep(statep)
{
JS_GUARD_OBJECT_NOTIFIER_INIT;
JS_ASSERT(obj);
JS_ASSERT(statep);
JS_PUSH_TEMP_ROOT_COMMON(cx, obj, this, JSTVU_ENUMERATOR, object);
@ -1355,13 +1392,16 @@ class JSAutoEnumStateRooter : public JSTempValueRooter
private:
JSContext *mContext;
jsval *mStatep;
JS_DECL_USE_GUARD_OBJECT_NOTIFIER
};
class JSAutoResolveFlags
{
public:
JSAutoResolveFlags(JSContext *cx, uintN flags)
JSAutoResolveFlags(JSContext *cx, uintN flags
JS_GUARD_OBJECT_NOTIFIER_PARAM)
: mContext(cx), mSaved(cx->resolveFlags) {
JS_GUARD_OBJECT_NOTIFIER_INIT;
cx->resolveFlags = flags;
}
@ -1370,6 +1410,7 @@ class JSAutoResolveFlags
private:
JSContext *mContext;
uintN mSaved;
JS_DECL_USE_GUARD_OBJECT_NOTIFIER
};
#endif /* __cpluscplus */
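
The new comment in JSTraceMonitor describes three allocator lifetimes: dataAlloc flushes only with the monitor, traceAlloc additionally supports a mark taken at record start with a rewind on abort, and tempAlloc flushes after every recording. A sketch of that mark/rewind discipline on a toy bump allocator; this is a hypothetical stand-in, not the real VMAllocator API:

#include <stddef.h>
#include <vector>

class BumpAlloc {
  public:
    explicit BumpAlloc(size_t cap) : buf(cap), top(0) {}

    void *alloc(size_t n) {
        if (top + n > buf.size())
            return NULL;          /* the real allocator chains new chunks */
        void *p = &buf[top];
        top += n;
        return p;
    }

    size_t mark() const { return top; }  /* taken when a recording starts */
    void rewind(size_t m) { top = m; }   /* recording aborted: drop its metadata */
    void flush() { top = 0; }            /* monitor flushed: drop everything */

  private:
    std::vector<char> buf;
    size_t top;
};

Under this scheme only metadata from successful record/compile cycles survives in traceAlloc, which is the retained-memory win the comment is pointing at.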

View file

@ -2159,7 +2159,9 @@ BindNameToSlot(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
#ifdef DEBUG
JSStackFrame *caller = cg->compiler->callerFrame;
#endif
JS_ASSERT(caller);
JS_ASSERT(caller->script);
JSTreeContext *tc = cg;
while (tc->staticLevel != level)
@ -2168,10 +2170,14 @@ BindNameToSlot(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
JSCodeGenerator *evalcg = (JSCodeGenerator *) tc;
JS_ASSERT(evalcg->flags & TCF_COMPILE_N_GO);
JS_ASSERT(!(evalcg->flags & TCF_IN_FOR_INIT));
JS_ASSERT(caller->script);
JS_ASSERT(caller->fun && caller->varobj == evalcg->scopeChain);
#endif
/*
* Don't generate upvars on the left side of a for loop. See
* bug 470758 and bug 520513.
*/
if (evalcg->flags & TCF_IN_FOR_INIT)
return JS_TRUE;
if (cg->staticLevel == level) {
pn->pn_op = JSOP_GETUPVAR;
@ -6269,26 +6275,39 @@ js_EmitTree(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
case TOK_NEW:
case TOK_LP:
{
bool callop = (PN_TYPE(pn) == TOK_LP);
uintN oldflags;
/*
* Emit function call or operator new (constructor call) code.
* Emit callable invocation or operator new (constructor call) code.
* First, emit code for the left operand to evaluate the callable or
* constructable object expression.
*
* For operator new applied to other expressions than E4X ones, we emit
* JSOP_GETPROP instead of JSOP_CALLPROP, etc. This is necessary to
* interpose the lambda-initialized method read barrier -- see the code
* in jsops.cpp for JSOP_LAMBDA followed by JSOP_{SET,INIT}PROP.
*
* Then (or in a call case that has no explicit reference-base object)
* we emit JSOP_NULL as a placeholder local GC root to hold the |this|
* parameter: in the operator new case, the newborn instance; in the
* base-less call case, a cookie meaning "use the global object as the
* |this| value" (or in ES5 strict mode, "use undefined", so we should
* use JSOP_PUSH instead of JSOP_NULL -- see bug 514570).
*/
pn2 = pn->pn_head;
switch (pn2->pn_type) {
case TOK_NAME:
if (!EmitNameOp(cx, cg, pn2, JS_TRUE))
if (!EmitNameOp(cx, cg, pn2, callop))
return JS_FALSE;
break;
case TOK_DOT:
if (!EmitPropOp(cx, pn2, PN_OP(pn2), cg, JS_TRUE))
if (!EmitPropOp(cx, pn2, PN_OP(pn2), cg, callop))
return JS_FALSE;
break;
case TOK_LB:
JS_ASSERT(pn2->pn_op == JSOP_GETELEM);
if (!EmitElemOp(cx, pn2, JSOP_CALLELEM, cg))
if (!EmitElemOp(cx, pn2, callop ? JSOP_CALLELEM : JSOP_GETELEM, cg))
return JS_FALSE;
break;
case TOK_UNARYOP:
@ -6296,6 +6315,7 @@ js_EmitTree(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
if (pn2->pn_op == JSOP_XMLNAME) {
if (!EmitXMLName(cx, pn2, JSOP_CALLXMLNAME, cg))
return JS_FALSE;
callop = true; /* suppress JSOP_NULL after */
break;
}
#endif
@ -6307,9 +6327,11 @@ js_EmitTree(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
*/
if (!js_EmitTree(cx, cg, pn2))
return JS_FALSE;
if (js_Emit1(cx, cg, JSOP_NULL) < 0)
return JS_FALSE;
callop = false; /* trigger JSOP_NULL after */
break;
}
if (!callop && js_Emit1(cx, cg, JSOP_NULL) < 0)
return JS_FALSE;
/* Remember start of callable-object bytecode for decompilation hint. */
off = top;
@ -6332,6 +6354,11 @@ js_EmitTree(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
argc = pn->pn_count - 1;
if (js_Emit3(cx, cg, PN_OP(pn), ARGC_HI(argc), ARGC_LO(argc)) < 0)
return JS_FALSE;
if (PN_OP(pn) == JSOP_CALL) {
/* Add a trace hint opcode for recursion. */
if (js_Emit1(cx, cg, JSOP_TRACE) < 0)
return JS_FALSE;
}
if (PN_OP(pn) == JSOP_EVAL)
EMIT_UINT16_IMM_OP(JSOP_LINENO, pn->pn_pos.begin.lineno);
break;
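
The new js_Emit1(cx, cg, JSOP_TRACE) after each JSOP_CALL plants a monitorable opcode at the call's return point; the interpreter changes elsewhere in this merge (Monitor_EnterFrame / Monitor_LeaveFrame) rely on it to trace recursion. A toy emitter showing the shape of the change; the helper and opcode names here are hypothetical:

#include <vector>

enum ToyOp { OP_CALL = 0x10, OP_TRACE = 0x11 };

/* Mirrors js_Emit3(JSOP_CALL, ARGC_HI, ARGC_LO) followed by
 * js_Emit1(JSOP_TRACE): the hint lands where a returning callee resumes. */
static void
EmitCall(std::vector<unsigned char> &code, unsigned argc)
{
    code.push_back(OP_CALL);
    code.push_back((argc >> 8) & 0xff);  /* ARGC_HI */
    code.push_back(argc & 0xff);         /* ARGC_LO */
    code.push_back(OP_TRACE);            /* recursion trace hint */
}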

View file

@ -1410,10 +1410,14 @@ fun_resolve(JSContext *cx, JSObject *obj, jsval id, uintN flags,
fun = GET_FUNCTION_PRIVATE(cx, obj);
/*
* No need to reflect fun.prototype in 'fun.prototype = ... '.
* No need to reflect fun.prototype in 'fun.prototype = ... '. Assert that
* fun is not a compiler-created function object, which must never leak to
* script or embedding code and then be mutated.
*/
if (flags & JSRESOLVE_ASSIGNING)
if (flags & JSRESOLVE_ASSIGNING) {
JS_ASSERT(!js_IsInternalFunctionObject(obj));
return JS_TRUE;
}
/*
* Ok, check whether id is 'prototype' and bootstrap the function object's
@ -1421,7 +1425,7 @@ fun_resolve(JSContext *cx, JSObject *obj, jsval id, uintN flags,
*/
atom = cx->runtime->atomState.classPrototypeAtom;
if (id == ATOM_KEY(atom)) {
JSObject *proto;
JS_ASSERT(!js_IsInternalFunctionObject(obj));
/*
* Beware of the wacky case of a user function named Object -- trying
@ -1434,7 +1438,8 @@ fun_resolve(JSContext *cx, JSObject *obj, jsval id, uintN flags,
* Make the prototype object to have the same parent as the function
* object itself.
*/
proto = js_NewObject(cx, &js_ObjectClass, NULL, OBJ_GET_PARENT(cx, obj));
JSObject *proto =
js_NewObject(cx, &js_ObjectClass, NULL, OBJ_GET_PARENT(cx, obj));
if (!proto)
return JS_FALSE;
@ -1457,6 +1462,8 @@ fun_resolve(JSContext *cx, JSObject *obj, jsval id, uintN flags,
atom = OFFSET_TO_ATOM(cx->runtime, lfp->atomOffset);
if (id == ATOM_KEY(atom)) {
JS_ASSERT(!js_IsInternalFunctionObject(obj));
if (!js_DefineNativeProperty(cx, obj,
ATOM_TO_JSID(atom), JSVAL_VOID,
fun_getProperty, JS_PropertyStub,

View file

@ -1,4 +1,4 @@
/* -*- Mode: C; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
*
* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
@ -107,7 +107,7 @@ typedef union JSLocalNames {
#define JSFUN_KINDMASK 0xc000 /* encode interp vs. native and closure
optimization level -- see above */
#define FUN_OBJECT(fun) (&(fun)->object)
#define FUN_OBJECT(fun) (static_cast<JSObject *>(fun))
#define FUN_KIND(fun) ((fun)->flags & JSFUN_KINDMASK)
#define FUN_SET_KIND(fun,k) ((fun)->flags = ((fun)->flags & ~JSFUN_KINDMASK) | (k))
#define FUN_INTERPRETED(fun) (FUN_KIND(fun) >= JSFUN_INTERPRETED)
@ -128,8 +128,7 @@ typedef union JSLocalNames {
JS_ASSERT((fun)->flags & JSFUN_TRCINFO), \
fun->u.n.trcinfo)
struct JSFunction {
JSObject object; /* GC'ed object header */
struct JSFunction : public JSObject {
uint16 nargs; /* maximum number of specified arguments,
reflected as f.length/f.arity */
uint16 flags; /* flags, see JSFUN_* below and in jsapi.h */
@ -161,8 +160,8 @@ struct JSFunction {
} u;
JSAtom *atom; /* name for diagnostics and decompiling */
bool optimizedClosure() { return FUN_KIND(this) > JSFUN_INTERPRETED; }
bool needsWrapper() { return FUN_NULL_CLOSURE(this) && u.i.skipmin != 0; }
bool optimizedClosure() const { return FUN_KIND(this) > JSFUN_INTERPRETED; }
bool needsWrapper() const { return FUN_NULL_CLOSURE(this) && u.i.skipmin != 0; }
uintN countArgsAndVars() const {
JS_ASSERT(FUN_INTERPRETED(this));
@ -222,6 +221,19 @@ extern JS_FRIEND_DATA(JSClass) js_FunctionClass;
(JS_ASSERT(HAS_FUNCTION_CLASS(funobj)), \
(JSFunction *) (funobj)->getPrivate())
/*
* Return true if this is a compiler-created internal function accessed by
* its own object. Such a function object must not be accessible to script
* or embedding code.
*/
inline bool
js_IsInternalFunctionObject(JSObject *funobj)
{
JS_ASSERT(HAS_FUNCTION_CLASS(funobj));
JSFunction *fun = (JSFunction *) funobj->getPrivate();
return funobj == fun && (fun->flags & JSFUN_LAMBDA) && !funobj->getParent();
}
struct js_ArgsPrivateNative;
inline js_ArgsPrivateNative *

File diff suppressed because it is too large

View file

@ -58,30 +58,7 @@ JS_BEGIN_EXTERN_C
*/
#define JSTRACE_LIMIT 4
/*
* We use the trace kinds as the types for all GC things except external
* strings.
*/
#define GCX_OBJECT JSTRACE_OBJECT /* JSObject */
#define GCX_DOUBLE JSTRACE_DOUBLE /* jsdouble */
#define GCX_STRING JSTRACE_STRING /* JSString */
#define GCX_XML JSTRACE_XML /* JSXML */
#define GCX_EXTERNAL_STRING JSTRACE_LIMIT /* JSString with external
chars */
/*
* The number of defined GC types and the maximum limit for the number of
* possible GC types.
*/
#define GCX_NTYPES (GCX_EXTERNAL_STRING + 8)
#define GCX_LIMIT_LOG2 4 /* type index bits */
#define GCX_LIMIT JS_BIT(GCX_LIMIT_LOG2)
/* GC flag definitions, must fit in 8 bits (type index goes in the low bits). */
#define GCF_TYPEMASK JS_BITMASK(GCX_LIMIT_LOG2)
#define GCF_MARK JS_BIT(GCX_LIMIT_LOG2)
#define GCF_FINAL JS_BIT(GCX_LIMIT_LOG2 + 1)
#define GCF_LOCKSHIFT (GCX_LIMIT_LOG2 + 2) /* lock bit shift */
#define GCF_LOCK JS_BIT(GCF_LOCKSHIFT) /* lock request bit in API */
const uintN JS_EXTERNAL_STRING_LIMIT = 8;
/*
* Get the type of the external string or -1 if the string was not created
@ -149,19 +126,6 @@ typedef struct JSPtrTable {
extern JSBool
js_RegisterCloseableIterator(JSContext *cx, JSObject *obj);
/*
* The private JSGCThing struct, which describes a gcFreeList element.
*/
struct JSGCThing {
JSGCThing *next;
uint8 *flagp;
};
#define GC_NBYTES_MAX (10 * sizeof(JSGCThing))
#define GC_NUM_FREELISTS (GC_NBYTES_MAX / sizeof(JSGCThing))
#define GC_FREELIST_NBYTES(i) (((i) + 1) * sizeof(JSGCThing))
#define GC_FREELIST_INDEX(n) (((n) / sizeof(JSGCThing)) - 1)
/*
* Allocates a new GC thing of the given size. After a successful allocation
* the caller must fully initialize the thing before calling any function that
@ -169,16 +133,19 @@ struct JSGCThing {
* values stored in the partially initialized thing.
*/
extern JSObject*
js_NewGCObject(JSContext *cx, uintN flags);
js_NewGCObject(JSContext *cx);
extern JSString*
js_NewGCString(JSContext *cx, uintN flags);
js_NewGCString(JSContext *cx);
extern JSString*
js_NewGCExternalString(JSContext *cx, uintN type);
extern JSFunction*
js_NewGCFunction(JSContext *cx, uintN flags);
js_NewGCFunction(JSContext *cx);
extern JSXML*
js_NewGCXML(JSContext *cx, uintN flags);
js_NewGCXML(JSContext *cx);
/*
* Allocate a new double jsval and store the result in *vp. vp must be a root.
@ -284,6 +251,36 @@ typedef enum JSGCInvocationKind {
extern void
js_GC(JSContext *cx, JSGCInvocationKind gckind);
/*
* The kind of GC thing with a finalizer. The external strings follow the
* ordinary string to simplify js_GetExternalStringGCType.
*/
enum JSFinalizeGCThingKind {
FINALIZE_OBJECT,
FINALIZE_FUNCTION,
#if JS_HAS_XML_SUPPORT
FINALIZE_XML,
#endif
FINALIZE_STRING,
FINALIZE_EXTERNAL_STRING0,
FINALIZE_EXTERNAL_STRING1,
FINALIZE_EXTERNAL_STRING2,
FINALIZE_EXTERNAL_STRING3,
FINALIZE_EXTERNAL_STRING4,
FINALIZE_EXTERNAL_STRING5,
FINALIZE_EXTERNAL_STRING6,
FINALIZE_EXTERNAL_STRING7,
FINALIZE_EXTERNAL_STRING_LAST = FINALIZE_EXTERNAL_STRING7,
FINALIZE_LIMIT
};
static inline bool
IsFinalizableStringKind(unsigned thingKind)
{
return unsigned(FINALIZE_STRING) <= thingKind &&
thingKind <= unsigned(FINALIZE_EXTERNAL_STRING_LAST);
}
typedef struct JSGCArenaInfo JSGCArenaInfo;
typedef struct JSGCArenaList JSGCArenaList;
typedef struct JSGCChunkInfo JSGCChunkInfo;
@ -292,6 +289,7 @@ struct JSGCArenaList {
JSGCArenaInfo *last; /* last allocated GC arena */
uint32 lastCount; /* number of allocated things in the last
arena */
uint32 thingKind; /* one of JSFinalizeGCThingKind */
uint32 thingSize; /* size of things to allocate on this list
*/
JSGCThing *freeList; /* list of free GC things */
@ -315,7 +313,13 @@ js_DestroyScriptsToGC(JSContext *cx, JSThreadData *data);
struct JSWeakRoots {
/* Most recently created things by type, members of the GC's root set. */
void *newborn[GCX_NTYPES];
JSObject *newbornObject;
jsdouble *newbornDouble;
JSString *newbornString;
#if JS_HAS_XML_SUPPORT
JSXML *newbornXML;
#endif
JSString *newbornExternalString[JS_EXTERNAL_STRING_LIMIT];
/* Atom root for the last-looked-up atom on this context. */
jsval lastAtom;
@ -348,15 +352,8 @@ class JSFreePointerListTask : public JSBackgroundTask {
};
#endif
/*
* Free the chars held by str when it is finalized by the GC. When type is
* less then zero, it denotes an internal string. Otherwise it denotes the
* type of the external string allocated with JS_NewExternalString.
*
* This function always needs rt but can live with null cx.
*/
extern void
js_FinalizeStringRT(JSRuntime *rt, JSString *str, intN type, JSContext *cx);
js_FinalizeStringRT(JSRuntime *rt, JSString *str);
#ifdef DEBUG_notme
#define JS_GCMETER 1
@ -405,7 +402,7 @@ typedef struct JSGCStats {
uint32 closelater; /* number of close hooks scheduled to run */
uint32 maxcloselater; /* max number of close hooks scheduled to run */
JSGCArenaStats arenaStats[GC_NUM_FREELISTS];
JSGCArenaStats arenaStats[FINALIZE_LIST_LIMIT];
JSGCArenaStats doubleArenaStats;
} JSGCStats;
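
The enum above places FINALIZE_EXTERNAL_STRING0 through 7 directly after FINALIZE_STRING, matching JS_EXTERNAL_STRING_LIMIT = 8 declared earlier in this header. A plausible mapping from an external string's type index to its finalize kind; the helper name is hypothetical, and the real logic lives in jsgc.cpp:

/* Sketch, assuming type < JS_EXTERNAL_STRING_LIMIT as the new
 * js_NewGCExternalString asserts; the contiguous enum layout reduces the
 * mapping to one addition. */
static inline JSFinalizeGCThingKind
ExternalStringFinalizeKind(uintN type)
{
    JS_ASSERT(type < JS_EXTERNAL_STRING_LIMIT);
    return JSFinalizeGCThingKind(FINALIZE_EXTERNAL_STRING0 + type);
}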

View file

@ -2787,12 +2787,6 @@ js_Interpret(JSContext *cx)
#endif /* !JS_THREADED_INTERP */
#ifdef JS_TRACER
/* We cannot reenter the interpreter while recording. */
if (TRACE_RECORDER(cx))
js_AbortRecording(cx, "attempt to reenter interpreter while recording");
#endif
/* Check for too deep of a native thread stack. */
JS_CHECK_RECURSION(cx, return JS_FALSE);
@ -2854,14 +2848,8 @@ js_Interpret(JSContext *cx)
#define MONITOR_BRANCH_TRACEVIS
#endif
#define MONITOR_BRANCH() \
#define RESTORE_INTERP_VARS() \
JS_BEGIN_MACRO \
if (TRACING_ENABLED(cx)) { \
if (js_MonitorLoopEdge(cx, inlineCallCount)) { \
JS_ASSERT(TRACE_RECORDER(cx)); \
MONITOR_BRANCH_TRACEVIS; \
ENABLE_INTERRUPTS(); \
} \
fp = cx->fp; \
script = fp->script; \
atoms = FrameAtomBase(cx, fp); \
@ -2869,12 +2857,23 @@ js_Interpret(JSContext *cx)
JS_ASSERT(fp->regs == &regs); \
if (cx->throwing) \
goto error; \
JS_END_MACRO
#define MONITOR_BRANCH(reason) \
JS_BEGIN_MACRO \
if (TRACING_ENABLED(cx)) { \
if (js_MonitorLoopEdge(cx, inlineCallCount, reason)) { \
JS_ASSERT(TRACE_RECORDER(cx)); \
MONITOR_BRANCH_TRACEVIS; \
ENABLE_INTERRUPTS(); \
} \
RESTORE_INTERP_VARS(); \
} \
JS_END_MACRO
#else /* !JS_TRACER */
#define MONITOR_BRANCH() ((void) 0)
#define MONITOR_BRANCH(reason) ((void) 0)
#endif /* !JS_TRACER */
@ -2900,13 +2899,13 @@ js_Interpret(JSContext *cx)
CHECK_BRANCH(); \
if (op == JSOP_NOP) { \
if (TRACE_RECORDER(cx)) { \
MONITOR_BRANCH(); \
MONITOR_BRANCH(Monitor_Branch); \
op = (JSOp) *regs.pc; \
} else { \
op = (JSOp) *++regs.pc; \
} \
} else if (op == JSOP_TRACE) { \
MONITOR_BRANCH(); \
MONITOR_BRANCH(Monitor_Branch); \
op = (JSOp) *regs.pc; \
} \
} \
@ -2989,6 +2988,15 @@ js_Interpret(JSContext *cx)
}
#endif /* JS_HAS_GENERATORS */
#ifdef JS_TRACER
/*
* We cannot reenter the interpreter while recording; wait to abort until
* after cx->fp->regs is set.
*/
if (TRACE_RECORDER(cx))
js_AbortRecording(cx, "attempt to reenter interpreter while recording");
#endif
/*
* It is important that "op" be initialized before calling DO_OP because
* it is possible for "op" to be specially assigned during the normal

View file

@ -762,10 +762,6 @@ js_FinishRuntimeNumberState(JSContext *cx)
{
JSRuntime *rt = cx->runtime;
js_UnlockGCThingRT(rt, rt->jsNaN);
js_UnlockGCThingRT(rt, rt->jsNegativeInfinity);
js_UnlockGCThingRT(rt, rt->jsPositiveInfinity);
rt->jsNaN = NULL;
rt->jsNegativeInfinity = NULL;
rt->jsPositiveInfinity = NULL;

View file

@ -2216,14 +2216,14 @@ js_NewObjectWithGivenProto(JSContext *cx, JSClass *clasp, JSObject *proto,
*/
JSObject* obj;
if (clasp == &js_FunctionClass && !objectSize) {
obj = (JSObject*) js_NewGCFunction(cx, GCX_OBJECT);
obj = (JSObject*) js_NewGCFunction(cx);
#ifdef DEBUG
memset((uint8 *) obj + sizeof(JSObject), JS_FREE_PATTERN,
sizeof(JSFunction) - sizeof(JSObject));
#endif
} else {
JS_ASSERT(!objectSize || objectSize == sizeof(JSObject));
obj = js_NewGCObject(cx, GCX_OBJECT);
obj = js_NewGCObject(cx);
}
if (!obj)
goto out;
@ -2248,7 +2248,7 @@ js_NewObjectWithGivenProto(JSContext *cx, JSClass *clasp, JSObject *proto,
}
/* Check that the newborn root still holds the object. */
JS_ASSERT_IF(!cx->localRootStack, cx->weakRoots.newborn[GCX_OBJECT] == obj);
JS_ASSERT_IF(!cx->localRootStack, cx->weakRoots.newbornObject == obj);
/*
* Do not call debug hooks on trace, because we might be in a non-_FAIL
@ -2260,7 +2260,7 @@ js_NewObjectWithGivenProto(JSContext *cx, JSClass *clasp, JSObject *proto,
cx->debugHooks->objectHook(cx, obj, JS_TRUE,
cx->debugHooks->objectHookData);
JS_UNKEEP_ATOMS(cx->runtime);
cx->weakRoots.newborn[GCX_OBJECT] = obj;
cx->weakRoots.newbornObject = obj;
}
out:
@ -2325,7 +2325,7 @@ NewNativeObject(JSContext* cx, JSClass* clasp, JSObject* proto,
JSObject *parent, jsval privateSlotValue)
{
JS_ASSERT(JS_ON_TRACE(cx));
JSObject* obj = js_NewGCObject(cx, GCX_OBJECT);
JSObject* obj = js_NewGCObject(cx);
if (!obj)
return NULL;
@ -2664,7 +2664,7 @@ js_CloneBlockObject(JSContext *cx, JSObject *proto, JSStackFrame *fp)
JS_ASSERT(!OBJ_IS_CLONED_BLOCK(proto));
JS_ASSERT(STOBJ_GET_CLASS(proto) == &js_BlockClass);
JSObject *clone = js_NewGCObject(cx, GCX_OBJECT);
JSObject *clone = js_NewGCObject(cx);
if (!clone)
return NULL;
@ -3264,7 +3264,7 @@ js_NewNativeObject(JSContext *cx, JSClass *clasp, JSObject *proto,
JS_ASSERT(proto->map->ops == &js_ObjectOps);
JS_ASSERT(OBJ_GET_CLASS(cx, proto) == clasp);
JSObject* obj = js_NewGCObject(cx, GCX_OBJECT);
JSObject* obj = js_NewGCObject(cx);
if (!obj)
return NULL;
@ -3852,11 +3852,11 @@ js_DefineNativeProperty(JSContext *cx, JSObject *obj, jsid id, jsval value,
}
}
added = !scope->lookup(id);
sprop = scope->add(cx, id, getter, setter, SPROP_INVALID_SLOT, attrs,
flags, shortid);
if (!sprop)
goto error;
added = true;
}
/* Store value before calling addProperty, in case the latter GC's. */
@ -5513,7 +5513,7 @@ js_GetClassPrototype(JSContext *cx, JSObject *scope, jsid id,
* instance that delegates to this object, or just query the
* prototype for its class.
*/
cx->weakRoots.newborn[GCX_OBJECT] = JSVAL_TO_GCTHING(v);
cx->weakRoots.newbornObject = JSVAL_TO_OBJECT(v);
}
}
*protop = JSVAL_IS_OBJECT(v) ? JSVAL_TO_OBJECT(v) : NULL;

View file

@ -4099,6 +4099,9 @@ Decompile(SprintStack *ss, jsbytecode *pc, intN nb, JSOp nextop)
* compiler optimizes that to |if (true)|.
*/
pc2 = pc + len;
op = JSOp(*pc2);
if (op == JSOP_TRACE || op == JSOP_NOP)
pc2 += JSOP_NOP_LENGTH;
LOCAL_ASSERT(pc2 < endpc ||
endpc < outer->code + outer->length);
LOCAL_ASSERT(ss2.top == 1);

View file

@ -76,21 +76,23 @@
#ifdef JS_TRACER
TraceRecorder* tr = TRACE_RECORDER(cx);
if (tr) {
JSRecordingStatus status = TraceRecorder::monitorRecording(cx, tr, op);
AbortableRecordingStatus status = TraceRecorder::monitorRecording(cx, tr, op);
switch (status) {
case JSRS_CONTINUE:
case ARECORD_CONTINUE:
moreInterrupts = true;
break;
case JSRS_IMACRO:
case ARECORD_IMACRO:
atoms = COMMON_ATOMS_START(&rt->atomState);
op = JSOp(*regs.pc);
DO_OP(); /* keep interrupting for op. */
break;
case JSRS_ERROR:
case ARECORD_ERROR:
// The code at 'error:' aborts the recording.
goto error;
case JSRS_STOP:
case ARECORD_ABORTED:
break;
case ARECORD_STOP:
/* A 'stop' error should have already aborted recording. */
default:
JS_NOT_REACHED("Bad recording status");
}
@ -168,13 +170,13 @@
goto error;
/*
* We must ensure that different "with" blocks have different
* stack depth associated with them. This allows the try handler
* search to properly recover the scope chain. Thus we must keep
* the stack at least at the current level.
* We must ensure that different "with" blocks have different stack depth
* associated with them. This allows the try handler search to properly
* recover the scope chain. Thus we must keep the stack at least at the
* current level.
*
* We set sp[-1] to the current "with" object to help asserting
* the enter/leave balance in [leavewith].
* We set sp[-1] to the current "with" object to help asserting the
* enter/leave balance in [leavewith].
*/
regs.sp[-1] = OBJECT_TO_JSVAL(fp->scopeChain);
END_CASE(JSOP_ENTERWITH)
@ -192,16 +194,16 @@
BEGIN_CASE(JSOP_RETRVAL) /* fp->rval already set */
BEGIN_CASE(JSOP_STOP)
/*
* When the inlined frame exits with an exception or an error, ok
* will be false after the inline_return label.
* When the inlined frame exits with an exception or an error, ok will be
* false after the inline_return label.
*/
ASSERT_NOT_THROWING(cx);
CHECK_BRANCH();
if (fp->imacpc) {
/*
* If we are at the end of an imacro, return to its caller in
* the current frame.
* If we are at the end of an imacro, return to its caller in the
* current frame.
*/
JS_ASSERT(op == JSOP_STOP);
@ -245,8 +247,8 @@
hook = cx->debugHooks->callHook;
if (hook) {
/*
* Do not pass &ok directly as exposing the address
* inhibits optimizations and uninitialised warnings.
* Do not pass &ok directly as exposing the address inhibits
* optimizations and uninitialised warnings.
*/
status = ok;
hook(cx, fp, JS_FALSE, &status, hookData);
@ -257,9 +259,9 @@
/*
* If fp has a call object, sync values and clear the back-
* pointer. This can happen for a lightweight function if it
* calls eval unexpectedly (in a way that is hidden from the
* compiler). See bug 325540.
* pointer. This can happen for a lightweight function if it calls eval
* unexpectedly (in a way that is hidden from the compiler). See bug
* 325540.
*/
fp->putActivationObjects(cx);
@ -279,9 +281,8 @@
}
/*
* If inline-constructing, replace primitive rval with the new
* object passed in via |this|, and instrument this constructor
* invocation
* If inline-constructing, replace primitive rval with the new object
* passed in via |this|, and instrument this constructor invocation.
*/
if (fp->flags & JSFRAME_CONSTRUCTING) {
if (JSVAL_IS_PRIMITIVE(fp->rval))
@ -296,6 +297,8 @@
regs.sp -= 1 + (size_t) ifp->frame.argc;
regs.sp[-1] = fp->rval;
bool recursive = fp->script == fp->down->script;
/* Restore cx->fp and release the inline frame's space. */
cx->fp = fp = fp->down;
JS_ASSERT(fp->regs == &ifp->callerRegs);
@ -309,10 +312,31 @@
/* Resume execution in the calling frame. */
inlineCallCount--;
if (JS_LIKELY(ok)) {
TRACE_0(LeaveFrame);
JS_ASSERT(js_CodeSpec[js_GetOpcode(cx, script, regs.pc)].length
== JSOP_CALL_LENGTH);
#ifdef DEBUG
JSOp traceOp = js_GetOpcode(cx, script, regs.pc +
JSOP_CALL_LENGTH);
JS_ASSERT_IF(*regs.pc == JSOP_CALL && !fp->imacpc,
traceOp == JSOP_TRACE || traceOp == JSOP_NOP);
#endif
TRACE_0(LeaveFrame);
if (!TRACE_RECORDER(cx) && recursive) {
if (*(regs.pc + JSOP_CALL_LENGTH) == JSOP_TRACE) {
regs.pc += JSOP_CALL_LENGTH;
MONITOR_BRANCH(Monitor_LeaveFrame);
op = (JSOp)*regs.pc;
DO_OP();
}
}
if (*(regs.pc + JSOP_CALL_LENGTH) == JSOP_TRACE ||
*(regs.pc + JSOP_CALL_LENGTH) == JSOP_NOP) {
JS_STATIC_ASSERT(JSOP_TRACE_LENGTH == JSOP_NOP_LENGTH);
regs.pc += JSOP_CALL_LENGTH;
len = JSOP_TRACE_LENGTH;
} else {
len = JSOP_CALL_LENGTH;
}
DO_NEXT_OP(len);
}
goto error;
@ -528,10 +552,10 @@
BEGIN_CASE(JSOP_FORELEM)
/*
* JSOP_FORELEM simply dups the property identifier at top of stack
* and lets the subsequent JSOP_ENUMELEM opcode sequence handle the
* left-hand side expression evaluation and assignment. This opcode
* exists solely to help the decompiler.
* JSOP_FORELEM simply dups the property identifier at top of stack and
* lets the subsequent JSOP_ENUMELEM opcode sequence handle the left-hand
* side expression evaluation and assignment. This opcode exists solely to
* help the decompiler.
*/
JS_ASSERT(regs.sp - 2 >= StackBase(fp));
rval = FETCH_OPND(-1);
@ -680,20 +704,20 @@
JSPropCacheEntry *entry;
/*
* We can skip the property lookup for the global object. If
* the property does not exist anywhere on the scope chain,
* JSOP_SETNAME adds the property to the global.
* We can skip the property lookup for the global object. If the
* property does not exist anywhere on the scope chain, JSOP_SETNAME
* adds the property to the global.
*
* As a consequence of this optimization for the global object
* we run its JSRESOLVE_ASSIGNING-tolerant resolve hooks only
* in JSOP_SETNAME, after the interpreter evaluates the right-
* hand-side of the assignment, and not here.
* As a consequence of this optimization for the global object we run
* its JSRESOLVE_ASSIGNING-tolerant resolve hooks only in JSOP_SETNAME,
* after the interpreter evaluates the right-hand-side of the
* assignment, and not here.
*
* This should be transparent to the hooks because the script,
* instead of name = rhs, could have used global.name = rhs
* given a global object reference, which also calls the hooks
* only after evaluating the rhs. We desire such resolve hook
* equivalence between the two forms.
* This should be transparent to the hooks because the script, instead
* of name = rhs, could have used global.name = rhs given a global
* object reference, which also calls the hooks only after evaluating
* the rhs. We desire such resolve hook equivalence between the two
* forms.
*/
obj = fp->scopeChain;
if (!OBJ_GET_PARENT(cx, obj))
@ -1007,9 +1031,9 @@
/*
* This instruction can be executed in three contexts. (1) is normal
* execution. (2) is while recording, during an imacro 'imacop'.
* (3) is during a failed recording or when trace execution aborts
* during a recorded imacro.
* execution. (2) is while recording, during an imacro 'imacop'. (3) is
* during a failed recording or when trace execution aborts during a
* recorded imacro.
* 1. !imacro : N args on stack, pc is regs.pc
* 2. imacro && recording : N args on stack, pc is fp->imacpc
* 3. imacro && !recording : N+2 args on stack, pc is fp->imacpc
@ -1207,8 +1231,8 @@
BEGIN_CASE(JSOP_ELEMINC)
BEGIN_CASE(JSOP_ELEMDEC)
/*
* Delay fetching of id until we have the object to ensure
* the proper evaluation order. See bug 372331.
* Delay fetching of id until we have the object to ensure the proper
* evaluation order. See bug 372331.
*/
id = 0;
i = -2;
@ -1549,9 +1573,9 @@
jsuint length;
/*
* We know that the array is created with only its 'length'
* private data in a fixed slot at JSSLOT_ARRAY_LENGTH. See
* also JSOP_ARRAYPUSH, far below.
* We know that the array is created with only its 'length' private
* data in a fixed slot at JSSLOT_ARRAY_LENGTH. See also
* JSOP_ARRAYPUSH, far below.
*/
length = obj->fslots[JSSLOT_ARRAY_LENGTH];
if (length <= JSVAL_INT_MAX) {
@ -1686,23 +1710,23 @@
uint32 kshape = OBJ_SHAPE(obj);
/*
* Open-code PROPERTY_CACHE_TEST, specializing for two
* important set-property cases. First:
* Open-code PROPERTY_CACHE_TEST, specializing for two important
* set-property cases. First:
*
* function f(a, b, c) {
* var o = {p:a, q:b, r:c};
* return o;
* }
*
* or similar real-world cases, which evolve a newborn
* native object predictably through some bounded number
* of property additions. And second:
* or similar real-world cases, which evolve a newborn native
* object predictably through some bounded number of property
* additions. And second:
*
* o.p = x;
*
* in a frequently executed method or loop body, where p
* will (possibly after the first iteration) always exist
* in native object o.
* in a frequently executed method or loop body, where p will
* (possibly after the first iteration) always exist in native
* object o.
*/
entry = &cache->table[PROPERTY_CACHE_HASH_PC(regs.pc, kshape)];
PCMETER(cache->pctestentry = entry);
@ -1721,11 +1745,10 @@
JS_ASSERT(!scope->sealed());
/*
* Fastest path: check whether the cached sprop is
* already in scope and call NATIVE_SET and break
* to get out of the do-while(0). But we can call
* NATIVE_SET only if obj owns scope or sprop is
* shared.
* Fastest path: check whether the cached sprop is already
* in scope and call NATIVE_SET and break to get out of the
* do-while(0). But we can call NATIVE_SET only if obj owns
* scope or sprop is shared.
*/
bool checkForAdd;
if (sprop->attrs & JSPROP_SHARED) {
@ -1764,15 +1787,14 @@
SPROP_HAS_STUB_SETTER(sprop) &&
(slot = sprop->slot) == scope->freeslot) {
/*
* Fast path: adding a plain old property that
* was once at the frontier of the property
* tree, whose slot is next to claim among the
* allocated slots in obj, where scope->table
* has not been created yet.
* Fast path: adding a plain old property that was once
* at the frontier of the property tree, whose slot is
* next to claim among the allocated slots in obj,
* where scope->table has not been created yet.
*
* We may want to remove hazard conditions
* above and inline compensation code here,
* depending on real-world workloads.
* We may want to remove hazard conditions above and
* inline compensation code here, depending on
* real-world workloads.
*/
JS_ASSERT(!(obj->getClass()->flags &
JSCLASS_SHARE_ALL_PROPERTIES));
@ -1781,9 +1803,9 @@
PCMETER(cache->addpchits++);
/*
* Beware classes such as Function that use
* the reserveSlots hook to allocate a number
* of reserved slots that may vary with obj.
* Beware classes such as Function that use the
* reserveSlots hook to allocate a number of reserved
* slots that may vary with obj.
*/
if (slot < STOBJ_NSLOTS(obj) &&
!OBJ_GET_CLASS(cx, obj)->reserveSlots) {
@ -1796,15 +1818,14 @@
}
/*
* If this obj's number of reserved slots
* differed, or if something created a hash
* table for scope, we must pay the price of
* JSScope::add.
* If this obj's number of reserved slots differed, or
* if something created a hash table for scope, we must
* pay the price of JSScope::add.
*
* If slot does not match the cached sprop's
* slot, update the cache entry in the hope
* that obj and other instances with the same
* number of reserved slots are now "hot".
* If slot does not match the cached sprop's slot,
* update the cache entry in the hope that obj and
* other instances with the same number of reserved
* slots are now "hot".
*/
if (slot != sprop->slot || scope->table) {
JSScopeProperty *sprop2 =
@ -1830,20 +1851,19 @@
}
/*
* No method change check here because here we
* are adding a new property, not updating an
* existing slot's value that might contain a
* method of a branded scope.
* No method change check here because here we are
* adding a new property, not updating an existing
* slot's value that might contain a method of a
* branded scope.
*/
TRACE_2(SetPropHit, entry, sprop);
LOCKED_OBJ_SET_SLOT(obj, slot, rval);
JS_UNLOCK_SCOPE(cx, scope);
/*
* Purge the property cache of the id we may
* have just shadowed in obj's scope and proto
* chains. We do this after unlocking obj's
* scope to avoid lock nesting.
* Purge the property cache of the id we may have just
* shadowed in obj's scope and proto chains. We do this
* after unlocking obj's scope to avoid lock nesting.
*/
js_PurgeScopeChain(cx, obj, sprop->id);
break;
@ -1998,9 +2018,8 @@
JS_ASSERT(vp >= StackBase(fp));
/*
* Assign lval, obj, and fun exactly as the code at inline_call:
* expects to find them, to avoid nesting a js_Interpret call via
* js_InvokeConstructor.
* Assign lval, obj, and fun exactly as the code at inline_call: expects to
* find them, to avoid nesting a js_Interpret call via js_InvokeConstructor.
*/
lval = *vp;
if (VALUE_IS_FUNCTION(cx, lval)) {
@ -2107,8 +2126,8 @@
}
/*
* Move args if the missing ones overflow arena a, then
* push undefined for the missing args.
* Move args if the missing ones overflow arena a, then push
* undefined for the missing args.
*/
if (missing) {
memcpy(newsp, vp, (2 + argc) * sizeof(jsval));
@ -2189,8 +2208,6 @@
newifp->hookData = NULL;
}
TRACE_0(EnterFrame);
inlineCallCount++;
JS_RUNTIME_METER(rt, inlineCalls);
@ -2204,6 +2221,23 @@
jsdtrace_function_args(cx, fp, fun, fp->argc, fp->argv);
#endif
#ifdef JS_TRACER
if (TRACE_RECORDER(cx)) {
TRACE_1(EnterFrame, inlineCallCount);
RESTORE_INTERP_VARS();
} else if (fp->script == fp->down->script &&
*fp->down->regs->pc == JSOP_CALL) {
#ifdef DEBUG
JSOp traceOp = js_GetOpcode(cx, fp->script,
fp->regs->pc);
JS_ASSERT_IF(!fp->imacpc, traceOp == JSOP_TRACE ||
traceOp == JSOP_NOP);
#endif
if (*fp->regs->pc == JSOP_TRACE)
MONITOR_BRANCH(Monitor_EnterFrame);
}
#endif
/* Load first op and dispatch it (safe since JSOP_STOP). */
op = (JSOp) *regs.pc;
DO_OP();
@ -2244,14 +2278,16 @@
regs.sp = vp + 1;
if (!ok) {
/*
* If we are executing the JSOP_NEXTITER imacro and a Stopiteration
* exception is raised, transform it into a JSVAL_HOLE return value.
* The tracer generates equivalent code by calling CatchStopIteration_tn.
* If we are executing the JSOP_NEXTITER imacro and a
* Stopiteration exception is raised, transform it into a
* JSVAL_HOLE return value. The tracer generates equivalent
* code by calling CatchStopIteration_tn.
*/
if (fp->imacpc && *fp->imacpc == JSOP_NEXTITER &&
cx->throwing && js_ValueIsStopIteration(cx->exception)) {
// pc may point to JSOP_DUP here due to bug 474854.
JS_ASSERT(*regs.pc == JSOP_CALL || *regs.pc == JSOP_DUP);
JS_ASSERT(*regs.pc == JSOP_CALL ||
*regs.pc == JSOP_DUP);
cx->throwing = JS_FALSE;
cx->exception = JSVAL_VOID;
regs.sp[-1] = JSVAL_HOLE;
@ -2375,8 +2411,8 @@
BEGIN_CASE(JSOP_INDEXBASE)
/*
* Here atoms can exceed script->atomMap.length as we use atoms
* as a segment register for object literals as well.
* Here atoms can exceed script->atomMap.length as we use atoms as a
* segment register for object literals as well.
*/
atoms += GET_INDEXBASE(regs.pc);
END_CASE(JSOP_INDEXBASE)
@ -2412,28 +2448,28 @@
JSObject *funobj;
/*
* Push a regexp object for the atom mapped by the bytecode at pc,
* cloning the literal's regexp object if necessary, to simulate in
* the pre-compile/execute-later case what ECMA specifies for the
* compile-and-go case: that scanning each regexp literal creates
* a single corresponding RegExp object.
* Push a regexp object for the atom mapped by the bytecode at pc, cloning
* the literal's regexp object if necessary, to simulate in the
* pre-compile/execute-later case what ECMA specifies for the
* compile-and-go case: that scanning each regexp literal creates a single
* corresponding RegExp object.
*
* To support pre-compilation transparently, we must handle the
* case where a regexp object literal is used in a different global
* at execution time from the global with which it was scanned at
* compile time. We do this by re-wrapping the JSRegExp private
* data struct with a cloned object having the right prototype and
* parent, and having its own lastIndex property value storage.
* To support pre-compilation transparently, we must handle the case where
* a regexp object literal is used in a different global at execution time
* from the global with which it was scanned at compile time. We do this
* by re-wrapping the JSRegExp private data struct with a cloned object
* having the right prototype and parent, and having its own lastIndex
* property value storage.
*
* Unlike JSOP_DEFFUN and other prolog bytecodes that may clone
* literal objects, we don't want to pay a script prolog execution
* price for all regexp literals in a script (many may not be used
* by a particular execution of that script, depending on control
* flow), so we initialize lazily here.
* Unlike JSOP_DEFFUN and other prolog bytecodes that may clone literal
* objects, we don't want to pay a script prolog execution price for all
* regexp literals in a script (many may not be used by a particular
* execution of that script, depending on control flow), so we initialize
* lazily here.
*
* XXX This code is specific to regular expression objects. If we
* need a similar op for other kinds of object literals, we should
* push cloning down under JSObjectOps and reuse code here.
* XXX This code is specific to regular expression objects. If we need a
* similar op for other kinds of object literals, we should push cloning
* down under JSObjectOps and reuse code here.
*/
index = GET_FULL_INDEX(0);
JS_ASSERT(index < script->regexps()->length);
@ -2441,10 +2477,10 @@
slot = index;
if (fp->fun) {
/*
* We're in function code, not global or eval code (in eval
* code, JSOP_REGEXP is never emitted). The cloned funobj
* contains script->regexps()->length reserved slots
* for the cloned regexps; see fun_reserveSlots, jsfun.c.
* We're in function code, not global or eval code (in eval code,
* JSOP_REGEXP is never emitted). The cloned funobj contains
* script->regexps()->length reserved slots for the cloned regexps; see
* fun_reserveSlots, jsfun.c.
*/
funobj = JSVAL_TO_OBJECT(fp->argv[-2]);
slot += JSCLASS_RESERVED_SLOTS(&js_FunctionClass);
@ -2456,11 +2492,11 @@
rval = JSVAL_NULL;
} else {
/*
* We're in global code. The code generator reserved a slot
* for the regexp among script->nfixed slots. All such slots
* are initialized to null, not void, for faster testing in
* JSOP_*GVAR cases. To simplify index calculations we count
* regexps in the reverse order down from script->nslots - 1.
* We're in global code. The code generator reserved a slot for the
* regexp among script->nfixed slots. All such slots are initialized to
* null, not void, for faster testing in JSOP_*GVAR cases. To simplify
* index calculations we count regexps in the reverse order down from
* script->nslots - 1.
*/
JS_ASSERT(slot < script->nfixed);
slot = script->nfixed - slot - 1;
@ -2477,28 +2513,28 @@
obj2 = parent;
/*
* If obj's parent is not obj2, we must clone obj so that it
* has the right parent, and therefore, the right prototype.
* If obj's parent is not obj2, we must clone obj so that it has the
* right parent, and therefore, the right prototype.
*
* Yes, this means we assume that the correct RegExp.prototype
* to which regexp instances (including literals) delegate can
* be distinguished solely by the instance's parent, which was
* set to the parent of the RegExp constructor function object
* when the instance was created. In other words,
* Yes, this means we assume that the correct RegExp.prototype to which
* regexp instances (including literals) delegate can be distinguished
* solely by the instance's parent, which was set to the parent of the
* RegExp constructor function object when the instance was created.
* In other words,
*
* (/x/.__parent__ == RegExp.__parent__) implies
* (/x/.__proto__ == RegExp.prototype)
*
* (unless you assign a different object to RegExp.prototype
* at runtime, in which case, ECMA doesn't specify operation,
* and you get what you deserve).
* (unless you assign a different object to RegExp.prototype at
* runtime, in which case, ECMA doesn't specify operation, and you get
* what you deserve).
*
* This same coupling between instance parent and constructor
* parent turns up everywhere (see jsobj.c's FindClassObject,
* js_ConstructObject, and js_NewObject). It's fundamental to
* the design of the language when you consider multiple global
* objects and separate compilation and execution, even though
* it is not specified fully in ECMA.
* This same coupling between instance parent and constructor parent
* turns up everywhere (see jsobj.c's FindClassObject,
* js_ConstructObject, and js_NewObject). It's fundamental to the
* design of the language when you consider multiple global objects and
* separate compilation and execution, even though it is not specified
* fully in ECMA.
*/
obj = script->getRegExp(index);
if (OBJ_GET_PARENT(cx, obj) != obj2) {
@ -2546,9 +2582,9 @@
len = GET_JUMP_OFFSET(pc2);
/*
* ECMAv2+ forbids conversion of discriminant, so we will skip to
* the default case if the discriminant isn't already an int jsval.
* (This opcode is emitted only for dense jsint-domain switches.)
* ECMAv2+ forbids conversion of discriminant, so we will skip to the
* default case if the discriminant isn't already an int jsval. (This
* opcode is emitted only for dense jsint-domain switches.)
*/
rval = POP_OPND();
if (JSVAL_IS_INT(rval)) {
@ -2579,9 +2615,9 @@
len = GET_JUMPX_OFFSET(pc2);
/*
* ECMAv2+ forbids conversion of discriminant, so we will skip to
* the default case if the discriminant isn't already an int jsval.
* (This opcode is emitted only for dense jsint-domain switches.)
* ECMAv2+ forbids conversion of discriminant, so we will skip to the
* default case if the discriminant isn't already an int jsval. (This
* opcode is emitted only for dense jsint-domain switches.)
*/
rval = POP_OPND();
if (JSVAL_IS_INT(rval)) {
@ -2616,8 +2652,8 @@
do_lookup_switch:
/*
* JSOP_LOOKUPSWITCH and JSOP_LOOKUPSWITCHX are never used if
* any atom index in it would exceed 64K limit.
* JSOP_LOOKUPSWITCH and JSOP_LOOKUPSWITCHX are never used if any atom
* index in it would exceed 64K limit.
*/
JS_ASSERT(!fp->imacpc);
JS_ASSERT(atoms == script->atomMap.vector);
@ -2650,6 +2686,7 @@
break; \
} \
}
if (JSVAL_IS_STRING(lval)) {
str = JSVAL_TO_STRING(lval);
SEARCH_PAIRS(
@ -2692,7 +2729,7 @@
cx->throwing = JS_TRUE;
cx->exception = rval;
goto error;
default:;
default:
break;
}
JS_ASSERT(status == JSTRAP_CONTINUE);
@ -2955,11 +2992,10 @@
uint32 old;
/*
* A top-level function defined in Global or Eval code (see
* ECMA-262 Ed. 3), or else a SpiderMonkey extension: a named
* function statement in a compound statement (not at the top
* statement level of global code, or at the top level of a
* function body).
* A top-level function defined in Global or Eval code (see ECMA-262
* Ed. 3), or else a SpiderMonkey extension: a named function statement in
* a compound statement (not at the top statement level of global code, or
* at the top level of a function body).
*/
LOAD_FUNCTION(0);
obj = FUN_OBJECT(fun);
@ -2967,8 +3003,8 @@
if (FUN_NULL_CLOSURE(fun)) {
/*
* Even a null closure needs a parent for principals finding.
* FIXME: bug 476950, although debugger users may also demand
* some kind of scope link for debugger-assisted eval-in-frame.
* FIXME: bug 476950, although debugger users may also demand some kind
* of scope link for debugger-assisted eval-in-frame.
*/
obj2 = fp->scopeChain;
} else {
@ -2988,13 +3024,13 @@
}
/*
* If static link is not current scope, clone fun's object to link
* to the current scope via parent. We do this to enable sharing of
* compiled functions among multiple equivalent scopes, amortizing
* the cost of compilation over a number of executions. Examples
* include XUL scripts and event handlers shared among Firefox or
* other Mozilla app chrome windows, and user-defined JS functions
* precompiled and then shared among requests in server-side JS.
* If static link is not current scope, clone fun's object to link to the
* current scope via parent. We do this to enable sharing of compiled
* functions among multiple equivalent scopes, amortizing the cost of
* compilation over a number of executions. Examples include XUL scripts
* and event handlers shared among Firefox or other Mozilla app chrome
* windows, and user-defined JS functions precompiled and then shared among
* requests in server-side JS.
*/
if (OBJ_GET_PARENT(cx, obj) != obj2) {
obj = js_CloneFunctionObject(cx, fun, obj2);
@@ -3004,8 +3040,8 @@
/*
* Protect obj from any GC hiding below JSObject::setProperty or
* JSObject::defineProperty. All paths from here must flow through
* the fp->scopeChain code below the parent->defineProperty call.
* JSObject::defineProperty. All paths from here must flow through the
* fp->scopeChain code below the parent->defineProperty call.
*/
MUST_FLOW_THROUGH("restore_scope");
fp->scopeChain = obj;
@@ -3021,9 +3057,9 @@
: JSPROP_ENUMERATE | JSPROP_PERMANENT;
/*
* Load function flags that are also property attributes. Getters
* and setters do not need a slot, their value is stored elsewhere
* in the property itself, not in obj slots.
* Load function flags that are also property attributes. Getters and
* setters do not need a slot, their value is stored elsewhere in the
* property itself, not in obj slots.
*/
getter = setter = JS_PropertyStub;
flags = JSFUN_GSFLAG2ATTR(fun->flags);
@@ -3039,19 +3075,17 @@
}
/*
* We define the function as a property of the variable object and
* not the current scope chain even for the case of function
* expression statements and functions defined by eval inside let
* or with blocks.
* We define the function as a property of the variable object and not the
* current scope chain even for the case of function expression statements
* and functions defined by eval inside let or with blocks.
*/
parent = fp->varobj;
JS_ASSERT(parent);
/*
* Check for a const property of the same name -- or any kind
* of property if executing with the strict option. We check
* here at runtime as well as at compile-time, to handle eval
* as well as multiple HTML script tags.
* Check for a const property of the same name -- or any kind of property
* if executing with the strict option. We check here at runtime as well
* as at compile-time, to handle eval as well as multiple HTML script tags.
*/
id = ATOM_TO_JSID(fun->atom);
prop = NULL;
@@ -3060,16 +3094,15 @@
goto restore_scope;
/*
* We deviate from 10.1.2 in ECMA 262 v3 and under eval use for
* function declarations JSObject::setProperty, not
* JSObject::defineProperty, to preserve the JSOP_PERMANENT
* attribute of existing properties and make sure that such
* properties cannot be deleted.
* We deviate from 10.1.2 in ECMA 262 v3 and under eval use for function
* declarations JSObject::setProperty, not JSObject::defineProperty, to
* preserve the JSOP_PERMANENT attribute of existing properties and make
* sure that such properties cannot be deleted.
*
* We also use JSObject::setProperty for the existing properties of
* Call objects with matching attributes to preserve the native
* getters and setters that store the value of the property in the
* interpreter frame, see bug 467495.
* We also use JSObject::setProperty for the existing properties of Call
* objects with matching attributes to preserve the native getters and
* setters that store the value of the property in the interpreter frame,
* see bug 467495.
*/
doSet = (attrs == JSPROP_ENUMERATE);
JS_ASSERT_IF(doSet, fp->flags & JSFRAME_EVAL);
@@ -3080,9 +3113,8 @@
!(old & (JSPROP_GETTER|JSPROP_SETTER)) &&
(old & (JSPROP_ENUMERATE|JSPROP_PERMANENT)) == attrs)) {
/*
* js_CheckRedeclaration must reject attempts to add a
* getter or setter to an existing property without a
* getter or setter.
* js_CheckRedeclaration must reject attempts to add a getter or
* setter to an existing property without a getter or setter.
*/
JS_ASSERT(!(attrs & ~(JSPROP_ENUMERATE|JSPROP_PERMANENT)));
JS_ASSERT(!(old & JSPROP_READONLY));
@@ -3152,11 +3184,11 @@
BEGIN_CASE(JSOP_DEFLOCALFUN)
/*
* Define a local function (i.e., one nested at the top level of
* another function), parented by the current scope chain, stored
* in a local variable slot that the compiler allocated. This is
* an optimization over JSOP_DEFFUN that avoids requiring a call
* object for the outer function's activation.
* Define a local function (i.e., one nested at the top level of another
* function), parented by the current scope chain, stored in a local
* variable slot that the compiler allocated. This is an optimization over
* JSOP_DEFFUN that avoids requiring a call object for the outer function's
* activation.
*/
LOAD_FUNCTION(SLOTNO_LEN);
JS_ASSERT(FUN_INTERPRETED(fun));
@@ -3230,11 +3262,11 @@
* Optimize ({method: function () { ... }, ...}) and
* this.method = function () { ... }; bytecode sequences.
*
* Note that we jump to the entry points for JSOP_SETPROP
* and JSOP_INITPROP without calling the trace recorder,
* because the record hooks for those ops are essentially
* no-ops (this can't change given the predictive shape
* guarding the recorder must do).
* Note that we jump to the entry points for JSOP_SETPROP and
* JSOP_INITPROP without calling the trace recorder, because
* the record hooks for those ops are essentially no-ops (this
* can't change given the predictive shape guarding the
* recorder must do).
*/
if (op == JSOP_SETMETHOD) {
#ifdef DEBUG
@@ -3363,8 +3395,8 @@
}
/*
* Getters and setters are just like watchpoints from an access
* control point of view.
* Getters and setters are just like watchpoints from an access control
* point of view.
*/
if (!obj->checkAccess(cx, id, JSACC_WATCH, &rtmp, &attrs))
goto error;
@ -3442,7 +3474,7 @@
JS_ASSERT(regs.sp - StackBase(fp) >= 1);
lval = FETCH_OPND(-1);
JS_ASSERT(JSVAL_IS_OBJECT(lval));
cx->weakRoots.newborn[GCX_OBJECT] = JSVAL_TO_GCTHING(lval);
cx->weakRoots.newbornObject = JSVAL_TO_OBJECT(lval);
END_CASE(JSOP_ENDINIT)
BEGIN_CASE(JSOP_INITPROP)
@@ -3491,10 +3523,10 @@
JS_ASSERT(!(sprop->attrs & JSPROP_READONLY));
/*
* If this property has a non-stub setter, it must be
* __proto__, __parent__, or another "shared prototype"
* built-in. Force a miss to save code size here and let
* the standard code path take care of business.
* If this property has a non-stub setter, it must be __proto__,
* __parent__, or another "shared prototype" built-in. Force a miss
* to save code size here and let the standard code path take care
* of business.
*/
if (!SPROP_HAS_STUB_SETTER(sprop))
goto do_initprop_miss;
@@ -3508,17 +3540,17 @@
}
/*
* Detect a repeated property name and force a miss to
* share the strict warning code and cope with complexity
* managed by JSScope::add.
* Detect a repeated property name and force a miss to share the
* strict warning code and cope with complexity managed by
* JSScope::add.
*/
if (sprop->parent != scope->lastProp)
goto do_initprop_miss;
/*
* Otherwise this entry must be for a direct property of
* obj, not a proto-property, and there cannot have been
* any deletions of prior properties.
* Otherwise this entry must be for a direct property of obj, not a
* proto-property, and there cannot have been any deletions of
* prior properties.
*/
JS_ASSERT(!scope->hadMiddleDelete());
JS_ASSERT_IF(scope->table, !scope->has(sprop));
@@ -3567,9 +3599,9 @@
}
/*
* No method change check here because here we are adding a
* new property, not updating an existing slot's value that
* might contain a method of a branded scope.
* No method change check here because here we are adding a new
* property, not updating an existing slot's value that might
* contain a method of a branded scope.
*/
TRACE_2(SetPropHit, entry, sprop);
LOCKED_OBJ_SET_SLOT(obj, slot, rval);
@@ -3621,8 +3653,8 @@
FETCH_ELEMENT_ID(obj, -2, id);
/*
* Check for property redeclaration strict warning (we may be in
* an object initialiser, not an array initialiser).
* Check for property redeclaration strict warning (we may be in an object
* initialiser, not an array initialiser).
*/
if (!js_CheckRedeclaration(cx, obj, id, JSPROP_INITIALIZER, NULL, NULL))
goto error;
@@ -3653,7 +3685,10 @@
slot = GET_UINT16(regs.pc);
JS_ASSERT(slot + 1 < fp->script->nfixed);
lval = fp->slots[slot];
if (JSVAL_IS_VOID(lval)) {
if (!JSVAL_IS_PRIMITIVE(lval)) {
obj = JSVAL_TO_OBJECT(lval);
} else {
JS_ASSERT(JSVAL_IS_VOID(lval));
obj = js_NewArrayObject(cx, 0, NULL);
if (!obj)
goto error;
@@ -3743,10 +3778,10 @@
JS_ASSERT(JSVAL_IS_BOOLEAN(lval));
if (JSVAL_TO_BOOLEAN(lval)) {
/*
* Exception was pending during finally, throw it *before* we
* adjust pc, because pc indexes into script->trynotes. This
* turns out not to be necessary, but it seems clearer. And
* it points out a FIXME: 350509, due to Igor Bukanov.
* Exception was pending during finally, throw it *before* we adjust
* pc, because pc indexes into script->trynotes. This turns out not to
* be necessary, but it seems clearer. And it points out a FIXME:
* 350509, due to Igor Bukanov.
*/
cx->throwing = JS_TRUE;
cx->exception = rval;
@@ -3784,8 +3819,8 @@
BEGIN_CASE(JSOP_SETLOCALPOP)
/*
* The stack must have a block with at least one local slot below
* the exception object.
* The stack must have a block with at least one local slot below the
* exception object.
*/
JS_ASSERT((size_t) (regs.sp - StackBase(fp)) >= 2);
slot = GET_UINT16(regs.pc);
@@ -3795,8 +3830,8 @@
BEGIN_CASE(JSOP_IFPRIMTOP)
/*
* If the top of stack is of primitive type, jump to our target.
* Otherwise advance to the next opcode.
* If the top of stack is of primitive type, jump to our target. Otherwise
* advance to the next opcode.
*/
JS_ASSERT(regs.sp > StackBase(fp));
rval = FETCH_OPND(-1);
@@ -3812,11 +3847,8 @@
i = GET_INT8(regs.pc);
if (!JSVAL_IS_PRIMITIVE(lval)) {
lval = FETCH_OPND(-2);
js_ReportValueError2(cx, JSMSG_CANT_CONVERT_TO,
-2, lval, NULL,
(i == JSTYPE_VOID)
? "primitive type"
: JS_TYPE_STR(i));
js_ReportValueError2(cx, JSMSG_CANT_CONVERT_TO, -2, lval, NULL,
(i == JSTYPE_VOID) ? "primitive type" : JS_TYPE_STR(i));
goto error;
}
END_CASE(JSOP_PRIMTOP)
@@ -3850,8 +3882,7 @@
{
JSTrapHandler handler = cx->debugHooks->debuggerHandler;
if (handler) {
switch (handler(cx, script, regs.pc, &rval,
cx->debugHooks->debuggerHandlerData)) {
switch (handler(cx, script, regs.pc, &rval, cx->debugHooks->debuggerHandlerData)) {
case JSTRAP_ERROR:
goto error;
case JSTRAP_CONTINUE:
@@ -3990,9 +4021,9 @@
BEGIN_CASE(JSOP_FILTER)
/*
* We push the hole value before jumping to [enditer] so we can
* detect the first iteration and direct js_StepXMLListFilter to
* initialize filter's state.
* We push the hole value before jumping to [enditer] so we can detect the
* first iteration and direct js_StepXMLListFilter to initialize filter's
* state.
*/
PUSH_OPND(JSVAL_HOLE);
len = GET_JUMP_OFFSET(regs.pc);
@@ -4009,8 +4040,8 @@
goto error;
if (regs.sp[-1] != JSVAL_NULL) {
/*
* Decrease sp after EnterWith returns as we use sp[-1] there
* to root temporaries.
* Decrease sp after EnterWith returns as we use sp[-1] there to root
* temporaries.
*/
JS_ASSERT(VALUE_IS_XML(cx, regs.sp[-1]));
if (!js_EnterWith(cx, -2))
@@ -4092,9 +4123,7 @@
str = ATOM_TO_STRING(atom);
rval = FETCH_OPND(-1);
str2 = JSVAL_TO_STRING(rval);
obj = js_NewXMLSpecialObject(cx,
JSXML_CLASS_PROCESSING_INSTRUCTION,
str, str2);
obj = js_NewXMLSpecialObject(cx, JSXML_CLASS_PROCESSING_INSTRUCTION, str, str2);
if (!obj)
goto error;
STORE_OPND(-1, OBJECT_TO_JSVAL(obj));
@@ -4123,12 +4152,11 @@
JS_ASSERT(fp->blockChain == OBJ_GET_PARENT(cx, obj));
/*
* The young end of fp->scopeChain may omit blocks if we
* haven't closed over them, but if there are any closure
* blocks on fp->scopeChain, they'd better be (clones of)
* ancestors of the block we're entering now; anything
* else we should have popped off fp->scopeChain when we
* left its static scope.
* The young end of fp->scopeChain may omit blocks if we haven't closed
* over them, but if there are any closure blocks on fp->scopeChain, they'd
* better be (clones of) ancestors of the block we're entering now;
* anything else we should have popped off fp->scopeChain when we left its
* static scope.
*/
obj2 = fp->scopeChain;
while ((clasp = OBJ_GET_CLASS(cx, obj2)) == &js_WithClass)
@@ -4156,10 +4184,9 @@
JS_ASSERT(blockDepth <= StackDepth(script));
#endif
/*
* If we're about to leave the dynamic scope of a block that has
* been cloned onto fp->scopeChain, clear its private data, move
* its locals from the stack into the clone, and pop it off the
* chain.
* If we're about to leave the dynamic scope of a block that has been
* cloned onto fp->scopeChain, clear its private data, move its locals from
* the stack into the clone, and pop it off the chain.
*/
obj = fp->scopeChain;
if (OBJ_GET_PROTO(cx, obj) == fp->blockChain) {
@@ -4171,10 +4198,7 @@
/* Pop the block chain, too. */
fp->blockChain = OBJ_GET_PARENT(cx, fp->blockChain);
/*
* We will move the result of the expression to the new topmost
* stack slot.
*/
/* Move the result of the expression to the new topmost stack slot. */
if (op == JSOP_LEAVEBLOCKEXPR)
rval = FETCH_OPND(-1);
regs.sp -= GET_UINT16(regs.pc);

711
js/src/jsrecursion.cpp Normal file
View file

@@ -0,0 +1,711 @@
/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=4 sw=4 et tw=99 ft=cpp:
*
* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is Mozilla SpiderMonkey JavaScript 1.9 code, released
* June 12, 2009.
*
* The Initial Developer of the Original Code is
* the Mozilla Corporation.
*
* Contributor(s):
* David Anderson <danderson@mozilla.com>
* Andreas Gal <gal@mozilla.com>
*
* Alternatively, the contents of this file may be used under the terms of
* either of the GNU General Public License Version 2 or later (the "GPL"),
* or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
class RecursiveSlotMap : public SlotMap
{
public:
RecursiveSlotMap(TraceRecorder& rec)
: SlotMap(rec)
{
}
JS_REQUIRES_STACK void
adjustTypes()
{
}
};
#if defined DEBUG
static JS_REQUIRES_STACK void
AssertDownFrameIsConsistent(JSContext* cx, VMSideExit* anchor, FrameInfo* fi)
{
JS_ASSERT(anchor->recursive_down);
JS_ASSERT(anchor->recursive_down->callerHeight == fi->callerHeight);
unsigned downPostSlots = fi->callerHeight;
JSTraceType* typeMap = fi->get_typemap();
js_CaptureStackTypes(cx, 1, typeMap);
const JSTraceType* m1 = anchor->recursive_down->get_typemap();
for (unsigned i = 0; i < downPostSlots; i++) {
if (m1[i] == typeMap[i])
continue;
if (typeMap[i] == TT_INT32 && m1[i] == TT_DOUBLE)
continue;
JS_NOT_REACHED("invalid RECURSIVE_MISMATCH exit");
}
JS_ASSERT(memcmp(anchor->recursive_down, fi, sizeof(FrameInfo)) == 0);
}
#endif
JS_REQUIRES_STACK VMSideExit*
TraceRecorder::downSnapshot(FrameInfo* downFrame)
{
JS_ASSERT(!pendingSpecializedNative);
/* Build the typemap the exit will have. Note extra stack slot for return value. */
unsigned downPostSlots = downFrame->callerHeight;
unsigned ngslots = treeInfo->globalSlots->length();
unsigned exitTypeMapLen = downPostSlots + 1 + ngslots;
JSTraceType* exitTypeMap = (JSTraceType*)alloca(sizeof(JSTraceType) * exitTypeMapLen);
JSTraceType* typeMap = downFrame->get_typemap();
for (unsigned i = 0; i < downPostSlots; i++)
exitTypeMap[i] = typeMap[i];
exitTypeMap[downPostSlots] = determineSlotType(&stackval(-1));
determineGlobalTypes(&exitTypeMap[downPostSlots + 1]);
VMSideExit* exit = (VMSideExit*)
traceMonitor->traceAlloc->alloc(sizeof(VMSideExit) + sizeof(JSTraceType) * exitTypeMapLen);
memset(exit, 0, sizeof(VMSideExit));
exit->from = fragment;
exit->calldepth = 0;
JS_ASSERT(unsigned(exit->calldepth) == getCallDepth());
exit->numGlobalSlots = ngslots;
exit->numStackSlots = downPostSlots + 1;
exit->numStackSlotsBelowCurrentFrame = cx->fp->down->argv ?
nativeStackOffset(&cx->fp->argv[-2]) / sizeof(double) : 0;
exit->exitType = UNSTABLE_LOOP_EXIT;
exit->block = cx->fp->down->blockChain;
exit->pc = downFrame->pc + JSOP_CALL_LENGTH;
exit->imacpc = NULL;
exit->sp_adj = ((downPostSlots + 1) * sizeof(double)) - treeInfo->nativeStackBase;
exit->rp_adj = exit->calldepth * sizeof(FrameInfo*);
exit->nativeCalleeWord = 0;
exit->lookupFlags = js_InferFlags(cx, 0);
memcpy(exit->fullTypeMap(), exitTypeMap, sizeof(JSTraceType) * exitTypeMapLen);
#if defined JS_JIT_SPEW
TreevisLogExit(cx, exit);
#endif
return exit;
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::upRecursion()
{
JS_ASSERT((JSOp)*cx->fp->down->regs->pc == JSOP_CALL);
JS_ASSERT(js_CodeSpec[js_GetOpcode(cx, cx->fp->down->script,
cx->fp->down->regs->pc)].length == JSOP_CALL_LENGTH);
JS_ASSERT(callDepth == 0);
/*
* If some operation involving interpreter frame slurping failed, go to
* that code right away, and don't bother with emitting the up-recursive
* guards again.
*/
if (anchor && (anchor->exitType == RECURSIVE_EMPTY_RP_EXIT ||
anchor->exitType == RECURSIVE_SLURP_MISMATCH_EXIT ||
anchor->exitType == RECURSIVE_SLURP_FAIL_EXIT)) {
return InjectStatus(slurpDownFrames(cx->fp->down->regs->pc));
}
jsbytecode* return_pc = cx->fp->down->regs->pc;
jsbytecode* recursive_pc = return_pc + JSOP_CALL_LENGTH;
/*
* It is possible that the down frame isn't the same at runtime. It's not
* enough to guard on the PC, since the typemap could be different as well.
* To deal with this, guard that the FrameInfo on the callstack is 100%
* identical.
*
* Note that though the slot count is called "downPostSlots", it is the
* number of slots after the CALL instruction has theoretically popped
* callee/this/argv, but before the return value is pushed. This is
* intentional, since the FrameInfo pushed by down recursion would not have
* the return value yet. Instead, when closing the loop, the return value
* becomes the sole stack type used to deduce type stability.
*/
unsigned totalSlots = NativeStackSlots(cx, 1);
unsigned downPostSlots = totalSlots - NativeStackSlots(cx, 0);
FrameInfo* fi = (FrameInfo*)alloca(sizeof(FrameInfo) + totalSlots * sizeof(JSTraceType));
fi->block = cx->fp->blockChain;
fi->pc = (jsbytecode*)return_pc;
fi->imacpc = NULL;
/*
* Need to compute this from the down frame, since the stack could have
* moved on this one.
*/
fi->spdist = cx->fp->down->regs->sp - cx->fp->down->slots;
JS_ASSERT(cx->fp->argc == cx->fp->down->argc);
fi->set_argc(cx->fp->argc, false);
fi->callerHeight = downPostSlots;
fi->callerArgc = cx->fp->down->argc;
if (anchor && anchor->exitType == RECURSIVE_MISMATCH_EXIT) {
/*
* Case 0: Anchoring off a RECURSIVE_MISMATCH guard. Guard on this FrameInfo.
* This is always safe because this point is only reached on simple "call myself"
* recursive functions.
*/
#if defined DEBUG
AssertDownFrameIsConsistent(cx, anchor, fi);
#endif
fi = anchor->recursive_down;
} else if (recursive_pc != fragment->root->ip) {
/*
* Case 1: Guess that down-recursion has started to back out; infer types
* from the down frame.
*/
js_CaptureStackTypes(cx, 1, fi->get_typemap());
} else {
/* Case 2: Guess that up-recursion is backing out, infer types from our TreeInfo. */
JS_ASSERT(treeInfo->nStackTypes == downPostSlots + 1);
JSTraceType* typeMap = fi->get_typemap();
for (unsigned i = 0; i < downPostSlots; i++)
typeMap[i] = treeInfo->typeMap[i];
}
fi = traceMonitor->frameCache->memoize(fi);
/*
* Guard that there are more recursive frames. If coming from an anchor
* where this was already computed, don't bother doing it again.
*/
if (!anchor || anchor->exitType != RECURSIVE_MISMATCH_EXIT) {
VMSideExit* exit = snapshot(RECURSIVE_EMPTY_RP_EXIT);
/* Guard that rp >= sr + 1 */
guard(true,
lir->ins2(LIR_pge, lirbuf->rp,
lir->ins2(LIR_piadd,
lir->insLoad(LIR_ldp, lirbuf->state,
offsetof(InterpState, sor)),
INS_CONSTWORD(sizeof(FrameInfo*)))),
exit);
}
debug_only_printf(LC_TMRecorder, "guardUpRecursive fragment->root=%p fi=%p\n", (void*)fragment->root, (void*)fi);
/* Guard that the FrameInfo above is the same FrameInfo pointer. */
VMSideExit* exit = snapshot(RECURSIVE_MISMATCH_EXIT);
LIns* prev_rp = lir->insLoad(LIR_ldp, lirbuf->rp, -int32_t(sizeof(FrameInfo*)));
guard(true, lir->ins2(LIR_peq, prev_rp, INS_CONSTPTR(fi)), exit);
/*
* Now it's time to try and close the loop. Get a special exit that points
* at the down frame, after the return has been propagated up.
*/
exit = downSnapshot(fi);
/* Move the return value down from this frame to the one below it. */
rval_ins = get(&stackval(-1));
if (isPromoteInt(rval_ins))
rval_ins = demoteIns(rval_ins);
/*
* The native stack offset of the return value once this frame has returned is:
* -treeInfo->nativeStackBase + downPostSlots * sizeof(double)
*
* Note, not +1, since the offset is 0-based.
*
* This needs to be adjusted down one frame. The amount to adjust must be
* the amount down recursion added, which was just guarded as |downPostSlots|.
*
* So the offset is:
* -treeInfo->nativeStackBase + downPostSlots * sizeof(double) -
* downPostSlots * sizeof(double)
* Or:
* -treeInfo->nativeStackBase
*
* This makes sense because this slot is just above the highest sp for the
* down frame.
*/
lir->insStorei(rval_ins, lirbuf->sp, -treeInfo->nativeStackBase);
/* Adjust stacks. See above for |downPostSlots| reasoning. */
lirbuf->sp = lir->ins2(LIR_piadd, lirbuf->sp,
lir->insImmWord(-int(downPostSlots) * sizeof(double)));
lir->insStorei(lirbuf->sp, lirbuf->state, offsetof(InterpState, sp));
lirbuf->rp = lir->ins2(LIR_piadd, lirbuf->rp,
lir->insImmWord(-int(sizeof(FrameInfo*))));
lir->insStorei(lirbuf->rp, lirbuf->state, offsetof(InterpState, rp));
RecursiveSlotMap slotMap(*this);
for (unsigned i = 0; i < downPostSlots; i++)
slotMap.addSlot(exit->stackType(i));
slotMap.addSlot(&stackval(-1));
VisitGlobalSlots(slotMap, cx, *treeInfo->globalSlots);
if (recursive_pc == (jsbytecode*)fragment->root->ip) {
debug_only_print0(LC_TMTracer, "Compiling up-recursive loop...\n");
} else {
debug_only_print0(LC_TMTracer, "Compiling up-recursive branch...\n");
exit->exitType = RECURSIVE_UNLINKED_EXIT;
exit->recursive_pc = recursive_pc;
}
JS_ASSERT(treeInfo->recursion != Recursion_Disallowed);
if (treeInfo->recursion != Recursion_Detected)
treeInfo->recursion = Recursion_Unwinds;
return closeLoop(slotMap, exit);
}
class SlurpInfo
{
public:
unsigned curSlot;
JSTraceType* typeMap;
VMSideExit* exit;
unsigned slurpFailSlot;
};
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::slurpDownFrames(jsbytecode* return_pc)
{
/* If argc doesn't match the formal argument count, we can't slurp this frame. */
if (cx->fp->argc != cx->fp->fun->nargs)
RETURN_STOP_A("argc != nargs");
LIns* argv_ins;
unsigned frameDepth;
unsigned downPostSlots;
JSStackFrame* fp = cx->fp;
LIns* fp_ins = addName(lir->insLoad(LIR_ldp, cx_ins, offsetof(JSContext, fp)), "fp");
/*
* When first emitting slurp code, do so against the down frame. After
* popping the interpreter frame, it is illegal to resume here, as the
* down frame has been moved up. So all this code should be skipped if
* anchoring off such an exit.
*/
if (!anchor || anchor->exitType != RECURSIVE_SLURP_FAIL_EXIT) {
fp_ins = addName(lir->insLoad(LIR_ldp, fp_ins, offsetof(JSStackFrame, down)), "downFp");
fp = fp->down;
argv_ins = addName(lir->insLoad(LIR_ldp, fp_ins, offsetof(JSStackFrame, argv)), "argv");
/* If recovering from a SLURP_MISMATCH, all of this is unnecessary. */
if (!anchor || anchor->exitType != RECURSIVE_SLURP_MISMATCH_EXIT) {
/* fp->down should not be NULL. */
guard(false, lir->ins_peq0(fp_ins), RECURSIVE_LOOP_EXIT);
/* fp->down->argv should not be NULL. */
guard(false, lir->ins_peq0(argv_ins), RECURSIVE_LOOP_EXIT);
/*
* Guard on the script being the same. This might seem unnecessary,
* but it lets the recursive loop end cleanly if it doesn't match.
* With only the pc check, it is harder to differentiate between
* end-of-recursion and recursion-returns-to-different-pc.
*/
guard(true,
lir->ins2(LIR_peq,
addName(lir->insLoad(LIR_ldp,
fp_ins,
offsetof(JSStackFrame, script)),
"script"),
INS_CONSTPTR(cx->fp->down->script)),
RECURSIVE_LOOP_EXIT);
}
/* fp->down->regs->pc should be == pc. */
guard(true,
lir->ins2(LIR_peq,
lir->insLoad(LIR_ldp,
addName(lir->insLoad(LIR_ldp, fp_ins, offsetof(JSStackFrame, regs)),
"regs"),
offsetof(JSFrameRegs, pc)),
INS_CONSTPTR(return_pc)),
RECURSIVE_SLURP_MISMATCH_EXIT);
/* fp->down->argc should be == argc. */
guard(true,
lir->ins2(LIR_eq,
addName(lir->insLoad(LIR_ld, fp_ins, offsetof(JSStackFrame, argc)),
"argc"),
INS_CONST(cx->fp->argc)),
MISMATCH_EXIT);
/* Pop the interpreter frame. */
LIns* args[] = { lirbuf->state, cx_ins };
guard(false, lir->ins_eq0(lir->insCall(&js_PopInterpFrame_ci, args)), MISMATCH_EXIT);
/* Compute slots for the down frame. */
downPostSlots = NativeStackSlots(cx, 1) - NativeStackSlots(cx, 0);
frameDepth = 1;
} else {
/* Note: loading argv from fp, not fp->down. */
argv_ins = addName(lir->insLoad(LIR_ldp, fp_ins, offsetof(JSStackFrame, argv)), "argv");
/* Slots for this frame, minus the return value. */
downPostSlots = NativeStackSlots(cx, 0) - 1;
frameDepth = 0;
}
/*
* This is a special exit used as a template for the stack-slurping code.
* LeaveTree will ignore all but the final slot, which contains the return
* value. The slurpSlot variable keeps track of the last slot that has been
* unboxed, as to avoid re-unboxing when taking a SLURP_FAIL exit.
*/
unsigned numGlobalSlots = treeInfo->globalSlots->length();
unsigned safeSlots = NativeStackSlots(cx, frameDepth) + 1 + numGlobalSlots;
jsbytecode* recursive_pc = return_pc + JSOP_CALL_LENGTH;
LIns* data = lir->insSkip(sizeof(VMSideExit) + sizeof(JSTraceType) * safeSlots);
VMSideExit* exit = (VMSideExit*)data->payload();
memset(exit, 0, sizeof(VMSideExit));
exit->pc = (jsbytecode*)recursive_pc;
exit->from = fragment;
exit->exitType = RECURSIVE_SLURP_FAIL_EXIT;
exit->numStackSlots = downPostSlots + 1;
exit->numGlobalSlots = numGlobalSlots;
exit->sp_adj = ((downPostSlots + 1) * sizeof(double)) - treeInfo->nativeStackBase;
exit->recursive_pc = recursive_pc;
/*
* Build the exit typemap. This may capture extra types, but they are
* thrown away.
*/
JSTraceType* typeMap = exit->stackTypeMap();
jsbytecode* oldpc = cx->fp->regs->pc;
cx->fp->regs->pc = exit->pc;
js_CaptureStackTypes(cx, frameDepth, typeMap);
cx->fp->regs->pc = oldpc;
typeMap[downPostSlots] = determineSlotType(&stackval(-1));
if (typeMap[downPostSlots] == TT_INT32 &&
oracle.isStackSlotUndemotable(cx, downPostSlots, recursive_pc)) {
typeMap[downPostSlots] = TT_DOUBLE;
}
determineGlobalTypes(&typeMap[exit->numStackSlots]);
#if defined JS_JIT_SPEW
TreevisLogExit(cx, exit);
#endif
/*
* Move return value to the right place, if necessary. The previous store
* could have been killed so it is necessary to write it again.
*/
if (!anchor || anchor->exitType != RECURSIVE_SLURP_FAIL_EXIT) {
JS_ASSERT(exit->sp_adj >= int(sizeof(double)));
ptrdiff_t actRetOffset = exit->sp_adj - sizeof(double);
LIns* rval = get(&stackval(-1));
if (typeMap[downPostSlots] == TT_INT32)
rval = demoteIns(rval);
lir->insStorei(addName(rval, "rval"), lirbuf->sp, actRetOffset);
}
/* Slurp */
SlurpInfo info;
info.curSlot = 0;
info.exit = exit;
info.typeMap = typeMap;
info.slurpFailSlot = (anchor && anchor->exitType == RECURSIVE_SLURP_FAIL_EXIT) ?
anchor->slurpFailSlot : 0;
/* callee */
slurpSlot(lir->insLoad(LIR_ldp, argv_ins, -2 * ptrdiff_t(sizeof(jsval))),
&fp->argv[-2],
&info);
/* this */
slurpSlot(lir->insLoad(LIR_ldp, argv_ins, -1 * ptrdiff_t(sizeof(jsval))),
&fp->argv[-1],
&info);
/* args[0..n] */
for (unsigned i = 0; i < JS_MAX(fp->argc, fp->fun->nargs); i++)
slurpSlot(lir->insLoad(LIR_ldp, argv_ins, i * sizeof(jsval)), &fp->argv[i], &info);
/* argsobj */
slurpSlot(addName(lir->insLoad(LIR_ldp, fp_ins, offsetof(JSStackFrame, argsobj)), "argsobj"),
&fp->argsobj,
&info);
/* vars */
LIns* slots_ins = addName(lir->insLoad(LIR_ldp, fp_ins, offsetof(JSStackFrame, slots)),
"slots");
for (unsigned i = 0; i < fp->script->nfixed; i++)
slurpSlot(lir->insLoad(LIR_ldp, slots_ins, i * sizeof(jsval)), &fp->slots[i], &info);
/* stack vals */
unsigned nfixed = fp->script->nfixed;
jsval* stack = StackBase(fp);
LIns* stack_ins = addName(lir->ins2(LIR_piadd,
slots_ins,
INS_CONSTWORD(nfixed * sizeof(jsval))),
"stackBase");
size_t limit = size_t(fp->regs->sp - StackBase(fp));
if (anchor && anchor->exitType == RECURSIVE_SLURP_FAIL_EXIT)
limit--;
else
limit -= fp->fun->nargs + 2;
for (size_t i = 0; i < limit; i++)
slurpSlot(lir->insLoad(LIR_ldp, stack_ins, i * sizeof(jsval)), &stack[i], &info);
JS_ASSERT(info.curSlot == downPostSlots);
/* Jump back to the start */
exit = copy(exit);
exit->exitType = UNSTABLE_LOOP_EXIT;
#if defined JS_JIT_SPEW
TreevisLogExit(cx, exit);
#endif
/* Finally, close the loop. */
RecursiveSlotMap slotMap(*this);
for (unsigned i = 0; i < downPostSlots; i++)
slotMap.addSlot(typeMap[i]);
slotMap.addSlot(&stackval(-1));
VisitGlobalSlots(slotMap, cx, *treeInfo->globalSlots);
debug_only_print0(LC_TMTracer, "Compiling up-recursive slurp...\n");
exit = copy(exit);
if (exit->recursive_pc == fragment->root->ip)
exit->exitType = UNSTABLE_LOOP_EXIT;
else
exit->exitType = RECURSIVE_UNLINKED_EXIT;
debug_only_printf(LC_TMTreeVis, "TREEVIS CHANGEEXIT EXIT=%p TYPE=%s\n", (void*)exit,
getExitName(exit->exitType));
JS_ASSERT(treeInfo->recursion >= Recursion_Unwinds);
return closeLoop(slotMap, exit);
}
JS_REQUIRES_STACK AbortableRecordingStatus
TraceRecorder::downRecursion()
{
JSStackFrame* fp = cx->fp;
if ((jsbytecode*)fragment->ip < fp->script->code ||
(jsbytecode*)fragment->ip >= fp->script->code + fp->script->length) {
RETURN_STOP_A("inner recursive call must compile first");
}
/* Adjust the stack by the budget the down-frame needs. */
int slots = NativeStackSlots(cx, 1) - NativeStackSlots(cx, 0);
JS_ASSERT(unsigned(slots) == NativeStackSlots(cx, 1) - fp->argc - 2 - fp->script->nfixed - 1);
/* Guard that there is enough stack space. */
JS_ASSERT(treeInfo->maxNativeStackSlots >= treeInfo->nativeStackBase / sizeof(double));
int guardSlots = slots + treeInfo->maxNativeStackSlots -
treeInfo->nativeStackBase / sizeof(double);
LIns* sp_top = lir->ins2(LIR_piadd, lirbuf->sp, lir->insImmWord(guardSlots * sizeof(double)));
guard(true, lir->ins2(LIR_plt, sp_top, eos_ins), OOM_EXIT);
/* Guard that there is enough call stack space. */
LIns* rp_top = lir->ins2(LIR_piadd, lirbuf->rp, lir->insImmWord(sizeof(FrameInfo*)));
guard(true, lir->ins2(LIR_plt, rp_top, eor_ins), OOM_EXIT);
/* Add space for a new JIT frame. */
lirbuf->sp = lir->ins2(LIR_piadd, lirbuf->sp, lir->insImmWord(slots * sizeof(double)));
lir->insStorei(lirbuf->sp, lirbuf->state, offsetof(InterpState, sp));
lirbuf->rp = lir->ins2(LIR_piadd, lirbuf->rp, lir->insImmWord(sizeof(FrameInfo*)));
lir->insStorei(lirbuf->rp, lirbuf->state, offsetof(InterpState, rp));
--callDepth;
clearFrameSlotsFromCache();
/*
* If the callee and caller have identical call sites, this is a down-
* recursive loop. Otherwise something special happened. For example, a
* recursive call that is unwinding could nest back down recursively again.
* In this case, we build a fragment that ideally we'll never invoke
* directly, but link from a down-recursive branch. The UNLINKED_EXIT tells
* closeLoop() that the peer trees should match the recursive pc, not the
* tree pc.
*/
VMSideExit* exit;
if ((jsbytecode*)fragment->root->ip == fp->script->code)
exit = snapshot(UNSTABLE_LOOP_EXIT);
else
exit = snapshot(RECURSIVE_UNLINKED_EXIT);
exit->recursive_pc = fp->script->code;
debug_only_print0(LC_TMTracer, "Compiling down-recursive function call.\n");
JS_ASSERT(treeInfo->recursion != Recursion_Disallowed);
treeInfo->recursion = Recursion_Detected;
return closeLoop(exit);
}
JS_REQUIRES_STACK LIns*
TraceRecorder::slurpInt32Slot(LIns* val_ins, jsval* vp, VMSideExit* exit)
{
guard(true,
lir->ins2(LIR_or,
lir->ins2(LIR_peq,
lir->ins2(LIR_piand, val_ins, INS_CONSTWORD(JSVAL_TAGMASK)),
INS_CONSTWORD(JSVAL_DOUBLE)),
lir->ins2(LIR_peq,
lir->ins2(LIR_piand, val_ins, INS_CONSTWORD(1)),
INS_CONSTWORD(1))),
exit);
LIns* space = lir->insAlloc(sizeof(int32));
LIns* args[] = { space, val_ins };
LIns* result = lir->insCall(&js_TryUnboxInt32_ci, args);
guard(false, lir->ins_eq0(result), exit);
LIns* int32_ins = lir->insLoad(LIR_ld, space, 0);
return int32_ins;
}
JS_REQUIRES_STACK LIns*
TraceRecorder::slurpDoubleSlot(LIns* val_ins, jsval* vp, VMSideExit* exit)
{
guard(true,
lir->ins2(LIR_or,
lir->ins2(LIR_peq,
lir->ins2(LIR_piand, val_ins, INS_CONSTWORD(JSVAL_TAGMASK)),
INS_CONSTWORD(JSVAL_DOUBLE)),
lir->ins2(LIR_peq,
lir->ins2(LIR_piand, val_ins, INS_CONSTWORD(1)),
INS_CONSTWORD(1))),
exit);
LIns* args[] = { val_ins };
LIns* dbl_ins = lir->insCall(&js_UnboxDouble_ci, args);
return dbl_ins;
}
JS_REQUIRES_STACK LIns*
TraceRecorder::slurpBoolSlot(LIns* val_ins, jsval* vp, VMSideExit* exit)
{
guard(true,
lir->ins2(LIR_peq,
lir->ins2(LIR_piand, val_ins, INS_CONSTWORD(JSVAL_TAGMASK)),
INS_CONSTWORD(JSVAL_SPECIAL)),
exit);
LIns* bool_ins = lir->ins2(LIR_pirsh, val_ins, INS_CONST(JSVAL_TAGBITS));
bool_ins = p2i(bool_ins);
return bool_ins;
}
JS_REQUIRES_STACK LIns*
TraceRecorder::slurpStringSlot(LIns* val_ins, jsval* vp, VMSideExit* exit)
{
guard(true,
lir->ins2(LIR_peq,
lir->ins2(LIR_piand, val_ins, INS_CONSTWORD(JSVAL_TAGMASK)),
INS_CONSTWORD(JSVAL_STRING)),
exit);
LIns* str_ins = lir->ins2(LIR_piand, val_ins, INS_CONSTWORD(~JSVAL_TAGMASK));
return str_ins;
}
JS_REQUIRES_STACK LIns*
TraceRecorder::slurpNullSlot(LIns* val_ins, jsval* vp, VMSideExit* exit)
{
guard(true, lir->ins_peq0(val_ins), exit);
return val_ins;
}
JS_REQUIRES_STACK LIns*
TraceRecorder::slurpObjectSlot(LIns* val_ins, jsval* vp, VMSideExit* exit)
{
/* Must not be NULL */
guard(false, lir->ins_peq0(val_ins), exit);
/* Must be an object */
guard(true,
lir->ins_peq0(lir->ins2(LIR_piand, val_ins, INS_CONSTWORD(JSVAL_TAGMASK))),
exit);
/* Must NOT have a function class */
guard(false,
lir->ins2(LIR_peq,
lir->ins2(LIR_piand,
lir->insLoad(LIR_ldp, val_ins, offsetof(JSObject, classword)),
INS_CONSTWORD(~JSSLOT_CLASS_MASK_BITS)),
INS_CONSTPTR(&js_FunctionClass)),
exit);
return val_ins;
}
JS_REQUIRES_STACK LIns*
TraceRecorder::slurpFunctionSlot(LIns* val_ins, jsval* vp, VMSideExit* exit)
{
/* Must not be NULL */
guard(false, lir->ins_peq0(val_ins), exit);
/* Must be an object */
guard(true,
lir->ins_peq0(lir->ins2(LIR_piand, val_ins, INS_CONSTWORD(JSVAL_TAGMASK))),
exit);
/* Must have a function class */
guard(true,
lir->ins2(LIR_peq,
lir->ins2(LIR_piand,
lir->insLoad(LIR_ldp, val_ins, offsetof(JSObject, classword)),
INS_CONSTWORD(~JSSLOT_CLASS_MASK_BITS)),
INS_CONSTPTR(&js_FunctionClass)),
exit);
return val_ins;
}
JS_REQUIRES_STACK LIns*
TraceRecorder::slurpSlot(LIns* val_ins, jsval* vp, VMSideExit* exit)
{
switch (exit->slurpType)
{
case TT_PSEUDOBOOLEAN:
return slurpBoolSlot(val_ins, vp, exit);
case TT_INT32:
return slurpInt32Slot(val_ins, vp, exit);
case TT_DOUBLE:
return slurpDoubleSlot(val_ins, vp, exit);
case TT_STRING:
return slurpStringSlot(val_ins, vp, exit);
case TT_NULL:
return slurpNullSlot(val_ins, vp, exit);
case TT_OBJECT:
return slurpObjectSlot(val_ins, vp, exit);
case TT_FUNCTION:
return slurpFunctionSlot(val_ins, vp, exit);
default:
JS_NOT_REACHED("invalid type in typemap");
return NULL;
}
}
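Editorial aside (not part of the patch): all of the slurp guards above follow one pattern - mask the low tag bits of the jsval word, compare them against the expected tag, then use the payload bits. A minimal sketch of the equivalent untagging in plain C++, assuming this era's word-encoded jsvals where JSVAL_TAGMASK covers the tag bits:

    /* Mirrors slurpStringSlot's guard: tag check, then strip the tag bits. */
    static JSString *
    UnboxStringUnchecked(jsval v)
    {
        JS_ASSERT((v & JSVAL_TAGMASK) == JSVAL_STRING);
        return (JSString *) (v & ~JSVAL_TAGMASK);
    }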
JS_REQUIRES_STACK void
TraceRecorder::slurpSlot(LIns* val_ins, jsval* vp, SlurpInfo* info)
{
/* Don't re-read slots that aren't needed. */
if (info->curSlot < info->slurpFailSlot) {
info->curSlot++;
return;
}
VMSideExit* exit = copy(info->exit);
exit->slurpFailSlot = info->curSlot;
exit->slurpType = info->typeMap[info->curSlot];
#if defined DEBUG
/* Make sure that we don't try to record infinite branches. */
JS_ASSERT_IF(anchor && anchor->exitType == RECURSIVE_SLURP_FAIL_EXIT &&
info->curSlot == info->slurpFailSlot,
anchor->slurpType != exit->slurpType);
#endif
LIns* val = slurpSlot(val_ins, vp, exit);
lir->insStorei(val,
lirbuf->sp,
-treeInfo->nativeStackBase + ptrdiff_t(info->curSlot) * sizeof(double));
info->curSlot++;
}

View file

@@ -3083,6 +3083,7 @@ class RegExpNativeCompiler {
Allocator &alloc = *JS_TRACE_MONITOR(cx).dataAlloc;
/* Must only create a VMSideExit; see StackFilter::getTops. */
size_t len = (sizeof(GuardRecord) +
sizeof(VMSideExit) +
(re_length-1) * sizeof(jschar));
@@ -3196,6 +3197,15 @@ class RegExpNativeCompiler {
if (outOfMemory())
goto fail;
/*
* Deep in the nanojit compiler, the StackFilter is trying to throw
* away stores above the VM interpreter/native stacks. We have no such
* stacks, so rely on the fact that lirbuf->sp and lirbuf->rp are null
* to ensure our stores are ignored.
*/
JS_ASSERT(!lirbuf->sp && !lirbuf->rp);
::compile(assm, fragment verbose_only(, tempAlloc, tm->labels));
if (assm->error() != nanojit::None)
goto fail;

View file

@@ -844,7 +844,7 @@ NewToken(JSTokenStream *ts, ptrdiff_t adjust)
tp->pos.begin.index = ts->linepos +
(tp->ptr - ts->linebuf.base) -
ts->ungetpos;
tp->pos.begin.lineno = tp->pos.end.lineno = (uint16)ts->lineno;
tp->pos.begin.lineno = tp->pos.end.lineno = ts->lineno;
return tp;
}
@@ -923,7 +923,7 @@ js_GetToken(JSContext *cx, JSTokenStream *ts)
if (!atom)
goto error;
}
tp->pos.end.lineno = (uint16)ts->lineno;
tp->pos.end.lineno = ts->lineno;
tp->t_op = JSOP_STRING;
tp->t_atom = atom;
goto out;
@@ -1028,7 +1028,7 @@ js_GetToken(JSContext *cx, JSTokenStream *ts)
atom = atomize(cx, tb);
if (!atom)
goto error;
tp->pos.end.lineno = (uint16)ts->lineno;
tp->pos.end.lineno = ts->lineno;
tp->t_op = JSOP_STRING;
tp->t_atom = atom;
tt = TOK_XMLATTR;
@@ -1298,7 +1298,7 @@ retry:
atom = atomize(cx, tb);
if (!atom)
goto error;
tp->pos.end.lineno = (uint16)ts->lineno;
tp->pos.end.lineno = ts->lineno;
tp->t_op = JSOP_STRING;
tp->t_atom = atom;
tt = TOK_STRING;
@@ -1532,7 +1532,7 @@ retry:
if (!atom)
goto error;
tp->t_atom = atom;
tp->pos.end.lineno = (uint16)ts->lineno;
tp->pos.end.lineno = ts->lineno;
goto out;
}

View file

@@ -287,7 +287,7 @@ struct JSTokenStream {
};
#define CURRENT_TOKEN(ts) ((ts)->tokens[(ts)->cursor])
#define ON_CURRENT_LINE(ts,pos) ((uint16)(ts)->lineno == (pos).end.lineno)
#define ON_CURRENT_LINE(ts,pos) ((ts)->lineno == (pos).end.lineno)
/* JSTokenStream flags */
#define TSF_ERROR 0x01 /* fatal error while compiling */

View file

@@ -1022,9 +1022,28 @@ JSScope::reportReadOnlyScope(JSContext *cx)
void
JSScope::generateOwnShape(JSContext *cx)
{
if (object)
#ifdef JS_TRACER
if (object) {
js_LeaveTraceIfGlobalObject(cx, object);
/*
* The JIT must have arranged to re-guard after any unpredictable shape
* change, so if we are on trace here, we should already be prepared to
* bail off trace.
*/
JS_ASSERT_IF(JS_ON_TRACE(cx), cx->bailExit);
/*
* If we are recording, here is where we forget already-guarded shapes.
* Any subsequent property operation upon object on the trace currently
* being recorded will re-guard (and re-memoize).
*/
JSTraceMonitor *tm = &JS_TRACE_MONITOR(cx);
if (TraceRecorder *tr = tm->recorder)
tr->forgetGuardedShapesForObject(object);
}
#endif
shape = js_GenerateShape(cx, false);
setOwnShape();
}

View file

@@ -1021,6 +1021,21 @@ static JSHashAllocOps sftbl_alloc_ops = {
js_alloc_sftbl_entry, js_free_sftbl_entry
};
static void
FinishRuntimeScriptState(JSRuntime *rt)
{
if (rt->scriptFilenameTable) {
JS_HashTableDestroy(rt->scriptFilenameTable);
rt->scriptFilenameTable = NULL;
}
#ifdef JS_THREADSAFE
if (rt->scriptFilenameTableLock) {
JS_DESTROY_LOCK(rt->scriptFilenameTableLock);
rt->scriptFilenameTableLock = NULL;
}
#endif
}
JSBool
js_InitRuntimeScriptState(JSRuntime *rt)
{
@@ -1035,7 +1050,7 @@ js_InitRuntimeScriptState(JSRuntime *rt)
JS_NewHashTable(16, JS_HashString, js_compare_strings, NULL,
&sftbl_alloc_ops, NULL);
if (!rt->scriptFilenameTable) {
js_FinishRuntimeScriptState(rt); /* free lock if threadsafe */
FinishRuntimeScriptState(rt); /* free lock if threadsafe */
return JS_FALSE;
}
JS_INIT_CLIST(&rt->scriptFilenamePrefixes);
@@ -1049,35 +1064,19 @@ typedef struct ScriptFilenamePrefix {
uint32 flags; /* user-defined flags to inherit from this prefix */
} ScriptFilenamePrefix;
void
js_FinishRuntimeScriptState(JSRuntime *rt)
{
if (rt->scriptFilenameTable) {
JS_HashTableDestroy(rt->scriptFilenameTable);
rt->scriptFilenameTable = NULL;
}
#ifdef JS_THREADSAFE
if (rt->scriptFilenameTableLock) {
JS_DESTROY_LOCK(rt->scriptFilenameTableLock);
rt->scriptFilenameTableLock = NULL;
}
#endif
}
void
js_FreeRuntimeScriptState(JSRuntime *rt)
{
ScriptFilenamePrefix *sfp;
if (!rt->scriptFilenameTable)
return;
while (!JS_CLIST_IS_EMPTY(&rt->scriptFilenamePrefixes)) {
sfp = (ScriptFilenamePrefix *) rt->scriptFilenamePrefixes.next;
ScriptFilenamePrefix *sfp = (ScriptFilenamePrefix *)
rt->scriptFilenamePrefixes.next;
JS_REMOVE_LINK(&sfp->links);
js_free(sfp);
}
js_FinishRuntimeScriptState(rt);
FinishRuntimeScriptState(rt);
}
#ifdef DEBUG_brendan
@@ -1302,6 +1301,10 @@ js_SweepScriptFilenames(JSRuntime *rt)
if (!rt->scriptFilenameTable)
return;
/*
* JS_HashTableEnumerateEntries shrinks the table if many entries are
* removed, preventing memory from being wasted on an overly sparse table.
*/
JS_HashTableEnumerateEntries(rt->scriptFilenameTable,
js_script_filename_sweeper,
rt);

View file

@@ -212,17 +212,9 @@ js_InitScriptClass(JSContext *cx, JSObject *obj);
extern JSBool
js_InitRuntimeScriptState(JSRuntime *rt);
/*
* On last context destroy for rt, if script filenames are all GC'd, free the
* script filename table and its lock.
*/
extern void
js_FinishRuntimeScriptState(JSRuntime *rt);
/*
* On JS_DestroyRuntime(rt), forcibly free script filename prefixes and any
* script filename table entries that have not been GC'd, the latter using
* js_FinishRuntimeScriptState.
* script filename table entries that have not been GC'd.
*
* This allows script filename prefixes to outlive any context in rt.
*/

View file

@@ -70,6 +70,9 @@ typedef JSUint16 uint16_t;
typedef JSUint32 uint32_t;
typedef JSUint64 uint64_t;
/* Suppress other, conflicting attempts to define stdint-bits. */
#define _STDINT_H
/* If JS_STDDEF_H_HAS_INTPTR_T or JS_CRTDEFS_H_HAS_INTPTR_T are
defined, then jsinttypes.h included the given header, which
introduced definitions for intptr_t and uintptr_t. Otherwise,

View file

@@ -48,6 +48,8 @@
* of rooting things that might lose their newborn root due to subsequent GC
* allocations in the same native method.
*/
#define __STDC_LIMIT_MACROS
#include <stdlib.h>
#include <string.h>
#include "jstypes.h"
@@ -297,6 +299,8 @@ str_encodeURI(JSContext *cx, uintN argc, jsval *vp);
static JSBool
str_encodeURI_Component(JSContext *cx, uintN argc, jsval *vp);
static const uint32 OVERLONG_UTF8 = UINT32_MAX;
static uint32
Utf8ToOneUcs4Char(const uint8 *utf8Buffer, int utf8Length);
@@ -3025,7 +3029,7 @@ js_NewString(JSContext *cx, jschar *chars, size_t length)
return NULL;
}
str = js_NewGCString(cx, GCX_STRING);
str = js_NewGCString(cx);
if (!str)
return NULL;
str->initFlat(chars, length);
@@ -3095,7 +3099,7 @@ js_NewDependentString(JSContext *cx, JSString *base, size_t start,
return js_NewStringCopyN(cx, base->chars() + start, length);
}
ds = js_NewGCString(cx, GCX_STRING);
ds = js_NewGCString(cx);
if (!ds)
return NULL;
if (start == 0)
@@ -3643,7 +3647,7 @@ js_InflateStringToBuffer(JSContext *cx, const char *src, size_t srclen,
n++;
if (n > srclen)
goto bufferTooSmall;
if (n == 1 || n > 6)
if (n == 1 || n > 4)
goto badCharacter;
for (j = 1; j < n; j++) {
if ((src[j] & 0xC0) != 0x80)
@@ -5162,7 +5166,7 @@ Encode(JSContext *cx, JSString *str, const jschar *unescapedSet,
const jschar *chars;
jschar c, c2;
uint32 v;
uint8 utf8buf[6];
uint8 utf8buf[4];
jschar hexBuf[4];
static const char HexDigits[] = "0123456789ABCDEF"; /* NB: uppercase */
@@ -5226,7 +5230,7 @@ Decode(JSContext *cx, JSString *str, const jschar *reservedSet, jsval *rval)
jschar c, H;
uint32 v;
jsuint B;
uint8 octets[6];
uint8 octets[4];
intN j, n;
str->getCharsAndLength(chars, length);
@@ -5252,7 +5256,7 @@ Decode(JSContext *cx, JSString *str, const jschar *reservedSet, jsval *rval)
n = 1;
while (B & (0x80 >> n))
n++;
if (n == 1 || n > 6)
if (n == 1 || n > 4)
goto report_bad_uri;
octets[0] = (uint8)B;
if (k + 3 * (n - 1) >= length)
@@ -5351,14 +5355,14 @@ str_encodeURI_Component(JSContext *cx, uintN argc, jsval *vp)
/*
* Convert one UCS-4 char and write it into a UTF-8 buffer, which must be at
* least 6 bytes long. Return the number of UTF-8 bytes of data written.
* least 4 bytes long. Return the number of UTF-8 bytes of data written.
*/
int
js_OneUcs4ToUtf8Char(uint8 *utf8Buffer, uint32 ucs4Char)
{
int utf8Length = 1;
JS_ASSERT(ucs4Char <= 0x7FFFFFFF);
JS_ASSERT(ucs4Char <= 0x10FFFF);
if (ucs4Char < 0x80) {
*utf8Buffer = (uint8)ucs4Char;
} else {
@@ -5391,10 +5395,10 @@ Utf8ToOneUcs4Char(const uint8 *utf8Buffer, int utf8Length)
uint32 minucs4Char;
/* from Unicode 3.1, non-shortest form is illegal */
static const uint32 minucs4Table[] = {
0x00000080, 0x00000800, 0x00010000, 0x00200000, 0x04000000
0x00000080, 0x00000800, 0x00010000
};
JS_ASSERT(utf8Length >= 1 && utf8Length <= 6);
JS_ASSERT(utf8Length >= 1 && utf8Length <= 4);
if (utf8Length == 1) {
ucs4Char = *utf8Buffer;
JS_ASSERT(!(ucs4Char & 0x80));
@@ -5407,8 +5411,9 @@ Utf8ToOneUcs4Char(const uint8 *utf8Buffer, int utf8Length)
JS_ASSERT((*utf8Buffer & 0xC0) == 0x80);
ucs4Char = ucs4Char<<6 | (*utf8Buffer++ & 0x3F);
}
if (ucs4Char < minucs4Char ||
ucs4Char == 0xFFFE || ucs4Char == 0xFFFF) {
if (JS_UNLIKELY(ucs4Char < minucs4Char)) {
ucs4Char = OVERLONG_UTF8;
} else if (ucs4Char == 0xFFFE || ucs4Char == 0xFFFF) {
ucs4Char = 0xFFFD;
}
}
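Editorial note on the 6-to-4 byte change running through this file: once inputs are restricted to genuine Unicode scalar values (at most U+10FFFF, per the tightened assertion above), the longest legal UTF-8 sequence is four bytes, so the buffers, the n > 4 length checks, and minucs4Table all shrink accordingly. A quick illustrative check against the updated function:

    uint8 utf8buf[4];
    int len = js_OneUcs4ToUtf8Char(utf8buf, 0x10FFFF);
    /* len == 4; utf8buf holds 0xF4 0x8F 0xBF 0xBF, the longest legal sequence. */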

2773
js/src/jstracer.cpp Executable file → Normal file

File diff suppressed because it is too large. Load diff

View file

@@ -47,6 +47,7 @@
#include "jstypes.h"
#include "jsbuiltins.h"
#include "jscntxt.h"
#include "jsdhash.h"
#include "jsinterp.h"
#include "jslock.h"
#include "jsnum.h"
@@ -157,18 +158,37 @@ public:
/*
* Tracker is used to keep track of values being manipulated by the interpreter
* during trace recording. Note that tracker pages aren't necessarily the
* same size as OS pages, they just are a moderate-sized chunk of memory.
* during trace recording. It maps opaque, 4-byte aligned addresses to LIns
* pointers. To do this efficiently, we observe that the addresses of jsvals
* living in the interpreter tend to be aggregated close to each other -
* usually on the same page (where a tracker page doesn't have to be the same
* size as the OS page size, but it's typically similar). The Tracker
* consists of a linked-list of structures representing a memory page, which
* are created on-demand as memory locations are used.
*
* For every address, first we split it into two parts: upper bits which
* represent the "base", and lower bits which represent an offset against the
* base. For the offset, we then right-shift it by two because the bottom two
* bits of a 4-byte aligned address are always zero. The mapping then
* becomes:
*
* page = page in pagelist such that Base(address) == page->base,
* page->map[Offset(address)]
*/
class Tracker {
#define TRACKER_PAGE_SZB 4096
#define TRACKER_PAGE_ENTRIES (TRACKER_PAGE_SZB >> 2) // each slot is 4 bytes
#define TRACKER_PAGE_MASK jsuword(TRACKER_PAGE_SZB - 1)
struct TrackerPage {
struct TrackerPage* next;
jsuword base;
nanojit::LIns* map[1];
nanojit::LIns* map[TRACKER_PAGE_ENTRIES];
};
struct TrackerPage* pagelist;
jsuword getTrackerPageBase(const void* v) const;
jsuword getTrackerPageOffset(const void* v) const;
struct TrackerPage* findTrackerPage(const void* v) const;
struct TrackerPage* addTrackerPage(const void* v);
public:
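Given the TRACKER_PAGE_SZB and TRACKER_PAGE_MASK definitions above, the two address-splitting helpers declared here reduce to mask-and-shift operations. A minimal editorial sketch consistent with the comment (not the committed bodies):

    jsuword
    Tracker::getTrackerPageBase(const void* v) const
    {
        /* The upper bits identify the page. */
        return jsuword(v) & ~TRACKER_PAGE_MASK;
    }

    jsuword
    Tracker::getTrackerPageOffset(const void* v) const
    {
        /* Low bits, shifted right by two: 4-byte aligned addresses are zero there. */
        return (jsuword(v) & TRACKER_PAGE_MASK) >> 2;
    }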
@@ -260,7 +280,9 @@ public:
JS_REQUIRES_STACK void markGlobalSlotUndemotable(JSContext* cx, unsigned slot);
JS_REQUIRES_STACK bool isGlobalSlotUndemotable(JSContext* cx, unsigned slot) const;
JS_REQUIRES_STACK void markStackSlotUndemotable(JSContext* cx, unsigned slot);
JS_REQUIRES_STACK void markStackSlotUndemotable(JSContext* cx, unsigned slot, const void* pc);
JS_REQUIRES_STACK bool isStackSlotUndemotable(JSContext* cx, unsigned slot) const;
JS_REQUIRES_STACK bool isStackSlotUndemotable(JSContext* cx, unsigned slot, const void* pc) const;
void markInstructionUndemotable(jsbytecode* pc);
bool isInstructionUndemotable(jsbytecode* pc) const;
@@ -365,7 +387,19 @@ public:
_(UNSTABLE_LOOP) \
_(TIMEOUT) \
_(DEEP_BAIL) \
_(STATUS)
_(STATUS) \
/* Exit is almost recursive and wants a peer at recursive_pc */ \
_(RECURSIVE_UNLINKED) \
/* Exit is recursive, and there are no more frames */ \
_(RECURSIVE_LOOP) \
/* Exit is recursive, but type-mismatched guarding on a down frame */ \
_(RECURSIVE_MISMATCH) \
/* Exit is recursive, and the JIT wants to try slurping interp frames */ \
_(RECURSIVE_EMPTY_RP) \
/* Slurping interp frames in up-recursion failed */ \
_(RECURSIVE_SLURP_FAIL) \
/* Tried to slurp an interp frame, but the pc or argc mismatched */ \
_(RECURSIVE_SLURP_MISMATCH)
enum ExitType {
#define MAKE_EXIT_CODE(x) x##_EXIT,
@@ -374,6 +408,8 @@ enum ExitType {
TOTAL_EXIT_TYPES
};
struct FrameInfo;
struct VMSideExit : public nanojit::SideExit
{
JSObject* block;
@@ -387,6 +423,11 @@ struct VMSideExit : public nanojit::SideExit
uint32 numStackSlotsBelowCurrentFrame;
ExitType exitType;
uintN lookupFlags;
void* recursive_pc;
FrameInfo* recursive_down;
unsigned hitcount;
unsigned slurpFailSlot;
JSTraceType slurpType;
/*
* Ordinarily 0. If a slow native function is atop the stack, the 1 bit is
@@ -410,6 +451,11 @@ struct VMSideExit : public nanojit::SideExit
return (JSTraceType*)(this + 1);
}
inline JSTraceType& stackType(unsigned i) {
JS_ASSERT(i < numStackSlots);
return stackTypeMap()[i];
}
inline JSTraceType* globalTypeMap() {
return (JSTraceType*)(this + 1) + this->numStackSlots;
}
@@ -438,6 +484,45 @@ public:
return mOutOfMemory;
}
struct Mark
{
VMAllocator& vma;
bool committed;
nanojit::Allocator::Chunk* saved_chunk;
char* saved_top;
char* saved_limit;
size_t saved_size;
Mark(VMAllocator& vma) :
vma(vma),
committed(false),
saved_chunk(vma.current_chunk),
saved_top(vma.current_top),
saved_limit(vma.current_limit),
saved_size(vma.mSize)
{}
~Mark()
{
if (!committed)
vma.rewind(*this);
}
void commit() { committed = true; }
};
void rewind(const Mark& m) {
while (current_chunk != m.saved_chunk) {
Chunk *prev = current_chunk->prev;
freeChunk(current_chunk);
current_chunk = prev;
}
current_top = m.saved_top;
current_limit = m.saved_limit;
mSize = m.saved_size;
memset(current_top, 0, current_limit - current_top);
}
bool mOutOfMemory;
size_t mSize;
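Mark is an RAII rewind guard over the allocator: the constructor snapshots the current chunk, top, limit, and size, and the destructor calls rewind() unless commit() was invoked first. A hedged usage sketch; the names alloc and succeeded are illustrative, not from the patch:

    {
        VMAllocator::Mark mark(alloc);   /* snapshot allocator state */
        /* ... emit speculative LIR, allocating from alloc ... */
        if (succeeded)
            mark.commit();               /* keep the allocations */
    }   /* otherwise ~Mark() rewinds and zeroes the reclaimed space */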
@@ -481,6 +566,8 @@ struct REHashFn {
}
};
class TreeInfo;
struct FrameInfo {
JSObject* block; // caller block chain head
jsbytecode* pc; // caller fp->regs->pc
@@ -516,6 +603,7 @@ struct FrameInfo {
// The typemap just before the callee is called.
JSTraceType* get_typemap() { return (JSTraceType*) (this+1); }
const JSTraceType* get_typemap() const { return (JSTraceType*) (this+1); }
};
struct UnstableExit
@@ -525,6 +613,21 @@ struct UnstableExit
UnstableExit* next;
};
enum MonitorReason
{
Monitor_Branch,
Monitor_EnterFrame,
Monitor_LeaveFrame
};
enum RecursionStatus
{
Recursion_None, /* No recursion has been compiled yet. */
Recursion_Disallowed, /* This tree cannot be recursive. */
Recursion_Unwinds, /* Tree is up-recursive only. */
Recursion_Detected /* Tree has down recursion and maybe up recursion. */
};
class TreeInfo {
public:
nanojit::Fragment* const fragment;
@@ -550,6 +653,7 @@ public:
uintN treeLineNumber;
uintN treePCOffset;
#endif
RecursionStatus recursion;
TreeInfo(nanojit::Allocator* alloc,
nanojit::Fragment* _fragment,
@@ -568,7 +672,8 @@ public:
sideExits(alloc),
unstableExits(NULL),
gcthings(alloc),
sprops(alloc)
sprops(alloc),
recursion(Recursion_None)
{}
inline unsigned nGlobalTypes() {
@@ -584,7 +689,7 @@ public:
UnstableExit* removeUnstableExit(VMSideExit* exit);
};
#if defined(JS_JIT_SPEW) && (defined(NANOJIT_IA32) || (defined(NANOJIT_AMD64) && defined(__GNUC__)))
#if defined(JS_JIT_SPEW) && (defined(NANOJIT_IA32) || defined(NANOJIT_X64))
# define EXECUTE_TREE_TIMER
#endif
@@ -600,6 +705,7 @@ struct InterpState
JSContext *cx; // current VM context handle
double *eos; // first unusable word after the native stack
void *eor; // first unusable word after the call stack
void *sor; // start of rp stack
VMSideExit* lastTreeExitGuard; // guard we exited on during a tree call
VMSideExit* lastTreeCallGuard; // guard we want to grow from if the tree
// call exit guard mismatched
@@ -625,7 +731,7 @@ struct InterpState
double* deepBailSp;
// Used when calling natives from trace to root the vp vector. */
// Used when calling natives from trace to root the vp vector.
uintN nativeVpLen;
jsval *nativeVp;
};
@@ -653,35 +759,120 @@ js_SetBuiltinError(JSContext *cx)
cx->interpState->builtinStatus |= JSBUILTIN_ERROR;
}
#ifdef DEBUG_JSRS_NOT_BOOL
struct JSRecordingStatus {
#ifdef DEBUG_RECORDING_STATUS_NOT_BOOL
/* #define DEBUG_RECORDING_STATUS_NOT_BOOL to detect misuses of RecordingStatus */
struct RecordingStatus {
int code;
bool operator==(JSRecordingStatus &s) { return this->code == s.code; };
bool operator!=(JSRecordingStatus &s) { return this->code != s.code; };
bool operator==(RecordingStatus &s) { return this->code == s.code; };
bool operator!=(RecordingStatus &s) { return this->code != s.code; };
};
enum JSRScodes {
JSRS_ERROR_code,
JSRS_STOP_code,
JSRS_CONTINUE_code,
JSRS_IMACRO_code
enum RecordingStatusCodes {
RECORD_ERROR_code = 0,
RECORD_STOP_code = 1,
RECORD_CONTINUE_code = 3,
RECORD_IMACRO_code = 4
};
struct JSRecordingStatus JSRS_CONTINUE = { JSRS_CONTINUE_code };
struct JSRecordingStatus JSRS_STOP = { JSRS_STOP_code };
struct JSRecordingStatus JSRS_IMACRO = { JSRS_IMACRO_code };
struct JSRecordingStatus JSRS_ERROR = { JSRS_ERROR_code };
#define STATUS_ABORTS_RECORDING(s) ((s) == JSRS_STOP || (s) == JSRS_ERROR)
RecordingStatus RECORD_CONTINUE = { RECORD_CONTINUE_code };
RecordingStatus RECORD_STOP = { RECORD_STOP_code };
RecordingStatus RECORD_IMACRO = { RECORD_IMACRO_code };
RecordingStatus RECORD_ERROR = { RECORD_ERROR_code };
struct AbortableRecordingStatus {
int code;
bool operator==(AbortableRecordingStatus &s) { return this->code == s.code; };
bool operator!=(AbortableRecordingStatus &s) { return this->code != s.code; };
};
enum AbortableRecordingStatusCodes {
ARECORD_ERROR_code = 0,
ARECORD_STOP_code = 1,
ARECORD_ABORTED_code = 2,
ARECORD_CONTINUE_code = 3,
ARECORD_IMACRO_code = 4
};
AbortableRecordingStatus ARECORD_ERROR = { ARECORD_ERROR_code };
AbortableRecordingStatus ARECORD_STOP = { ARECORD_STOP_code };
AbortableRecordingStatus ARECORD_CONTINUE = { ARECORD_CONTINUE_code };
AbortableRecordingStatus ARECORD_IMACRO = { ARECORD_IMACRO_code };
AbortableRecordingStatus ARECORD_ABORTED = { ARECORD_ABORTED_code };
static inline AbortableRecordingStatus
InjectStatus(RecordingStatus rs)
{
AbortableRecordingStatus ars = { rs.code };
return ars;
}
static inline AbortableRecordingStatus
InjectStatus(AbortableRecordingStatus ars)
{
return ars;
}
static inline bool
StatusAbortsRecording(AbortableRecordingStatus ars)
{
return ars == ARECORD_ERROR || ars == ARECORD_STOP || ars == ARECORD_ABORTED;
}
#else
enum JSRecordingStatus {
JSRS_ERROR, // Error; propagate to interpreter.
JSRS_STOP, // Abort recording.
JSRS_CONTINUE, // Continue recording.
JSRS_IMACRO // Entered imacro; continue recording.
/*
* Normally, during recording, when the recorder cannot continue, it returns
* ARECORD_STOP to indicate that recording should be aborted by the top-level
* recording function. However, if the recorder reenters the interpreter (e.g.,
* when executing an inner loop), there will be an immediate abort. This
* condition must be carefully detected and propagated out of all nested
* recorder calls lest the now-invalid TraceRecorder object be accessed
* accidentally. This condition is indicated by the ARECORD_ABORTED value.
*
* The AbortableRecordingStatus enumeration represents the general set of
* possible results of calling a recorder function. Functions that cannot
* possibly return ARECORD_ABORTED may statically guarantee this to the caller
* using the RecordingStatus enumeration. Ideally, C++ would allow subtyping
* of enumerations, but it doesn't. To simulate subtype conversion manually,
* code should call InjectStatus to inject a value of the restricted set into a
* value of the general set.
*/
enum RecordingStatus {
RECORD_ERROR = 0, // Error; propagate to interpreter.
RECORD_STOP = 1, // Recording should be aborted at the top-level
// call to the recorder.
// (value reserved for ARECORD_ABORTED)
RECORD_CONTINUE = 3, // Continue recording.
RECORD_IMACRO = 4 // Entered imacro; continue recording.
// Only JSOP_IS_IMACOP opcodes may return this.
};
#define STATUS_ABORTS_RECORDING(s) ((s) <= JSRS_STOP)
enum AbortableRecordingStatus {
ARECORD_ERROR = 0,
ARECORD_STOP = 1,
ARECORD_ABORTED = 2, // Recording has already been aborted; the recorder
// has been deleted.
ARECORD_CONTINUE = 3,
ARECORD_IMACRO = 4
};
static JS_ALWAYS_INLINE AbortableRecordingStatus
InjectStatus(RecordingStatus rs)
{
return static_cast<AbortableRecordingStatus>(rs);
}
static JS_ALWAYS_INLINE AbortableRecordingStatus
InjectStatus(AbortableRecordingStatus ars)
{
return ars;
}
static JS_ALWAYS_INLINE bool
StatusAbortsRecording(AbortableRecordingStatus ars)
{
return ars <= ARECORD_ABORTED;
}
#endif
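A minimal sketch of the conversion pattern described above, assuming a hypothetical helper recordAdd() that can never abort (neither method below is part of this patch):

    // Narrow set: this helper can never observe ARECORD_ABORTED.
    RecordingStatus
    TraceRecorder::recordAdd()
    {
        // ... emit LIR for the addition ...
        return RECORD_CONTINUE;
    }

    // General set: an opcode recorder may see an abort in a callee.
    AbortableRecordingStatus
    TraceRecorder::record_JSOP_ADD()
    {
        // InjectStatus stands in for the enum subtype conversion C++ lacks.
        return InjectStatus(recordAdd());
    }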
class SlotMap;
class SlurpInfo;
/* Results of trying to compare two typemaps together */
enum TypeConsensus
@ -693,6 +884,7 @@ enum TypeConsensus
class TraceRecorder {
VMAllocator& tempAlloc;
VMAllocator::Mark mark;
JSContext* cx;
JSTraceMonitor* traceMonitor;
JSObject* globalObj;
@ -735,6 +927,7 @@ class TraceRecorder {
uint32 outerArgc; /* outer trace deepest frame argc */
bool loop;
nanojit::LIns* loopLabel;
MonitorReason monitorReason;
nanojit::LIns* insImmObj(JSObject* obj);
nanojit::LIns* insImmFun(JSFunction* fun);
@ -756,6 +949,24 @@ class TraceRecorder {
JS_REQUIRES_STACK void guard(bool expected, nanojit::LIns* cond, ExitType exitType);
JS_REQUIRES_STACK void guard(bool expected, nanojit::LIns* cond, VMSideExit* exit);
JS_REQUIRES_STACK nanojit::LIns* slurpInt32Slot(nanojit::LIns* val_ins, jsval* vp,
VMSideExit* exit);
JS_REQUIRES_STACK nanojit::LIns* slurpDoubleSlot(nanojit::LIns* val_ins, jsval* vp,
VMSideExit* exit);
JS_REQUIRES_STACK nanojit::LIns* slurpStringSlot(nanojit::LIns* val_ins, jsval* vp,
VMSideExit* exit);
JS_REQUIRES_STACK nanojit::LIns* slurpObjectSlot(nanojit::LIns* val_ins, jsval* vp,
VMSideExit* exit);
JS_REQUIRES_STACK nanojit::LIns* slurpFunctionSlot(nanojit::LIns* val_ins, jsval* vp,
VMSideExit* exit);
JS_REQUIRES_STACK nanojit::LIns* slurpNullSlot(nanojit::LIns* val_ins, jsval* vp,
VMSideExit* exit);
JS_REQUIRES_STACK nanojit::LIns* slurpBoolSlot(nanojit::LIns* val_ins, jsval* vp,
VMSideExit* exit);
JS_REQUIRES_STACK nanojit::LIns* slurpSlot(nanojit::LIns* val_ins, jsval* vp,
VMSideExit* exit);
JS_REQUIRES_STACK void slurpSlot(nanojit::LIns* val_ins, jsval* vp, SlurpInfo* info);
JS_REQUIRES_STACK AbortableRecordingStatus slurpDownFrames(jsbytecode* return_pc);
nanojit::LIns* addName(nanojit::LIns* ins, const char* name);
@ -770,7 +981,8 @@ class TraceRecorder {
JS_REQUIRES_STACK void checkForGlobalObjectReallocation();
JS_REQUIRES_STACK TypeConsensus selfTypeStability(SlotMap& smap);
JS_REQUIRES_STACK TypeConsensus peerTypeStability(SlotMap& smap, VMFragment** peer);
JS_REQUIRES_STACK TypeConsensus peerTypeStability(SlotMap& smap, const void* ip,
VMFragment** peer);
JS_REQUIRES_STACK jsval& argval(unsigned n) const;
JS_REQUIRES_STACK jsval& varval(unsigned n) const;
@ -789,9 +1001,9 @@ class TraceRecorder {
JS_REQUIRES_STACK nanojit::LIns* scopeChain() const;
JS_REQUIRES_STACK JSStackFrame* frameIfInRange(JSObject* obj, unsigned* depthp = NULL) const;
JS_REQUIRES_STACK JSRecordingStatus traverseScopeChain(JSObject *obj, nanojit::LIns *obj_ins, JSObject *obj2, nanojit::LIns *&obj2_ins);
JS_REQUIRES_STACK JSRecordingStatus scopeChainProp(JSObject* obj, jsval*& vp, nanojit::LIns*& ins, NameResult& nr);
JS_REQUIRES_STACK JSRecordingStatus callProp(JSObject* obj, JSProperty* sprop, jsid id, jsval*& vp, nanojit::LIns*& ins, NameResult& nr);
JS_REQUIRES_STACK RecordingStatus traverseScopeChain(JSObject *obj, nanojit::LIns *obj_ins, JSObject *obj2, nanojit::LIns *&obj2_ins);
JS_REQUIRES_STACK AbortableRecordingStatus scopeChainProp(JSObject* obj, jsval*& vp, nanojit::LIns*& ins, NameResult& nr);
JS_REQUIRES_STACK RecordingStatus callProp(JSObject* obj, JSProperty* sprop, jsid id, jsval*& vp, nanojit::LIns*& ins, NameResult& nr);
JS_REQUIRES_STACK nanojit::LIns* arg(unsigned n);
JS_REQUIRES_STACK void arg(unsigned n, nanojit::LIns* i);
@ -810,45 +1022,53 @@ class TraceRecorder {
JS_REQUIRES_STACK nanojit::LIns* newArguments();
JS_REQUIRES_STACK JSRecordingStatus call_imacro(jsbytecode* imacro);
JS_REQUIRES_STACK RecordingStatus call_imacro(jsbytecode* imacro);
JS_REQUIRES_STACK JSRecordingStatus ifop();
JS_REQUIRES_STACK JSRecordingStatus switchop();
JS_REQUIRES_STACK AbortableRecordingStatus ifop();
JS_REQUIRES_STACK RecordingStatus switchop();
#ifdef NANOJIT_IA32
JS_REQUIRES_STACK JSRecordingStatus tableswitch();
JS_REQUIRES_STACK AbortableRecordingStatus tableswitch();
#endif
JS_REQUIRES_STACK JSRecordingStatus inc(jsval& v, jsint incr, bool pre = true);
JS_REQUIRES_STACK JSRecordingStatus inc(jsval v, nanojit::LIns*& v_ins, jsint incr,
JS_REQUIRES_STACK RecordingStatus inc(jsval& v, jsint incr, bool pre = true);
JS_REQUIRES_STACK RecordingStatus inc(jsval v, nanojit::LIns*& v_ins, jsint incr,
bool pre = true);
JS_REQUIRES_STACK JSRecordingStatus incHelper(jsval v, nanojit::LIns* v_ins,
JS_REQUIRES_STACK RecordingStatus incHelper(jsval v, nanojit::LIns* v_ins,
nanojit::LIns*& v_after, jsint incr);
JS_REQUIRES_STACK JSRecordingStatus incProp(jsint incr, bool pre = true);
JS_REQUIRES_STACK JSRecordingStatus incElem(jsint incr, bool pre = true);
JS_REQUIRES_STACK JSRecordingStatus incName(jsint incr, bool pre = true);
JS_REQUIRES_STACK AbortableRecordingStatus incProp(jsint incr, bool pre = true);
JS_REQUIRES_STACK RecordingStatus incElem(jsint incr, bool pre = true);
JS_REQUIRES_STACK AbortableRecordingStatus incName(jsint incr, bool pre = true);
JS_REQUIRES_STACK void strictEquality(bool equal, bool cmpCase);
JS_REQUIRES_STACK JSRecordingStatus equality(bool negate, bool tryBranchAfterCond);
JS_REQUIRES_STACK JSRecordingStatus equalityHelper(jsval l, jsval r,
JS_REQUIRES_STACK AbortableRecordingStatus equality(bool negate, bool tryBranchAfterCond);
JS_REQUIRES_STACK AbortableRecordingStatus equalityHelper(jsval l, jsval r,
nanojit::LIns* l_ins, nanojit::LIns* r_ins,
bool negate, bool tryBranchAfterCond,
jsval& rval);
JS_REQUIRES_STACK JSRecordingStatus relational(nanojit::LOpcode op, bool tryBranchAfterCond);
JS_REQUIRES_STACK AbortableRecordingStatus relational(nanojit::LOpcode op, bool tryBranchAfterCond);
JS_REQUIRES_STACK JSRecordingStatus unary(nanojit::LOpcode op);
JS_REQUIRES_STACK JSRecordingStatus binary(nanojit::LOpcode op);
JS_REQUIRES_STACK RecordingStatus unary(nanojit::LOpcode op);
JS_REQUIRES_STACK RecordingStatus binary(nanojit::LOpcode op);
JS_REQUIRES_STACK void guardShape(nanojit::LIns* obj_ins, JSObject* obj,
uint32 shape, const char* guardName,
JS_REQUIRES_STACK RecordingStatus guardShape(nanojit::LIns* obj_ins, JSObject* obj,
uint32 shape, const char* name,
nanojit::LIns* map_ins, VMSideExit* exit);
JSDHashTable guardedShapeTable;
#if defined DEBUG_notme && defined XP_UNIX
void dumpGuardedShapes(const char* prefix);
#endif
void forgetGuardedShapes();
inline nanojit::LIns* map(nanojit::LIns *obj_ins);
JS_REQUIRES_STACK bool map_is_native(JSObjectMap* map, nanojit::LIns* map_ins,
nanojit::LIns*& ops_ins, size_t op_offset = 0);
JS_REQUIRES_STACK JSRecordingStatus test_property_cache(JSObject* obj, nanojit::LIns* obj_ins,
JS_REQUIRES_STACK AbortableRecordingStatus test_property_cache(JSObject* obj, nanojit::LIns* obj_ins,
JSObject*& obj2, jsuword& pcval);
JS_REQUIRES_STACK JSRecordingStatus guardNativePropertyOp(JSObject* aobj,
JS_REQUIRES_STACK RecordingStatus guardNativePropertyOp(JSObject* aobj,
nanojit::LIns* map_ins);
JS_REQUIRES_STACK JSRecordingStatus guardPropertyCacheHit(nanojit::LIns* obj_ins,
JS_REQUIRES_STACK RecordingStatus guardPropertyCacheHit(nanojit::LIns* obj_ins,
nanojit::LIns* map_ins,
JSObject* aobj,
JSObject* obj2,
@ -882,44 +1102,44 @@ class TraceRecorder {
nanojit::LIns* getStringLength(nanojit::LIns* str_ins);
JS_REQUIRES_STACK JSRecordingStatus name(jsval*& vp, nanojit::LIns*& ins, NameResult& nr);
JS_REQUIRES_STACK JSRecordingStatus prop(JSObject* obj, nanojit::LIns* obj_ins, uint32 *slotp,
JS_REQUIRES_STACK AbortableRecordingStatus name(jsval*& vp, nanojit::LIns*& ins, NameResult& nr);
JS_REQUIRES_STACK AbortableRecordingStatus prop(JSObject* obj, nanojit::LIns* obj_ins, uint32 *slotp,
nanojit::LIns** v_insp, jsval* outp);
JS_REQUIRES_STACK JSRecordingStatus denseArrayElement(jsval& oval, jsval& idx, jsval*& vp,
JS_REQUIRES_STACK RecordingStatus denseArrayElement(jsval& oval, jsval& idx, jsval*& vp,
nanojit::LIns*& v_ins,
nanojit::LIns*& addr_ins);
JS_REQUIRES_STACK JSRecordingStatus getProp(JSObject* obj, nanojit::LIns* obj_ins);
JS_REQUIRES_STACK JSRecordingStatus getProp(jsval& v);
JS_REQUIRES_STACK JSRecordingStatus getThis(nanojit::LIns*& this_ins);
JS_REQUIRES_STACK AbortableRecordingStatus getProp(JSObject* obj, nanojit::LIns* obj_ins);
JS_REQUIRES_STACK AbortableRecordingStatus getProp(jsval& v);
JS_REQUIRES_STACK RecordingStatus getThis(nanojit::LIns*& this_ins);
JS_REQUIRES_STACK VMSideExit* enterDeepBailCall();
JS_REQUIRES_STACK void leaveDeepBailCall();
JS_REQUIRES_STACK JSRecordingStatus primitiveToStringInPlace(jsval* vp);
JS_REQUIRES_STACK RecordingStatus primitiveToStringInPlace(jsval* vp);
JS_REQUIRES_STACK void finishGetProp(nanojit::LIns* obj_ins, nanojit::LIns* vp_ins,
nanojit::LIns* ok_ins, jsval* outp);
JS_REQUIRES_STACK JSRecordingStatus getPropertyByName(nanojit::LIns* obj_ins, jsval* idvalp,
JS_REQUIRES_STACK RecordingStatus getPropertyByName(nanojit::LIns* obj_ins, jsval* idvalp,
jsval* outp);
JS_REQUIRES_STACK JSRecordingStatus getPropertyByIndex(nanojit::LIns* obj_ins,
JS_REQUIRES_STACK RecordingStatus getPropertyByIndex(nanojit::LIns* obj_ins,
nanojit::LIns* index_ins, jsval* outp);
JS_REQUIRES_STACK JSRecordingStatus getPropertyById(nanojit::LIns* obj_ins, jsval* outp);
JS_REQUIRES_STACK JSRecordingStatus getPropertyWithNativeGetter(nanojit::LIns* obj_ins,
JS_REQUIRES_STACK RecordingStatus getPropertyById(nanojit::LIns* obj_ins, jsval* outp);
JS_REQUIRES_STACK RecordingStatus getPropertyWithNativeGetter(nanojit::LIns* obj_ins,
JSScopeProperty* sprop,
jsval* outp);
JS_REQUIRES_STACK JSRecordingStatus nativeSet(JSObject* obj, nanojit::LIns* obj_ins,
JS_REQUIRES_STACK RecordingStatus nativeSet(JSObject* obj, nanojit::LIns* obj_ins,
JSScopeProperty* sprop,
jsval v, nanojit::LIns* v_ins);
JS_REQUIRES_STACK JSRecordingStatus setProp(jsval &l, JSPropCacheEntry* entry,
JS_REQUIRES_STACK RecordingStatus setProp(jsval &l, JSPropCacheEntry* entry,
JSScopeProperty* sprop,
jsval &v, nanojit::LIns*& v_ins);
JS_REQUIRES_STACK JSRecordingStatus setCallProp(JSObject *callobj, nanojit::LIns *callobj_ins,
JS_REQUIRES_STACK RecordingStatus setCallProp(JSObject *callobj, nanojit::LIns *callobj_ins,
JSScopeProperty *sprop, nanojit::LIns *v_ins,
jsval v);
JS_REQUIRES_STACK JSRecordingStatus initOrSetPropertyByName(nanojit::LIns* obj_ins,
JS_REQUIRES_STACK RecordingStatus initOrSetPropertyByName(nanojit::LIns* obj_ins,
jsval* idvalp, jsval* rvalp,
bool init);
JS_REQUIRES_STACK JSRecordingStatus initOrSetPropertyByIndex(nanojit::LIns* obj_ins,
JS_REQUIRES_STACK RecordingStatus initOrSetPropertyByIndex(nanojit::LIns* obj_ins,
nanojit::LIns* index_ins,
jsval* rvalp, bool init);
@ -934,44 +1154,44 @@ class TraceRecorder {
JS_REQUIRES_STACK bool guardHasPrototype(JSObject* obj, nanojit::LIns* obj_ins,
JSObject** pobj, nanojit::LIns** pobj_ins,
VMSideExit* exit);
JS_REQUIRES_STACK JSRecordingStatus guardPrototypeHasNoIndexedProperties(JSObject* obj,
JS_REQUIRES_STACK RecordingStatus guardPrototypeHasNoIndexedProperties(JSObject* obj,
nanojit::LIns* obj_ins,
ExitType exitType);
JS_REQUIRES_STACK JSRecordingStatus guardNotGlobalObject(JSObject* obj,
JS_REQUIRES_STACK RecordingStatus guardNotGlobalObject(JSObject* obj,
nanojit::LIns* obj_ins);
void clearFrameSlotsFromCache();
JS_REQUIRES_STACK void putArguments();
JS_REQUIRES_STACK JSRecordingStatus guardCallee(jsval& callee);
JS_REQUIRES_STACK RecordingStatus guardCallee(jsval& callee);
JS_REQUIRES_STACK JSStackFrame *guardArguments(JSObject *obj, nanojit::LIns* obj_ins,
unsigned *depthp);
JS_REQUIRES_STACK JSRecordingStatus getClassPrototype(JSObject* ctor,
JS_REQUIRES_STACK RecordingStatus getClassPrototype(JSObject* ctor,
nanojit::LIns*& proto_ins);
JS_REQUIRES_STACK JSRecordingStatus getClassPrototype(JSProtoKey key,
JS_REQUIRES_STACK RecordingStatus getClassPrototype(JSProtoKey key,
nanojit::LIns*& proto_ins);
JS_REQUIRES_STACK JSRecordingStatus newArray(JSObject* ctor, uint32 argc, jsval* argv,
JS_REQUIRES_STACK RecordingStatus newArray(JSObject* ctor, uint32 argc, jsval* argv,
jsval* rval);
JS_REQUIRES_STACK JSRecordingStatus newString(JSObject* ctor, uint32 argc, jsval* argv,
JS_REQUIRES_STACK RecordingStatus newString(JSObject* ctor, uint32 argc, jsval* argv,
jsval* rval);
JS_REQUIRES_STACK JSRecordingStatus interpretedFunctionCall(jsval& fval, JSFunction* fun,
JS_REQUIRES_STACK RecordingStatus interpretedFunctionCall(jsval& fval, JSFunction* fun,
uintN argc, bool constructing);
JS_REQUIRES_STACK void propagateFailureToBuiltinStatus(nanojit::LIns *ok_ins,
nanojit::LIns *&status_ins);
JS_REQUIRES_STACK JSRecordingStatus emitNativeCall(JSSpecializedNative* sn, uintN argc,
JS_REQUIRES_STACK RecordingStatus emitNativeCall(JSSpecializedNative* sn, uintN argc,
nanojit::LIns* args[], bool rooted);
JS_REQUIRES_STACK void emitNativePropertyOp(JSScope* scope,
JSScopeProperty* sprop,
nanojit::LIns* obj_ins,
bool setflag,
nanojit::LIns* boxed_ins);
JS_REQUIRES_STACK JSRecordingStatus callSpecializedNative(JSNativeTraceInfo* trcinfo, uintN argc,
JS_REQUIRES_STACK RecordingStatus callSpecializedNative(JSNativeTraceInfo* trcinfo, uintN argc,
bool constructing);
JS_REQUIRES_STACK JSRecordingStatus callNative(uintN argc, JSOp mode);
JS_REQUIRES_STACK JSRecordingStatus functionCall(uintN argc, JSOp mode);
JS_REQUIRES_STACK RecordingStatus callNative(uintN argc, JSOp mode);
JS_REQUIRES_STACK RecordingStatus functionCall(uintN argc, JSOp mode);
JS_REQUIRES_STACK void trackCfgMerges(jsbytecode* pc);
JS_REQUIRES_STACK void emitIf(jsbytecode* pc, bool cond, nanojit::LIns* x);
JS_REQUIRES_STACK void fuseIf(jsbytecode* pc, bool cond, nanojit::LIns* x);
JS_REQUIRES_STACK JSRecordingStatus checkTraceEnd(jsbytecode* pc);
JS_REQUIRES_STACK AbortableRecordingStatus checkTraceEnd(jsbytecode* pc);
bool hasMethod(JSObject* obj, jsid id);
JS_REQUIRES_STACK bool hasIteratorMethod(JSObject* obj);
@ -996,12 +1216,12 @@ public:
TraceRecorder(JSContext* cx, VMSideExit*, nanojit::Fragment*, TreeInfo*,
unsigned stackSlots, unsigned ngslots, JSTraceType* typeMap,
VMSideExit* expectedInnerExit, jsbytecode* outerTree,
uint32 outerArgc);
uint32 outerArgc, MonitorReason monitorReason);
~TraceRecorder();
bool outOfMemory();
static JS_REQUIRES_STACK JSRecordingStatus monitorRecording(JSContext* cx, TraceRecorder* tr,
static JS_REQUIRES_STACK AbortableRecordingStatus monitorRecording(JSContext* cx, TraceRecorder* tr,
JSOp op);
JS_REQUIRES_STACK JSTraceType determineSlotType(jsval* vp);
@ -1028,26 +1248,35 @@ public:
nanojit::Fragment* getFragment() const { return fragment; }
TreeInfo* getTreeInfo() const { return treeInfo; }
JS_REQUIRES_STACK bool compile(JSTraceMonitor* tm);
JS_REQUIRES_STACK bool closeLoop(TypeConsensus &consensus);
JS_REQUIRES_STACK bool closeLoop(SlotMap& slotMap, VMSideExit* exit, TypeConsensus &consensus);
JS_REQUIRES_STACK void endLoop();
JS_REQUIRES_STACK void endLoop(VMSideExit* exit);
JS_REQUIRES_STACK AbortableRecordingStatus compile(JSTraceMonitor* tm);
JS_REQUIRES_STACK AbortableRecordingStatus closeLoop();
JS_REQUIRES_STACK AbortableRecordingStatus closeLoop(VMSideExit* exit);
JS_REQUIRES_STACK AbortableRecordingStatus closeLoop(SlotMap& slotMap, VMSideExit* exit);
JS_REQUIRES_STACK AbortableRecordingStatus endLoop();
JS_REQUIRES_STACK AbortableRecordingStatus endLoop(VMSideExit* exit);
JS_REQUIRES_STACK void joinEdgesToEntry(VMFragment* peer_root);
JS_REQUIRES_STACK void adjustCallerTypes(nanojit::Fragment* f);
JS_REQUIRES_STACK VMFragment* findNestedCompatiblePeer(VMFragment* f);
JS_REQUIRES_STACK void prepareTreeCall(VMFragment* inner);
JS_REQUIRES_STACK void emitTreeCall(VMFragment* inner, VMSideExit* exit);
JS_REQUIRES_STACK VMFragment* findNestedCompatiblePeer(VMFragment* f);
JS_REQUIRES_STACK AbortableRecordingStatus attemptTreeCall(VMFragment* inner,
uintN& inlineCallCount);
unsigned getCallDepth() const;
JS_REQUIRES_STACK JSRecordingStatus record_EnterFrame();
JS_REQUIRES_STACK JSRecordingStatus record_LeaveFrame();
JS_REQUIRES_STACK JSRecordingStatus record_SetPropHit(JSPropCacheEntry* entry,
JSScopeProperty* sprop);
JS_REQUIRES_STACK JSRecordingStatus record_DefLocalFunSetSlot(uint32 slot, JSObject* obj);
JS_REQUIRES_STACK JSRecordingStatus record_NativeCallComplete();
JS_REQUIRES_STACK void determineGlobalTypes(JSTraceType* typeMap);
nanojit::LIns* demoteIns(nanojit::LIns* ins);
TreeInfo* getTreeInfo() { return treeInfo; }
JS_REQUIRES_STACK VMSideExit* downSnapshot(FrameInfo* downFrame);
JS_REQUIRES_STACK AbortableRecordingStatus upRecursion();
JS_REQUIRES_STACK AbortableRecordingStatus downRecursion();
JS_REQUIRES_STACK AbortableRecordingStatus record_EnterFrame(uintN& inlineCallCount);
JS_REQUIRES_STACK AbortableRecordingStatus record_LeaveFrame();
JS_REQUIRES_STACK AbortableRecordingStatus record_SetPropHit(JSPropCacheEntry* entry,
JSScopeProperty* sprop);
JS_REQUIRES_STACK AbortableRecordingStatus record_DefLocalFunSetSlot(uint32 slot, JSObject* obj);
JS_REQUIRES_STACK AbortableRecordingStatus record_NativeCallComplete();
void forgetGuardedShapesForObject(JSObject* obj);
#ifdef DEBUG
JS_REQUIRES_STACK void tprint(const char *format, int count, nanojit::LIns *insa[]);
@ -1070,7 +1299,7 @@ public:
#endif
#define OPDEF(op,val,name,token,length,nuses,ndefs,prec,format) \
JS_REQUIRES_STACK JSRecordingStatus record_##op();
JS_REQUIRES_STACK AbortableRecordingStatus record_##op();
# include "jsopcode.tbl"
#undef OPDEF
@ -1082,6 +1311,7 @@ public:
friend class TypeCompatibilityVisitor;
friend class SlotMap;
friend class DefaultSlotMap;
friend class RecursiveSlotMap;
friend jsval *js_ConcatPostImacroStackCleanup(uint32 argc, JSFrameRegs &regs,
TraceRecorder *recorder);
};
@ -1097,14 +1327,14 @@ public:
#define TRACE_ARGS_(x,args) \
JS_BEGIN_MACRO \
if (TraceRecorder* tr_ = TRACE_RECORDER(cx)) { \
JSRecordingStatus status = tr_->record_##x args; \
if (STATUS_ABORTS_RECORDING(status)) { \
AbortableRecordingStatus status = tr_->record_##x args; \
if (StatusAbortsRecording(status)) { \
if (TRACE_RECORDER(cx)) \
js_AbortRecording(cx, #x); \
if (status == JSRS_ERROR) \
if (status == ARECORD_ERROR) \
goto error; \
} \
JS_ASSERT(status != JSRS_IMACRO); \
JS_ASSERT(status != ARECORD_IMACRO); \
} \
JS_END_MACRO
@ -1114,7 +1344,7 @@ public:
#define TRACE_2(x,a,b) TRACE_ARGS(x, (a, b))
extern JS_REQUIRES_STACK bool
js_MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount);
js_MonitorLoopEdge(JSContext* cx, uintN& inlineCallCount, MonitorReason reason);
#ifdef DEBUG
# define js_AbortRecording(cx, reason) js_AbortRecordingImpl(cx, reason)

View file

@ -204,4 +204,97 @@ static JS_INLINE void js_free(void* p) {
JS_END_EXTERN_C
#ifdef __cplusplus
/**
* The following classes are designed to cause assertions to detect
* inadvertent use of guard objects as temporaries. In other words,
* when we have a guard object whose only purpose is its constructor and
* destructor (and is never otherwise referenced), the intended use
* might be:
* JSAutoTempValueRooter tvr(cx, 1, &val);
* but it is easy to accidentally write:
* JSAutoTempValueRooter(cx, 1, &val);
* which compiles just fine, but runs the destructor well before the
* intended time.
*
* They work by adding (#ifdef DEBUG) an additional parameter to the
* guard object's constructor, with a default value, so that users of
* the guard object's API do not need to do anything. The default value
* of this parameter is a temporary object. C++ (ISO/IEC 14882:1998),
* section 12.2 [class.temporary], clauses 4 and 5 seem to assume a
* guarantee that temporaries are destroyed in the reverse of their
* construction order, but I actually can't find a statement that that
* is true in the general case (beyond the two specific cases mentioned
* there). However, it seems to be true.
*
* These classes are intended to be used only via the macros immediately
* below them:
* JS_DECL_USE_GUARD_OBJECT_NOTIFIER declares (ifdef DEBUG) a member
* variable, and should be put where a declaration of a private
* member variable would be placed.
* JS_GUARD_OBJECT_NOTIFIER_PARAM should be placed at the end of the
* parameters to each constructor of the guard object; it declares
* (ifdef DEBUG) an additional parameter.
* JS_GUARD_OBJECT_NOTIFIER_INIT is a statement that belongs in each
* constructor. It uses the parameter declared by
* JS_GUARD_OBJECT_NOTIFIER_PARAM.
*/
#ifdef DEBUG
class JSGuardObjectNotifier
{
private:
bool* mStatementDone;
public:
JSGuardObjectNotifier() : mStatementDone(NULL) {}
~JSGuardObjectNotifier() {
*mStatementDone = true;
}
void SetStatementDone(bool *aStatementDone) {
mStatementDone = aStatementDone;
}
};
class JSGuardObjectNotificationReceiver
{
private:
bool mStatementDone;
public:
JSGuardObjectNotificationReceiver() : mStatementDone(false) {}
~JSGuardObjectNotificationReceiver() {
// Assert that the guard object was not used as a temporary.
// (Note that this assert might also fire if Init is not called
// because the guard object's implementation is not using the
// above macros correctly.)
JS_ASSERT(mStatementDone);
}
void Init(const JSGuardObjectNotifier &aNotifier) {
// aNotifier is passed as a const reference so that we can pass a
// temporary, but we really intend it as non-const
const_cast<JSGuardObjectNotifier&>(aNotifier).
SetStatementDone(&mStatementDone);
}
};
#define JS_DECL_USE_GUARD_OBJECT_NOTIFIER \
JSGuardObjectNotificationReceiver _mCheckNotUsedAsTemporary;
#define JS_GUARD_OBJECT_NOTIFIER_PARAM \
, const JSGuardObjectNotifier& _notifier = JSGuardObjectNotifier()
#define JS_GUARD_OBJECT_NOTIFIER_INIT \
JS_BEGIN_MACRO _mCheckNotUsedAsTemporary.Init(_notifier); JS_END_MACRO
#else /* defined(DEBUG) */
#define JS_DECL_USE_GUARD_OBJECT_NOTIFIER
#define JS_GUARD_OBJECT_NOTIFIER_PARAM
#define JS_GUARD_OBJECT_NOTIFIER_INIT JS_BEGIN_MACRO JS_END_MACRO
#endif /* !defined(DEBUG) */
#endif /* defined(__cplusplus) */
#endif /* jsutil_h___ */
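A minimal sketch of a guard class wired up with these macros; the class name and the guarded resource are illustrative only, not part of this patch:

    class JSAutoExampleGuard {
      public:
        JSAutoExampleGuard(JSContext *cx JS_GUARD_OBJECT_NOTIFIER_PARAM)
          : mContext(cx)
        {
            JS_GUARD_OBJECT_NOTIFIER_INIT;
            // ... acquire the guarded resource ...
        }
        ~JSAutoExampleGuard() {
            // ... release the guarded resource ...
        }
      private:
        JSContext *mContext;
        JS_DECL_USE_GUARD_OBJECT_NOTIFIER
    };

    JSAutoExampleGuard guard(cx);  // correct: named, destroyed at end of block
    // JSAutoExampleGuard(cx);     // misuse: the temporary dies at the ';',
    //                             // and a DEBUG build asserts in the receiver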

View file

@ -204,7 +204,7 @@ JS_XDRFindClassById(JSXDRState *xdr, uint32 id);
* before deserialization of bytecode. If the saved version does not match
* the current version, abort deserialization and invalidate the file.
*/
#define JSXDR_BYTECODE_VERSION (0xb973c0de - 54)
#define JSXDR_BYTECODE_VERSION (0xb973c0de - 55)
/*
* Library-private functions.

View file

@ -3385,9 +3385,9 @@ Descendants(JSContext *cx, JSXML *xml, jsval id)
/*
* Protect nameqn's object and strings from GC by linking list to it
* temporarily. The cx->newborn[GCX_OBJECT] GC root protects listobj,
* which protects list. Any other object allocations occuring beneath
* DescendantsHelper use local roots.
* temporarily. The newborn GC root for the last allocated object
* protects listobj, which protects list. Any other object allocations
* occurring beneath DescendantsHelper use local roots.
*/
list->name = nameqn;
if (!js_EnterLocalRootScope(cx))
@ -7182,9 +7182,7 @@ uint32 xml_serial;
JSXML *
js_NewXML(JSContext *cx, JSXMLClass xml_class)
{
JSXML *xml;
xml = (JSXML *) js_NewGCXML(cx, GCX_XML);
JSXML *xml = js_NewGCXML(cx);
if (!xml)
return NULL;

View file

@ -122,7 +122,7 @@ struct JSXML {
} u;
};
JS_STATIC_ASSERT(JS_ROUNDUP(sizeof(JSXML), sizeof(JSGCThing)) == sizeof(JSXML));
JS_STATIC_ASSERT(sizeof(JSXML) % JSVAL_ALIGN == 0);
/* union member shorthands */
#define xml_kids u.list.kids

View file

@ -460,7 +460,9 @@ FragmentAssembler::sProfId = 0;
FragmentAssembler::FragmentAssembler(Lirasm &parent, const string &fragmentName)
: mParent(parent), mFragName(fragmentName)
{
mFragment = new Fragment(NULL verbose_only(, sProfId++));
mFragment = new Fragment(NULL verbose_only(, (mParent.mLogc.lcbits &
nanojit::LC_FragProfile) ?
sProfId++ : 0));
mFragment->lirbuf = mParent.mLirbuf;
mFragment->root = mFragment;
mParent.mFragments[mFragName].fragptr = mFragment;
@ -483,6 +485,8 @@ FragmentAssembler::FragmentAssembler(Lirasm &parent, const string &fragmentName)
mReturnTypeBits = 0;
mLir->ins0(LIR_start);
for (int i = 0; i < nanojit::NumSavedRegs; ++i)
mLir->insParam(i, 1);
mLineno = 0;
}
@ -732,8 +736,8 @@ FragmentAssembler::endFragment()
mFragment->lastIns =
mLir->insGuard(LIR_x, NULL, createGuardRecord(createSideExit()));
::compile(&mParent.mAssm, mFragment, mParent.mAlloc
verbose_only(, mParent.mLabelMap));
::compile(&mParent.mAssm, mFragment
verbose_only(, mParent.mAlloc, mParent.mLabelMap));
if (mParent.mAssm.error() != nanojit::None) {
cerr << "error during assembly: ";

View file

@ -59,7 +59,7 @@ namespace nanojit
Chunk *c = current_chunk;
while (c) {
Chunk *prev = c->prev;
this->freeChunk(c);
freeChunk(c);
c = prev;
}
current_chunk = NULL;

View file

@ -53,7 +53,7 @@ namespace nanojit
class Allocator {
public:
Allocator();
virtual ~Allocator();
~Allocator();
void reset();
/** alloc memory, never return null. */
@ -67,7 +67,7 @@ namespace nanojit
return allocSlow(nbytes);
}
private:
protected:
void* allocSlow(size_t nbytes);
void fill(size_t minbytes);
@ -82,7 +82,7 @@ namespace nanojit
char* current_limit;
// allocator SPI
private:
/** allocate another block from a host provided allocator */
void* allocChunk(size_t nbytes);

View file

@ -41,6 +41,10 @@
#ifdef FEATURE_NANOJIT
#ifdef VTUNE
#include "../core/CodegenLIR.h"
#endif
namespace nanojit
{
#ifdef NJ_VERBOSE
@ -105,6 +109,12 @@ namespace nanojit
, _labels(alloc)
, _epilogue(NULL)
, _err(None)
#if PEDANTIC
, pedanticTop(NULL)
#endif
#ifdef VTUNE
, cgen(NULL)
#endif
, config(core->config)
{
VMPI_memset(&_stats, 0, sizeof(_stats));
@ -193,6 +203,14 @@ namespace nanojit
verbose_only( nBytes += (end - start) * sizeof(NIns); )
NanoAssert(uintptr_t(end) - uintptr_t(start) >= (size_t)LARGEST_UNDERRUN_PROT);
eip = end;
#ifdef VTUNE
if (_nIns && _nExitIns) {
//cgen->jitAddRecord((uintptr_t)list->code, 0, 0, true); // add placeholder record for top of page
cgen->jitCodePosUpdate((uintptr_t)list->code);
cgen->jitPushInfo(); // new page requires new entry
}
#endif
}
void Assembler::reset()
@ -337,10 +355,14 @@ namespace nanojit
Register Assembler::getBaseReg(LIns *i, int &d, RegisterMask allow)
{
#if !PEDANTIC
if (i->isop(LIR_alloc)) {
d += findMemFor(i);
return FP;
}
#else
(void) d;
#endif
return findRegFor(i, allow);
}
@ -1347,7 +1369,29 @@ namespace nanojit
evictScratchRegs();
asm_call(ins);
break;
}
#ifdef VTUNE
case LIR_file:
{
// we traverse backwards so we are now hitting the file
// that is associated with a bunch of LIR_lines we already have seen
uintptr_t currentFile = ins->oprnd1()->imm32();
cgen->jitFilenameUpdate(currentFile);
break;
}
case LIR_line:
{
// add a new table entry; we don't yet know which file it belongs
// to, so we need to add it to the update table too
// note the alloc, actual act is delayed; see above
uint32_t currentLine = (uint32_t) ins->oprnd1()->imm32();
cgen->jitLineNumUpdate(currentLine);
cgen->jitAddRecord((uintptr_t)_nIns, 0, currentLine, true);
break;
}
#endif // VTUNE
}
#ifdef NJ_VERBOSE
@ -1394,6 +1438,10 @@ namespace nanojit
if (error())
return;
#ifdef VTUNE
cgen->jitCodePosUpdate((uintptr_t)_nIns);
#endif
// check that all is well (don't check in exit paths since it's more complicated)
debug_only( pageValidate(); )
debug_only( resourceConsistencyCheck(); )

View file

@ -107,6 +107,10 @@ namespace nanojit
typedef SeqBuilder<NIns*> NInsList;
typedef HashMap<NIns*, LIns*> NInsMap;
#ifdef VTUNE
class avmplus::CodegenLIR;
#endif
class LabelState
{
public:
@ -166,10 +170,13 @@ namespace nanojit
LogControl* _logc;
size_t codeBytes;
size_t exitBytes;
#endif // NJ_VERBOSE
#ifdef VTUNE
avmplus::CodegenLIR *cgen;
#endif
Assembler(CodeAlloc& codeAlloc, Allocator& alloc, AvmCore* core, LogControl* logc);
~Assembler() {}
void endAssembly(Fragment* frag);
void assemble(Fragment* frag);
@ -253,6 +260,9 @@ namespace nanojit
NIns* _nExitIns; // current instruction in exit fragment page
NIns* _epilogue;
AssmError _err; // 0 = means assemble() appears ok, otherwise it failed
#if PEDANTIC
NIns* pedanticTop;
#endif
AR _activation;
RegAlloc _allocator;

View file

@ -1012,6 +1012,13 @@ namespace nanojit
class LirWriter
{
LInsp insDisp(LInsp base, int32_t& d) {
    // If the displacement doesn't fit the target's load/store
    // addressing mode, fold it into the base pointer and zero it.
    if (!isValidDisplacement(d)) {
        base = ins2i(LIR_piadd, base, d);
        d = 0;
    }
    return base;
}
public:
LirWriter *out;
@ -1052,9 +1059,11 @@ namespace nanojit
return out->insImmf(d);
}
virtual LInsp insLoad(LOpcode op, LIns* base, int32_t d) {
base = insDisp(base, d);
return out->insLoad(op, base, d);
}
virtual LInsp insStorei(LIns* value, LIns* base, int32_t d) {
base = insDisp(base, d);
return out->insStorei(value, base, d);
}
virtual LInsp insCall(const CallInfo *call, LInsp args[]) {
@ -1067,6 +1076,13 @@ namespace nanojit
virtual LInsp insSkip(size_t size) {
return out->insSkip(size);
}
void insAssert(LIns* expr) {
#if defined DEBUG
    // Branch over the dbreak when expr holds, so the breakpoint
    // fires only when the asserted condition is false.
    LIns* branch = insBranch(LIR_jt, expr, NULL);
    ins0(LIR_dbreak);
    branch->setTarget(ins0(LIR_label));
#endif
}
// convenience functions
@ -1101,6 +1117,7 @@ namespace nanojit
class Entry
{
public:
Entry(int) : name(0), size(0), align(0) {}
Entry(char *n, size_t s, size_t a) : name(n),size(s),align(a) {}
char* name;
size_t size:29, align:3;
@ -1139,6 +1156,7 @@ namespace nanojit
class Entry
{
public:
Entry(int) : name(0) {}
Entry(char* n) : name(n) {}
char* name;
};
@ -1344,6 +1362,9 @@ namespace nanojit
LInsp state,param1,sp,rp;
LInsp savedRegs[NumSavedRegs];
protected:
friend class LirBufWriter;
/** each chunk is just a raw area of LIns instances, with no header
and no more than 8-byte alignment. The chunk size is somewhat arbitrary
as long as it's well larger than 2*sizeof(LInsSk) */
@ -1359,9 +1380,6 @@ namespace nanojit
* itself. */
static const size_t MAX_SKIP_PAYLOAD_SZB = MAX_LINS_SZB - sizeof(LInsSk);
protected:
friend class LirBufWriter;
/** get CHUNK_SZB more memory for LIR instructions */
void chunkAlloc();
void moveToNewChunk(uintptr_t addrOfLastLInsOnCurrentChunk);

View file

@ -74,7 +74,7 @@
OPDEF(start, 0, 0, Op0) // start of a fragment
OPDEF(regfence, 1, 0, Op0) // register fence, no register allocation is allowed across this meta instruction
OPDEF(skip, 2, 1, Sk) // holds blobs ("payloads") of data; also links pages
OPDEF(unused3, 3,-1, None)
OPDEF(dbreak, 3, 0, Op0) // debugger breakpoint; emitted by LirWriter::insAssert in DEBUG builds
OPDEF(unused4, 4,-1, None)
OPDEF(unused5, 5,-1, None)
OPDEF(unused6, 6,-1, None)

View file

@ -161,6 +161,13 @@ static const RegisterMask FpRegs = 1<<D0 | 1<<D1 | 1<<D2 | 1<<D3 | 1<<D4 | 1<<D5
static const RegisterMask GpRegs = 0xFFFF;
static const RegisterMask AllowableFlagRegs = 1<<R0 | 1<<R1 | 1<<R2 | 1<<R3 | 1<<R4 | 1<<R5 | 1<<R6 | 1<<R7 | 1<<R8 | 1<<R9 | 1<<R10;
#define isS12(offs) ((-(1<<12)) <= (offs) && (offs) < (1<<12))
#define isU12(offs) (((offs) & 0xfff) == (offs))
static inline bool isValidDisplacement(int32_t d) {
return isS12(d);
}
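Tying this back to the LirWriter::insDisp hook added earlier: on ARM a displacement such as 0x2000 fails isS12 (the signed 12-bit range is -4096..4095), so the wrapped insLoad/insStorei fold it away before the backend sees it. A sketch of the effective rewrite in pseudo-LIR, not verbatim output:

    // requested:  val = load base[0x2000]    ; isS12(0x2000) is false
    // emitted:    tmp = piadd(base, 0x2000)
    //             val = load tmp[0]          ; displacement now fits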
#define IsFpReg(_r) ((rmask((Register)_r) & (FpRegs)) != 0)
#define IsGpReg(_r) ((rmask((Register)_r) & (GpRegs)) != 0)
#define FpRegNum(_fpr) ((_fpr) - FirstFloatReg)
@ -307,13 +314,14 @@ enum {
NanoAssert(IsGpReg(rd) && IsGpReg(rl));\
NanoAssert(isOp2Imm(op2imm));\
*(--_nIns) = (NIns) ((cond)<<28 | OP_IMM | (ARM_##op)<<21 | (S)<<20 | (rl)<<16 | (rd)<<12 | (op2imm));\
if (ARM_##op == ARM_mov || ARM_##op == ARM_mvn)\
if (ARM_##op == ARM_mov || ARM_##op == ARM_mvn) { \
asm_output("%s%s%s %s, #0x%X", #op, condNames[cond], (S)?"s":"", gpn(rd), decOp2Imm(op2imm));\
else if (ARM_##op >= ARM_tst && ARM_##op <= ARM_cmn) {\
} else if (ARM_##op >= ARM_tst && ARM_##op <= ARM_cmn) { \
NanoAssert(S==1);\
asm_output("%s%s %s, #0x%X", #op, condNames[cond], gpn(rl), decOp2Imm(op2imm));\
} else\
} else { \
asm_output("%s%s%s %s, %s, #0x%X", #op, condNames[cond], (S)?"s":"", gpn(rd), gpn(rl), decOp2Imm(op2imm));\
}\
} while (0)
// ALU operation with two register arguments
@ -329,13 +337,14 @@ enum {
NanoAssert(((S)==0) || ((S)==1));\
NanoAssert(IsGpReg(rd) && IsGpReg(rl) && IsGpReg(rr));\
*(--_nIns) = (NIns) ((cond)<<28 |(ARM_##op)<<21 | (S)<<20 | (rl)<<16 | (rd)<<12 | (rr));\
if (ARM_##op == ARM_mov || ARM_##op == ARM_mvn)\
if (ARM_##op == ARM_mov || ARM_##op == ARM_mvn) { \
asm_output("%s%s%s %s, %s", #op, condNames[cond], (S)?"s":"", gpn(rd), gpn(rr));\
else if (ARM_##op >= ARM_tst && ARM_##op <= ARM_cmn) {\
} else if (ARM_##op >= ARM_tst && ARM_##op <= ARM_cmn) { \
NanoAssert(S==1);\
asm_output("%s%s %s, %s", #op, condNames[cond], gpn(rl), gpn(rr));\
} else\
} else { \
asm_output("%s%s%s %s, %s, %s", #op, condNames[cond], (S)?"s":"", gpn(rd), gpn(rl), gpn(rr));\
}\
} while (0)
// ALU operation with two register arguments, with rr operated on by a shift and shift immediate
@ -354,13 +363,14 @@ enum {
NanoAssert(IsShift(sh));\
NanoAssert((imm)>=0 && (imm)<32);\
*(--_nIns) = (NIns) ((cond)<<28 |(ARM_##op)<<21 | (S)<<20 | (rl)<<16 | (rd)<<12 | (imm)<<7 | (sh)<<4 | (rr));\
if (ARM_##op == ARM_mov || ARM_##op == ARM_mvn)\
if (ARM_##op == ARM_mov || ARM_##op == ARM_mvn) { \
asm_output("%s%s%s %s, %s, %s #%d", #op, condNames[cond], (S)?"s":"", gpn(rd), gpn(rr), shiftNames[sh], (imm));\
else if (ARM_##op >= ARM_tst && ARM_##op <= ARM_cmn) {\
} else if (ARM_##op >= ARM_tst && ARM_##op <= ARM_cmn) { \
NanoAssert(S==1);\
asm_output("%s%s %s, %s, %s #%d", #op, condNames[cond], gpn(rl), gpn(rr), shiftNames[sh], (imm));\
} else\
} else { \
asm_output("%s%s%s %s, %s, %s, %s #%d", #op, condNames[cond], (S)?"s":"", gpn(rd), gpn(rl), gpn(rr), shiftNames[sh], (imm));\
}\
} while (0)
// ALU operation with two register arguments, with rr operated on by a shift and shift register
@ -378,13 +388,14 @@ enum {
NanoAssert(IsGpReg(rd) && IsGpReg(rl) && IsGpReg(rr) && IsGpReg(rs));\
NanoAssert(IsShift(sh));\
*(--_nIns) = (NIns) ((cond)<<28 |(ARM_##op)<<21 | (S)<<20 | (rl)<<16 | (rd)<<12 | (rs)<<8 | (sh)<<4 | (rr));\
if (ARM_##op == ARM_mov || ARM_##op == ARM_mvn)\
if (ARM_##op == ARM_mov || ARM_##op == ARM_mvn) { \
asm_output("%s%s%s %s, %s, %s %s", #op, condNames[cond], (S)?"s":"", gpn(rd), gpn(rr), shiftNames[sh], gpn(rs));\
else if (ARM_##op >= ARM_tst && ARM_##op <= ARM_cmn) {\
} else if (ARM_##op >= ARM_tst && ARM_##op <= ARM_cmn) { \
NanoAssert(S==1);\
asm_output("%s%s %s, %s, %s %s", #op, condNames[cond], gpn(rl), gpn(rr), shiftNames[sh], gpn(rs));\
} else\
} else { \
asm_output("%s%s%s %s, %s, %s, %s %s", #op, condNames[cond], (S)?"s":"", gpn(rd), gpn(rl), gpn(rr), shiftNames[sh], gpn(rs));\
}\
} while (0)
// --------
@ -647,8 +658,6 @@ enum {
// PC always points to current instruction + 8, so when calculating pc-relative
// offsets, use PC+8.
#define PC_OFFSET_FROM(target,frompc) ((intptr_t)(target) - ((intptr_t)(frompc) + 8))
#define isS12(offs) ((-(1<<12)) <= (offs) && (offs) < (1<<12))
#define isU12(offs) (((offs) & 0xfff) == (offs))
#define B_cond(_c,_t) \
B_cond_chk(_c,_t,1)

View file

@ -254,6 +254,9 @@ namespace nanojit
static const int NumSavedRegs = 18; // R13-R30
#endif
static inline bool isValidDisplacement(int32_t d) {
return true;
}
static inline bool IsFpReg(Register r) {
return r >= F0;
}

View file

@ -181,6 +181,10 @@ namespace nanojit
1<<F22;
static const RegisterMask AllowableFlagRegs = GpRegs;
static inline bool isValidDisplacement(int32_t d) {
return true;
}
#define nextreg(r) Register(r+1)
verbose_only( extern const char* regNames[]; )

View file

@ -70,7 +70,7 @@ tracing
namespace nanojit
{
const Register Assembler::retRegs[] = { RAX };
#ifdef _MSC_VER
#ifdef _WIN64
const Register Assembler::argRegs[] = { RCX, RDX, R8, R9 };
const Register Assembler::savedRegs[] = { RBX, RSI, RDI, R12, R13, R14, R15 };
#else
@ -588,7 +588,7 @@ namespace nanojit
emit(X64_callrax);
}
#ifdef _MSC_VER
#ifdef _WIN64
int stk_used = 32; // always reserve 32byte shadow area
#else
int stk_used = 0;
@ -604,7 +604,7 @@ namespace nanojit
asm_regarg(sz, arg, argRegs[arg_index]);
arg_index++;
}
#ifdef _MSC_VER
#ifdef _WIN64
else if (sz == ARGSIZE_F && arg_index < NumArgRegs) {
// double goes in XMM reg # based on overall arg_index
asm_regarg(sz, arg, Register(XMM0+arg_index));
@ -1153,8 +1153,8 @@ namespace nanojit
uint32_t kind = ins->paramKind();
if (kind == 0) {
// ordinary param
// first six args always in registers for mac x64
if (a < 6) {
// first four or six args always in registers for x86_64 ABI
if (a < (uint32_t)NumArgRegs) {
// incoming arg in register
prepResultReg(ins, rmask(argRegs[a]));
} else {
@ -1281,7 +1281,7 @@ namespace nanojit
void Assembler::nRegisterResetAll(RegAlloc &a) {
// add scratch registers to our free list for the allocator
a.clear();
#ifdef _MSC_VER
#ifdef _WIN64
a.free = 0x001fffcf; // rax-rbx, rsi, rdi, r8-r15, xmm0-xmm5
#else
a.free = 0xffffffff & ~(1<<RSP | 1<<RBP);
@ -1316,7 +1316,7 @@ namespace nanojit
}
Register Assembler::nRegisterAllocFromSet(RegisterMask set) {
#if defined _WIN64
#if defined _MSC_VER
DWORD tr;
_BitScanForward(&tr, set);
_allocator.free &= ~rmask((Register)tr);

View file

@ -320,6 +320,9 @@ namespace nanojit
static const int NumArgRegs = 6;
#endif
static inline bool isValidDisplacement(int32_t d) {
return true;
}
static inline bool IsFpReg(Register r) {
return ((1<<r) & FpRegs) != 0;
}

View file

@ -265,12 +265,6 @@ namespace nanojit
btr RegAlloc::free[ecx], eax // free &= ~rmask(i)
mov r, eax
}
#elif defined WIN64
unsigned long tr, fr;
_BitScanForward(&tr, set);
_bittestandreset(&fr, tr);
regs.free = fr;
r = tr;
#else
asm(
"bsf %1, %%eax\n\t"
@ -1231,6 +1225,24 @@ namespace nanojit
}
}
// negateMask is used by asm_fneg.
#if defined __SUNPRO_CC
// From Sun Studio C++ Readme: #pragma align inside namespace requires mangled names.
// Initialize here to avoid multithreading contention issues during initialization.
static uint32_t negateMask_temp[] = {0, 0, 0, 0, 0, 0, 0};
static uint32_t* negateMaskInit()
{
uint32_t* negateMask = (uint32_t*)alignUp(negateMask_temp, 16);
negateMask[1] = 0x80000000;
return negateMask;
}
static uint32_t *negateMask = negateMaskInit();
#else
static const AVMPLUS_ALIGN16(uint32_t) negateMask[] = {0,0x80000000,0,0};
#endif
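The Sun Studio path above over-allocates and aligns by hand; assuming alignUp(p, 16) rounds p up to the next 16-byte boundary (the usual (addr + 15) & ~15 arithmetic), the slack works out as follows:

    // negateMask_temp is a uint32_t array, hence at least 4-byte aligned,
    // so at most 12 bytes of padding precede the next 16-byte boundary:
    //   12 (worst-case padding) + 16 (aligned operand) = 28 bytes = 7 slots,
    // which is exactly the size of negateMask_temp, so negateMask[0..3]
    // is always a valid 16-byte-aligned SSE operand.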
void Assembler::asm_fneg(LInsp ins)
{
if (config.sse2)
@ -1255,14 +1267,6 @@ namespace nanojit
}
}
#if defined __SUNPRO_CC
// from Sun Studio C++ Readme: #pragma align inside namespace requires mangled names
static uint32_t temp[] = {0, 0, 0, 0, 0, 0, 0};
static uint32_t *negateMask = (uint32_t *)alignUp(temp, 16);
negateMask[1] = 0x80000000;
#else
static const AVMPLUS_ALIGN16(uint32_t) negateMask[] = {0,0x80000000,0,0};
#endif
SSE_XORPD(rr, negateMask);
if (rr != ra)

View file

@ -152,6 +152,10 @@ namespace nanojit
static const RegisterMask AllowableFlagRegs = 1<<EAX |1<<ECX | 1<<EDX | 1<<EBX;
static inline bool isValidDisplacement(int32_t d) {
return true;
}
#define _rmask_(r) (1<<(r))
#define _is_xmm_reg_(r) ((_rmask_(r)&XmmRegs)!=0)
#define _is_x87_reg_(r) ((_rmask_(r)&x87Regs)!=0)

js/src/nanojit/VMPI.cpp (new file, 152 lines)
View file

@ -0,0 +1,152 @@
/* -*- Mode: C++; c-basic-offset: 4; indent-tabs-mode: nil; tab-width: 4 -*- */
/* vi: set ts=4 sw=4 expandtab: (add to ~/.vimrc: set modeline modelines=5) */
/* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version 1.1 (the
* "License"); you may not use this file except in compliance with the License. You may obtain
* a copy of the License at http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis, WITHOUT
* WARRANTY OF ANY KIND, either express or implied. See the License for the specific
* language governing rights and limitations under the License.
*
* The Original Code is [Open Source Virtual Machine.]
*
* The Initial Developer of the Original Code is Adobe System Incorporated. Portions created
* by the Initial Developer are Copyright (C)[ 2004-2006 ] Adobe Systems Incorporated. All Rights
* Reserved.
*
* Contributor(s): Adobe AS3 Team
* Andreas Gal <gal@mozilla.com>
*
* Alternatively, the contents of this file may be used under the terms of either the GNU
* General Public License Version 2 or later (the "GPL"), or the GNU Lesser General Public
* License Version 2.1 or later (the "LGPL"), in which case the provisions of the GPL or the
* LGPL are applicable instead of those above. If you wish to allow use of your version of this
* file only under the terms of either the GPL or the LGPL, and not to allow others to use your
* version of this file under the terms of the MPL, indicate your decision by deleting provisions
* above and replace them with the notice and other provisions required by the GPL or the
* LGPL. If you do not delete the provisions above, a recipient may use your version of this file
* under the terms of any one of the MPL, the GPL or the LGPL.
*
***** END LICENSE BLOCK ***** */
#include "nanojit.h"
#ifdef SOLARIS
#include <ucontext.h>
#include <dlfcn.h>
#include <procfs.h>
#include <sys/stat.h>
extern "C" caddr_t _getfp(void);
typedef caddr_t maddr_ptr;
#else
typedef void *maddr_ptr;
#endif
using namespace avmplus;
#ifdef WIN32
void
VMPI_setPageProtection(void *address,
size_t size,
bool executableFlag,
bool writeableFlag)
{
DWORD oldProtectFlags = 0;
DWORD newProtectFlags = 0;
if ( executableFlag && writeableFlag ) {
newProtectFlags = PAGE_EXECUTE_READWRITE;
} else if ( executableFlag ) {
newProtectFlags = PAGE_EXECUTE_READ;
} else if ( writeableFlag ) {
newProtectFlags = PAGE_READWRITE;
} else {
newProtectFlags = PAGE_READONLY;
}
BOOL retval;
MEMORY_BASIC_INFORMATION mbi;
do {
VirtualQuery(address, &mbi, sizeof(MEMORY_BASIC_INFORMATION));
size_t markSize = size > mbi.RegionSize ? mbi.RegionSize : size;
retval = VirtualProtect(address, markSize, newProtectFlags, &oldProtectFlags);
NanoAssert(retval);
address = (char*) address + markSize;
size -= markSize;
} while(size > 0 && retval);
// We should not be clobbering PAGE_GUARD protections
NanoAssert((oldProtectFlags & PAGE_GUARD) == 0);
}
#elif defined(AVMPLUS_OS2)
void
VMPI_setPageProtection(void *address,
size_t size,
bool executableFlag,
bool writeableFlag)
{
ULONG flags = PAG_READ;
if (executableFlag) {
flags |= PAG_EXECUTE;
}
if (writeableFlag) {
flags |= PAG_WRITE;
}
address = (void*)((size_t)address & ~(0xfff));
size = (size + 0xfff) & ~(0xfff);
ULONG attribFlags = PAG_FREE;
while (size) {
ULONG attrib;
ULONG range = size;
ULONG retval = DosQueryMem(address, &range, &attrib);
AvmAssert(retval == 0);
// exit if this is the start of the next memory object
if (attrib & attribFlags) {
break;
}
attribFlags |= PAG_BASE;
range = size > range ? range : size;
retval = DosSetMem(address, range, flags);
AvmAssert(retval == 0);
address = (char*)address + range;
size -= range;
}
}
#else // !WIN32 && !AVMPLUS_OS2
void VMPI_setPageProtection(void *address,
size_t size,
bool executableFlag,
bool writeableFlag)
{
int bitmask = sysconf(_SC_PAGESIZE) - 1;
// mprotect requires that the addresses be aligned on page boundaries
void *endAddress = (void*) ((char*)address + size);
void *beginPage = (void*) ((size_t)address & ~bitmask);
void *endPage = (void*) (((size_t)endAddress + bitmask) & ~bitmask);
size_t sizePaged = (size_t)endPage - (size_t)beginPage;
int flags = PROT_READ;
if (executableFlag) {
flags |= PROT_EXEC;
}
if (writeableFlag) {
flags |= PROT_WRITE;
}
int retval = mprotect((maddr_ptr)beginPage, (unsigned int)sizePaged, flags);
AvmAssert(retval == 0);
(void)retval;
}
#endif // WIN32
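A minimal usage sketch (the buffer and length names are illustrative only): flipping a freshly written JIT buffer to executable, then back to writable for patching.

    // 'code' points at 'len' bytes previously filled with machine code.
    VMPI_setPageProtection(code, len, true,  false); // executable, read-only
    // ... run the generated code ...
    VMPI_setPageProtection(code, len, false, true);  // writable for patching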

js/src/nanojit/VMPI.h (new file, 92 lines)
View file

@ -0,0 +1,92 @@
/* -*- Mode: C++; c-basic-offset: 4; indent-tabs-mode: nil; tab-width: 4 -*- */
/* vi: set ts=4 sw=4 expandtab: (add to ~/.vimrc: set modeline modelines=5) */
/* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is [Open Source Virtual Machine].
*
* The Initial Developer of the Original Code is
* Adobe System Incorporated.
* Portions created by the Initial Developer are Copyright (C) 2004-2007
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* Adobe AS3 Team
*
* Alternatively, the contents of this file may be used under the terms of
* either the GNU General Public License Version 2 or later (the "GPL"), or
* the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
/*
* Stub VMPI implementation to support standalone nanojit repository.
*
* Really only works if you *don't* have a busted-up C library.
*/
#ifndef __VMPI_h__
#define __VMPI_h__
#include <assert.h>
#include <string.h>
#include <stdio.h>
#include <ctype.h>
#include <stdlib.h>
#include <stddef.h>
#if defined(AVMPLUS_UNIX) || defined(AVMPLUS_OS2)
#include <unistd.h>
#include <sys/mman.h>
#endif
#ifdef AVMPLUS_WIN32
#if ! defined(_STDINT_H)
typedef signed char int8_t;
typedef signed short int16_t;
typedef signed int int32_t;
typedef signed __int64 int64_t;
typedef unsigned char uint8_t;
typedef unsigned short uint16_t;
typedef unsigned int uint32_t;
typedef unsigned __int64 uint64_t;
#endif
#else
#include <stdint.h>
#include <inttypes.h>
#endif
#define VMPI_strlen strlen
#define VMPI_strcat strcat
#define VMPI_strcmp strcmp
#define VMPI_strncat strncat
#define VMPI_strcpy strcpy
#define VMPI_sprintf sprintf
#define VMPI_memset memset
#define VMPI_isdigit isdigit
#define VMPI_getDate()
extern void VMPI_setPageProtection(void *address,
size_t size,
bool executableFlag,
bool writeableFlag);
#endif

View file

@ -54,7 +54,6 @@ avmplus::AvmLog(char const *msg, ...) {
}
#ifdef _DEBUG
// NanoAssertFail matches JS_Assert in jsutil.cpp.
void NanoAssertFail()
{
#if defined(WIN32)
@ -68,111 +67,6 @@ void NanoAssertFail()
}
#endif
#ifdef WIN32
void
VMPI_setPageProtection(void *address,
size_t size,
bool executableFlag,
bool writeableFlag)
{
DWORD oldProtectFlags = 0;
DWORD newProtectFlags = 0;
if ( executableFlag && writeableFlag ) {
newProtectFlags = PAGE_EXECUTE_READWRITE;
} else if ( executableFlag ) {
newProtectFlags = PAGE_EXECUTE_READ;
} else if ( writeableFlag ) {
newProtectFlags = PAGE_READWRITE;
} else {
newProtectFlags = PAGE_READONLY;
}
BOOL retval;
MEMORY_BASIC_INFORMATION mbi;
do {
VirtualQuery(address, &mbi, sizeof(MEMORY_BASIC_INFORMATION));
size_t markSize = size > mbi.RegionSize ? mbi.RegionSize : size;
retval = VirtualProtect(address, markSize, newProtectFlags, &oldProtectFlags);
NanoAssert(retval);
address = (char*) address + markSize;
size -= markSize;
} while(size > 0 && retval);
// We should not be clobbering PAGE_GUARD protections
NanoAssert((oldProtectFlags & PAGE_GUARD) == 0);
}
#elif defined(AVMPLUS_OS2)
void
VMPI_setPageProtection(void *address,
size_t size,
bool executableFlag,
bool writeableFlag)
{
ULONG flags = PAG_READ;
if (executableFlag) {
flags |= PAG_EXECUTE;
}
if (writeableFlag) {
flags |= PAG_WRITE;
}
address = (void*)((size_t)address & ~(0xfff));
size = (size + 0xfff) & ~(0xfff);
ULONG attribFlags = PAG_FREE;
while (size) {
ULONG attrib;
ULONG range = size;
ULONG retval = DosQueryMem(address, &range, &attrib);
AvmAssert(retval == 0);
// exit if this is the start of the next memory object
if (attrib & attribFlags) {
break;
}
attribFlags |= PAG_BASE;
range = size > range ? range : size;
retval = DosSetMem(address, range, flags);
AvmAssert(retval == 0);
address = (char*)address + range;
size -= range;
}
}
#else // !WIN32 && !AVMPLUS_OS2
void VMPI_setPageProtection(void *address,
size_t size,
bool executableFlag,
bool writeableFlag)
{
int bitmask = sysconf(_SC_PAGESIZE) - 1;
// mprotect requires that the addresses be aligned on page boundaries
void *endAddress = (void*) ((char*)address + size);
void *beginPage = (void*) ((size_t)address & ~bitmask);
void *endPage = (void*) (((size_t)endAddress + bitmask) & ~bitmask);
size_t sizePaged = (size_t)endPage - (size_t)beginPage;
int flags = PROT_READ;
if (executableFlag) {
flags |= PROT_EXEC;
}
if (writeableFlag) {
flags |= PROT_WRITE;
}
int retval = mprotect((maddr_ptr)beginPage, (unsigned int)sizePaged, flags);
AvmAssert(retval == 0);
(void)retval;
}
#endif // WIN32
#ifdef WINCE
// Due to the per-process heap slots on Windows Mobile, we can often run into OOM

View file

@ -36,19 +36,11 @@
#ifndef avm_h___
#define avm_h___
#include <assert.h>
#include <string.h>
#include <stdio.h>
#include <ctype.h>
#include <stdlib.h>
#if defined(AVMPLUS_UNIX) || defined(AVMPLUS_OS2)
#include <unistd.h>
#include <sys/mman.h>
#if defined(HAVE_CONFIG_H) && defined(NANOJIT_CENTRAL)
#include "config.h"
#endif
#include "jstypes.h"
#include "jsstdint.h"
#include "VMPI.h"
#if !defined(AVMPLUS_LITTLE_ENDIAN) && !defined(AVMPLUS_BIG_ENDIAN)
#ifdef IS_BIG_ENDIAN
@ -104,17 +96,15 @@ void NanoAssertFail();
#define AvmAssertMsg(x, y)
#define AvmDebugLog(x) printf x
#if defined(AVMPLUS_IA32)
#if defined(_MSC_VER)
__declspec(naked) static inline __int64 rdtsc()
#if defined(_M_IX86) || defined(_M_AMD64)
// Visual C++ for x86 and x64 uses compiler intrinsics
static inline unsigned __int64 rdtsc(void)
{
__asm
{
rdtsc;
ret;
return __rdtsc();
}
}
#elif defined(SOLARIS)
#elif defined(AVMPLUS_IA32)
#if defined(SOLARIS)
static inline unsigned long long rdtsc(void)
{
unsigned long long int x;
@ -177,20 +167,6 @@ struct JSContext;
# define PERFM_TPROF_END()
#endif
#define VMPI_strlen strlen
#define VMPI_strcat strcat
#define VMPI_strncat strncat
#define VMPI_strcpy strcpy
#define VMPI_sprintf sprintf
#define VMPI_memset memset
#define VMPI_isdigit isdigit
#define VMPI_getDate()
extern void VMPI_setPageProtection(void *address,
size_t size,
bool executableFlag,
bool writeableFlag);
namespace avmplus {
typedef int FunctionID;

js/src/tests/Makefile.in (new file, 78 lines)
View file

@ -0,0 +1,78 @@
# vim: set shiftwidth=8 tabstop=8 autoindent noexpandtab copyindent:
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1/GPL 2.0/LGPL 2.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
#
# The Original Code is Mozilla's javascript tests.
#
# The Initial Developer of the Original Code is the Mozilla Foundation.
# Portions created by the Initial Developer are Copyright (C) 2009
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
#
# Alternatively, the contents of this file may be used under the terms of
# either the GNU General Public License Version 2 or later (the "GPL"), or
# the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
# in which case the provisions of the GPL or the LGPL are applicable instead
# of those above. If you wish to allow use of your version of this file only
# under the terms of either the GPL or the LGPL, and not to allow others to
# use your version of this file under the terms of the MPL, indicate your
# decision by deleting the provisions above and replace them with the notice
# and other provisions required by the GPL or the LGPL. If you do not delete
# the provisions above, a recipient may use your version of this file under
# the terms of any one of the MPL, the GPL or the LGPL.
#
# ***** END LICENSE BLOCK *****
DEPTH = ..
topsrcdir = @top_srcdir@
srcdir = @srcdir@
VPATH = @srcdir@
include $(DEPTH)/config/autoconf.mk
MODULE = jsreftest
include $(topsrcdir)/config/rules.mk
# test files to be packaged.
TEST_FILES = \
jsreftest.html \
shell.js \
browser.js \
js-test-driver-end.js \
user.js \
jstests.list \
e4x/ \
ecma/ \
ecma_2/ \
ecma_3/ \
ecma_3_1/ \
ecma_5/ \
js1_1/ \
js1_2/ \
js1_3/ \
js1_4/ \
js1_5/ \
js1_6/ \
js1_7/ \
js1_8/ \
js1_8_1/ \
$(NULL)
PKG_STAGE = $(DIST)/test-package-stage
# stage tests for packaging
stage-package:
$(NSINSTALL) -D $(PKG_STAGE)/jsreftest/tests
@(cd $(srcdir) && tar $(TAR_CREATE_FLAGS) - $(TEST_FILES)) | (cd $(PKG_STAGE)/jsreftest/tests && tar -xf -)


View file

@ -34,7 +34,6 @@
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
var gPageCompleted;
var GLOBAL = this + '';
@ -164,7 +163,7 @@ function jsdgc()
}
catch(ex)
{
print('gc: ' + ex);
print('jsdgc: ' + ex);
}
}
@ -200,6 +199,7 @@ function Preferences(aPrefRoot)
}
catch(ex)
{
print('Preferences: ' + ex);
}
}
@ -222,6 +222,7 @@ function Preferences_getPrefRoot()
}
catch(ex)
{
print('Preferences_getPrefRoot: ' + ex);
}
return root;
}
@ -242,6 +243,7 @@ function Preferences_getPref(aPrefName)
}
catch(ex)
{
//print('Preferences_getPref: ' + ex);
}
return value;
}
@ -262,6 +264,7 @@ function Preferences_getBoolPref(aPrefName)
}
catch(ex)
{
//print('Preferences_getBoolPref: ' + ex);
}
return value;
}
@ -282,6 +285,7 @@ function Preferences_getIntPref(aPrefName)
}
catch(ex)
{
//print('Preferences_getIntPref: ' + ex);
}
return value;
}
@ -302,6 +306,7 @@ function Preferences_getCharPref(aPrefName)
}
catch(ex)
{
//print('Preferences_getCharPref: ' + ex);
}
return value;
}
@ -329,6 +334,7 @@ function Preferences_setPref(aPrefName, aPrefValue)
}
catch(ex)
{
print('Preferences_setPref: ' + ex);
}
}
@ -355,6 +361,7 @@ function Preferences_setBoolPref(aPrefName, aPrefValue)
}
catch(ex)
{
print('Preferences_setBoolPref: ' + ex);
}
}
@ -381,6 +388,7 @@ function Preferences_setIntPref(aPrefName, aPrefValue)
}
catch(ex)
{
print('Preferences_setIntPref: ' + ex);
}
}
@ -407,6 +415,7 @@ function Preferences_setCharPref(aPrefName, aPrefValue)
}
catch(ex)
{
print('Preferences_setCharPref: ' + ex);
}
}
@ -436,6 +445,7 @@ function Preferences_resetPref(aPrefName)
}
catch(ex)
{
print('Preferences_resetPref: ' + ex);
}
}
@ -460,6 +470,7 @@ function Preferences_resetAllPrefs()
}
catch(ex)
{
print('Preferences_resetAllPrefs: ' + ex);
}
}
@ -478,6 +489,7 @@ function Preferences_clearPref(aPrefName)
}
catch(ex)
{
print('Preferences_clearPref: ' + ex);
}
}
@ -596,6 +608,7 @@ var gVersion = 150;
function jsTestDriverBrowserInit()
{
if (typeof dump != 'function')
{
dump = print;
@ -667,6 +680,14 @@ function jsTestDriverBrowserInit()
}
}
// default to language=type;text/javascript. required for
// reftest style manifests.
if (!properties.language)
{
properties.language = 'type';
properties.mimetype = 'text/javascript';
}
gTestPath = properties.test;
gVersion = 10*parseInt(properties.version.replace(/\./g, ''));
@ -680,11 +701,12 @@ function jsTestDriverBrowserInit()
* since the default setting of jit changed from false to true
* in http://hg.mozilla.org/tracemonkey/rev/685e00e68be9
* bisections which depend upon jit settings can be thrown off.
* default jit(false) to make bisections depending upon jit settings
* consistent over time. This is not needed in shell tests as the default
* jit setting has not changed there.
* default jit(false) when not running jsreftests to make bisections
* depending upon jit settings consistent over time. This is not needed
* in shell tests as the default jit setting has not changed there.
*/
if (properties.jit || !document.location.href.match(/jsreftest.html/))
jit(properties.jit);
var testpathparts = properties.test.split(/\//);
@ -809,9 +831,134 @@ function jsTestDriverEnd()
gTestcases[i].dump();
}
// tell reftest the test is complete.
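// (reftest blocks while the root element carries the 'reftest-wait'
// class; clearing className releases the harness to finish the test.)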
document.documentElement.className = '';
// tell Spider the page is complete
gPageCompleted = true;
}
}
//var dlog = (function (s) { print('debug: ' + s); });
var dlog = (function (s) {});
// dialog closer from http://bclary.com/projects/spider/spider/chrome/content/spider/dialog-closer.js
var gDialogCloser;
var gDialogCloserObserver;
function registerDialogCloser()
{
dlog('registerDialogCloser: start');
try
{
netscape.security.PrivilegeManager.
enablePrivilege('UniversalXPConnect');
}
catch(excp)
{
print('registerDialogCloser: ' + excp);
return;
}
gDialogCloser = Components.
classes['@mozilla.org/embedcomp/window-watcher;1'].
getService(Components.interfaces.nsIWindowWatcher);
gDialogCloserObserver = {observe: dialogCloser_observe};
gDialogCloser.registerNotification(gDialogCloserObserver);
dlog('registerDialogCloser: complete');
}
function unregisterDialogCloser()
{
dlog('unregisterDialogCloser: start');
if (!gDialogCloserObserver || !gDialogCloser)
{
return;
}
try
{
netscape.security.PrivilegeManager.
enablePrivilege('UniversalXPConnect');
}
catch(excp)
{
print('unregisterDialogCloser: ' + excp);
return;
}
gDialogCloser.unregisterNotification(gDialogCloserObserver);
gDialogCloserObserver = null;
gDialogCloser = null;
dlog('unregisterDialogCloser: stop');
}
// use an array to handle the case where multiple dialogs
// appear at one time
var gDialogCloserSubjects = [];
function dialogCloser_observe(subject, topic, data)
{
try
{
netscape.security.PrivilegeManager.
enablePrivilege('UniversalXPConnect');
dlog('dialogCloser_observe: ' +
'subject: ' + subject +
', topic=' + topic +
', data=' + data +
', subject.document.documentURI=' + subject.document.documentURI +
', subjects pending=' + gDialogCloserSubjects.length);
}
catch(excp)
{
print('dialogCloser_observe: ' + excp);
return;
}
if (subject instanceof ChromeWindow && topic == 'domwindowopened')
{
gDialogCloserSubjects.push(subject);
// timeout of 0 needed when running under reftest framework.
subject.setTimeout(closeDialog, 0);
}
dlog('dialogCloser_observe: subjects pending: ' + gDialogCloserSubjects.length);
}
function closeDialog()
{
var subject;
dlog('closeDialog: subjects pending: ' + gDialogCloserSubjects.length);
while ((subject = gDialogCloserSubjects.pop()) != null)
{
dlog('closeDialog: subject=' + subject);
dlog('closeDialog: subject.document instanceof XULDocument: ' + (subject.document instanceof XULDocument));
dlog('closeDialog: subject.document.documentURI: ' + subject.document.documentURI);
if (subject.document instanceof XULDocument &&
subject.document.documentURI == 'chrome://global/content/commonDialog.xul')
{
dlog('closeDialog: close XULDocument dialog?');
subject.close();
}
else
{
// alerts inside of reftest framework are not XULDocument dialogs.
dlog('closeDialog: close chrome dialog?');
subject.close();
}
}
}
registerDialogCloser();
window.addEventListener('unload', unregisterDialogCloser, true);
jsTestDriverBrowserInit();
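For readers unfamiliar with the window-watcher service used above, the observer contract reduces to the shape below. This is a condensed sketch of the same calls made by registerDialogCloser/unregisterDialogCloser, not additional harness code; as in the real functions, chrome privileges (UniversalXPConnect) must be enabled first.

// Condensed sketch: observe window creation, close each new window ASAP.
var ww = Components.classes['@mozilla.org/embedcomp/window-watcher;1'].
  getService(Components.interfaces.nsIWindowWatcher);
var observer = {
  observe: function (subject, topic, data) {
    // topic is 'domwindowopened' or 'domwindowclosed'; subject is the window
    if (topic == 'domwindowopened')
      subject.setTimeout(function () { subject.close(); }, 0); // 0ms, as above
  }
};
ww.registerNotification(observer);   // start receiving notifications
// ... run tests ...
ww.unregisterNotification(observer); // always pair with unregister on unload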

@ -0,0 +1,30 @@
url-prefix ../../jsreftest.html?test=e4x/Expressions/
script 11.1.1.js
script 11.1.2.js
script 11.1.3.js
script 11.1.4-01.js
script 11.1.4-02.js
script 11.1.4-03.js
fails script 11.1.4-04.js
script 11.1.4-05.js
script 11.1.4-06.js
script 11.1.4-07.js
fails script 11.1.4-08.js
script 11.1.4.js
script 11.1.5.js
script 11.2.1.js
script 11.2.2.js
script 11.2.3.js
script 11.2.4.js
script 11.3.1.js
script 11.3.2.js
script 11.4.1.js
script 11.5.1.js
script 11.6.1.js
script 11.6.2.js
script 11.6.3.js
script regress-301545.js
script regress-302531.js
script regress-340024.js
script regress-366123.js
script regress-496113.js

@ -0,0 +1,11 @@
url-prefix ../../jsreftest.html?test=e4x/GC/
script regress-280844-1.js
script regress-280844-2.js
skip script regress-292455.js # does not always dismiss alert
script regress-313952-01.js
script regress-313952-02.js
script regress-324117.js
skip script regress-324278.js # slow
script regress-339785.js
script regress-357063-01.js
script regress-357063-02.js
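Both jstests.list manifests above follow the same grammar: url-prefix sets the base URL prepended to every test, each script line names a test file, optional leading annotations (fails, skip) mark expected failures or tests to avoid, and # starts a trailing comment. A hypothetical sketch that splits one line into those fields, purely to make the format explicit:

// Hypothetical parser for one jstests.list line (illustration only).
function parseManifestLine(line) {
  line = line.replace(/#.*$/, '').replace(/^\s+|\s+$/g, ''); // drop comment, trim
  if (!line)
    return null;
  var parts = line.split(/\s+/);
  var entry = { annotations: [] };
  while (parts[0] == 'fails' || parts[0] == 'skip')
    entry.annotations.push(parts.shift());
  entry.type  = parts.shift(); // 'script' or 'url-prefix'
  entry.value = parts.shift(); // test path, or the URL prefix
  return entry;
}
// parseManifestLine('skip script regress-324278.js # slow')
//   -> { annotations: ['skip'], type: 'script', value: 'regress-324278.js' }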
