Revert to 07be1f190a3d. Revision 5f5c1cd63641 should not have been pushed.

Jason Orendorff 2009-02-03 12:38:44 -06:00
Parent 8ca73fdbfa
Commit 22a1b4eab2
12 changed files: 231 additions and 515 deletions

View file

@ -1544,10 +1544,8 @@ static JSString* FASTCALL
Array_p_join(JSContext* cx, JSObject* obj, JSString *str)
{
jsval v;
if (!array_join_sub(cx, obj, TO_STRING, str, &v)) {
cx->builtinStatus |= JSBUILTIN_ERROR;
if (!array_join_sub(cx, obj, TO_STRING, str, &v))
return NULL;
}
JS_ASSERT(JSVAL_IS_STRING(v));
return JSVAL_TO_STRING(v);
}
@ -2148,8 +2146,7 @@ Array_p_push1(JSContext* cx, JSObject* obj, jsval v)
: array_push_slowly(cx, obj, 1, &v, &v)) {
return v;
}
cx->builtinStatus |= JSBUILTIN_ERROR;
return JSVAL_VOID;
return JSVAL_ERROR_COOKIE;
}
#endif
@ -2216,13 +2213,12 @@ static jsval FASTCALL
Array_p_pop(JSContext* cx, JSObject* obj)
{
jsval v;
if (OBJ_IS_DENSE_ARRAY(cx, obj)
if (OBJ_IS_DENSE_ARRAY(cx, obj)
? array_pop_dense(cx, obj, &v)
: array_pop_slowly(cx, obj, &v)) {
return v;
}
cx->builtinStatus |= JSBUILTIN_ERROR;
return JSVAL_VOID;
return JSVAL_ERROR_COOKIE;
}
#endif

View file

@ -49,8 +49,8 @@
#undef THIS
#endif
enum JSTNErrType { INFALLIBLE, FAIL_STATUS, FAIL_NULL, FAIL_NEG, FAIL_VOID, FAIL_COOKIE };
enum { JSTN_ERRTYPE_MASK = 0x07, JSTN_UNBOX_AFTER = 0x08, JSTN_MORE = 0x10 };
enum JSTNErrType { INFALLIBLE, FAIL_NULL, FAIL_NEG, FAIL_VOID, FAIL_JSVAL };
enum { JSTN_ERRTYPE_MASK = 7, JSTN_MORE = 8 };
#define JSTN_ERRTYPE(jstn) ((jstn)->flags & JSTN_ERRTYPE_MASK)
@ -85,7 +85,7 @@ struct JSTraceableNative {
const nanojit::CallInfo *builtin;
const char *prefix;
const char *argtypes;
uintN flags; /* JSTNErrType | JSTN_UNBOX_AFTER | JSTN_MORE */
uintN flags; /* JSTN_MORE | JSTNErrType */
};
/*
@ -120,41 +120,13 @@ struct JSTraceableNative {
* Types with -- for the two string fields are not permitted as argument types
* in JS_DEFINE_TRCINFO.
*
* There are three kinds of traceable-native error handling.
*
* - If a traceable native's return type ends with _FAIL, it always runs to
* completion. It can either succeed or fail with an error or exception;
* on success, it may or may not stay on trace. There may be side effects
* in any case. If the call succeeds but bails off trace, we resume in the
* interpreter at the next opcode.
*
* _FAIL builtins indicate failure or bailing off trace by setting bits in
* cx->builtinStatus.
*
* - If a traceable native's return type contains _RETRY, it can either
* succeed, fail with a JS exception, or tell the caller to bail off trace
* and retry the call from the interpreter. The last case happens if the
* builtin discovers that it can't do its job without examining the JS
* stack, reentering the interpreter, accessing properties of the global
* object, etc.
*
* The builtin must detect the need to retry before committing any side
* effects. If a builtin can't do this, it must use a _FAIL return type
* instead of _RETRY.
*
* _RETRY builtins indicate failure with a special return value that
* depends on the return type:
*
* BOOL_RETRY: JSVAL_TO_BOOLEAN(JSVAL_VOID)
* INT32_RETRY: any negative value
* STRING_RETRY: NULL
* OBJECT_RETRY_NULL: NULL
* JSVAL_RETRY: JSVAL_ERROR_COOKIE
*
* _RETRY function calls are faster than _FAIL calls. Each _RETRY call
* saves a write to cx->bailExit and a read from cx->builtinStatus.
*
* - All other traceable natives are infallible (e.g. Date.now, Math.log).
* If a traceable native can fail, the values that indicate failure are part of
* the return type:
* JSVAL_FAIL: JSVAL_ERROR_COOKIE
* BOOL_FAIL: JSVAL_TO_BOOLEAN(JSVAL_VOID)
* INT32_FAIL: any negative value
* STRING_FAIL: NULL
* OBJECT_FAIL_NULL: NULL
*
* Special builtins known to the tracer can have their own idiosyncratic
* error codes.
@ -170,35 +142,29 @@ struct JSTraceableNative {
* effects.
*/
#define _JS_CTYPE(ctype, size, pch, ach, flags) (ctype, size, pch, ach, flags)
#define _JS_JSVAL_CTYPE(size, pch, ach, flags) (jsval, size, pch, ach, (flags | JSTN_UNBOX_AFTER))
#define _JS_CTYPE_CONTEXT _JS_CTYPE(JSContext *, _JS_PTR,"C", "", INFALLIBLE)
#define _JS_CTYPE_RUNTIME _JS_CTYPE(JSRuntime *, _JS_PTR,"R", "", INFALLIBLE)
#define _JS_CTYPE_THIS _JS_CTYPE(JSObject *, _JS_PTR,"T", "", INFALLIBLE)
#define _JS_CTYPE_THIS_DOUBLE _JS_CTYPE(jsdouble, _JS_F64,"D", "", INFALLIBLE)
#define _JS_CTYPE_THIS_STRING _JS_CTYPE(JSString *, _JS_PTR,"S", "", INFALLIBLE)
#define _JS_CTYPE_PC _JS_CTYPE(jsbytecode *, _JS_PTR,"P", "", INFALLIBLE)
#define _JS_CTYPE_JSVAL _JS_JSVAL_CTYPE( _JS_PTR, "","v", INFALLIBLE)
#define _JS_CTYPE_JSVAL_RETRY _JS_JSVAL_CTYPE( _JS_PTR, --, --, FAIL_COOKIE)
#define _JS_CTYPE_JSVAL_FAIL _JS_JSVAL_CTYPE( _JS_PTR, --, --, FAIL_STATUS)
#define _JS_CTYPE_BOOL _JS_CTYPE(JSBool, _JS_I32, "","i", INFALLIBLE)
#define _JS_CTYPE_BOOL_RETRY _JS_CTYPE(int32, _JS_I32, --, --, FAIL_VOID)
#define _JS_CTYPE_BOOL_FAIL _JS_CTYPE(int32, _JS_I32, --, --, FAIL_STATUS)
#define _JS_CTYPE_INT32 _JS_CTYPE(int32, _JS_I32, "","i", INFALLIBLE)
#define _JS_CTYPE_INT32_RETRY _JS_CTYPE(int32, _JS_I32, --, --, FAIL_NEG)
#define _JS_CTYPE_UINT32 _JS_CTYPE(uint32, _JS_I32, --, --, INFALLIBLE)
#define _JS_CTYPE_DOUBLE _JS_CTYPE(jsdouble, _JS_F64, "","d", INFALLIBLE)
#define _JS_CTYPE_STRING _JS_CTYPE(JSString *, _JS_PTR, "","s", INFALLIBLE)
#define _JS_CTYPE_STRING_RETRY _JS_CTYPE(JSString *, _JS_PTR, --, --, FAIL_NULL)
#define _JS_CTYPE_STRING_FAIL _JS_CTYPE(JSString *, _JS_PTR, --, --, FAIL_STATUS)
#define _JS_CTYPE_OBJECT _JS_CTYPE(JSObject *, _JS_PTR, "","o", INFALLIBLE)
#define _JS_CTYPE_OBJECT_RETRY_NULL _JS_CTYPE(JSObject *, _JS_PTR, --, --, FAIL_NULL)
#define _JS_CTYPE_OBJECT_FAIL _JS_CTYPE(JSObject *, _JS_PTR, --, --, FAIL_STATUS)
#define _JS_CTYPE_REGEXP _JS_CTYPE(JSObject *, _JS_PTR, "","r", INFALLIBLE)
#define _JS_CTYPE_SCOPEPROP _JS_CTYPE(JSScopeProperty *, _JS_PTR, --, --, INFALLIBLE)
#define _JS_CTYPE_SIDEEXIT _JS_CTYPE(SideExit *, _JS_PTR, --, --, INFALLIBLE)
#define _JS_CTYPE_INTERPSTATE _JS_CTYPE(InterpState *, _JS_PTR, --, --, INFALLIBLE)
#define _JS_CTYPE_FRAGMENT _JS_CTYPE(nanojit::Fragment *, _JS_PTR, --, --, INFALLIBLE)
#define _JS_CTYPE_CONTEXT _JS_CTYPE(JSContext *, _JS_PTR,"C", "", INFALLIBLE)
#define _JS_CTYPE_RUNTIME _JS_CTYPE(JSRuntime *, _JS_PTR,"R", "", INFALLIBLE)
#define _JS_CTYPE_THIS _JS_CTYPE(JSObject *, _JS_PTR,"T", "", INFALLIBLE)
#define _JS_CTYPE_THIS_DOUBLE _JS_CTYPE(jsdouble, _JS_F64,"D", "", INFALLIBLE)
#define _JS_CTYPE_THIS_STRING _JS_CTYPE(JSString *, _JS_PTR,"S", "", INFALLIBLE)
#define _JS_CTYPE_PC _JS_CTYPE(jsbytecode *, _JS_PTR,"P", "", INFALLIBLE)
#define _JS_CTYPE_JSVAL _JS_CTYPE(jsval, _JS_PTR, "","v", INFALLIBLE)
#define _JS_CTYPE_JSVAL_FAIL _JS_CTYPE(jsval, _JS_PTR, --, --, FAIL_JSVAL)
#define _JS_CTYPE_BOOL _JS_CTYPE(JSBool, _JS_I32, "","i", INFALLIBLE)
#define _JS_CTYPE_BOOL_FAIL _JS_CTYPE(int32, _JS_I32, --, --, FAIL_VOID)
#define _JS_CTYPE_INT32 _JS_CTYPE(int32, _JS_I32, "","i", INFALLIBLE)
#define _JS_CTYPE_INT32_FAIL _JS_CTYPE(int32, _JS_I32, --, --, FAIL_NEG)
#define _JS_CTYPE_UINT32 _JS_CTYPE(uint32, _JS_I32, --, --, INFALLIBLE)
#define _JS_CTYPE_DOUBLE _JS_CTYPE(jsdouble, _JS_F64, "","d", INFALLIBLE)
#define _JS_CTYPE_STRING _JS_CTYPE(JSString *, _JS_PTR, "","s", INFALLIBLE)
#define _JS_CTYPE_STRING_FAIL _JS_CTYPE(JSString *, _JS_PTR, --, --, FAIL_NULL)
#define _JS_CTYPE_OBJECT _JS_CTYPE(JSObject *, _JS_PTR, "","o", INFALLIBLE)
#define _JS_CTYPE_OBJECT_FAIL_NULL _JS_CTYPE(JSObject *, _JS_PTR, --, --, FAIL_NULL)
#define _JS_CTYPE_REGEXP _JS_CTYPE(JSObject *, _JS_PTR, "","r", INFALLIBLE)
#define _JS_CTYPE_SCOPEPROP _JS_CTYPE(JSScopeProperty *, _JS_PTR, --, --, INFALLIBLE)
#define _JS_CTYPE_SIDEEXIT _JS_CTYPE(SideExit *, _JS_PTR, --, --, INFALLIBLE)
#define _JS_CTYPE_INTERPSTATE _JS_CTYPE(InterpState *, _JS_PTR, --, --, INFALLIBLE)
#define _JS_CTYPE_FRAGMENT _JS_CTYPE(nanojit::Fragment *, _JS_PTR, --, --, INFALLIBLE)
#define _JS_EXPAND(tokens) tokens
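The jsbuiltins.h comment and _JS_CTYPE table above describe the restored convention: a fallible traceable native signals failure entirely through its return value (NULL, a negative number, a pseudo-boolean, or JSVAL_ERROR_COOKIE, depending on the declared type). As a hedged illustration only, and not part of this diff, the sketch below defines a hypothetical STRING_FAIL native in the same style as the date and string builtins; the function, the Example_name slow native it would pair with, and the use of the length atom are all invented for the example.

/* Minimal sketch, hypothetical builtin: STRING_FAIL means a NULL return
 * value tells the tracer that an error or exception is pending. */
static JSString* FASTCALL
Example_p_name(JSContext* cx, JSObject* obj)
{
    jsval v;
    if (!OBJ_GET_PROPERTY(cx, obj,
                          ATOM_TO_JSID(cx->runtime->atomState.lengthAtom), &v)) {
        return NULL;                    /* failure: error already reported */
    }
    return js_ValueToString(cx, v);     /* NULL from here also means failure */
}

JS_DEFINE_TRCINFO_1(Example_name,
    (2, (static, STRING_FAIL, Example_p_name, CONTEXT, THIS, 1, 1)))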

View file

@ -93,9 +93,6 @@ typedef struct JSGSNCache {
#define JS_CLEAR_GSN_CACHE(cx) GSN_CACHE_CLEAR(&JS_GSN_CACHE(cx))
#define JS_METER_GSN_CACHE(cx,cnt) GSN_CACHE_METER(&JS_GSN_CACHE(cx), cnt)
typedef struct InterpState InterpState;
typedef struct VMSideExit VMSideExit;
#ifdef __cplusplus
namespace nanojit {
class Fragment;
@ -131,26 +128,8 @@ typedef struct JSTraceMonitor {
* both interpreter activation and last-ditch garbage collection when up
* against our runtime's memory limits. This flag also suppresses calls to
* JS_ReportOutOfMemory when failing due to runtime limits.
*
* !onTrace && !recorder: not on trace.
* onTrace && recorder: recording a trace.
* onTrace && !recorder: executing a trace.
* !onTrace && recorder && !prohibitRecording:
* not on trace; deep-aborted while recording.
* !onTrace && recorder && prohibitRecording:
* not on trace; deep-bailed in SpiderMonkey code called from a
* trace. JITted code is on the stack.
*/
JSPackedBool onTrace;
/*
* Do not start recording after a deep bail. That would free JITted code
* pages that we will later return to.
*/
JSPackedBool prohibitRecording;
/* See reservedObjects below. */
JSPackedBool useReservedObjects;
JSBool onTrace;
CLS(nanojit::LirBuffer) lirbuf;
CLS(nanojit::Fragmento) fragmento;
@ -167,6 +146,7 @@ typedef struct JSTraceMonitor {
* The JIT uses this to ensure that leaving a trace tree can't fail.
*/
JSObject *reservedObjects;
JSBool useReservedObjects;
/* Fragmento for the regular expression compiler. This is logically
* a distinct compiler but needs to be managed in exactly the same
@ -178,8 +158,6 @@ typedef struct JSTraceMonitor {
CLS(TraceRecorder) abortStack;
} JSTraceMonitor;
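The comment being removed above spells out how the onTrace, recorder and prohibitRecording flags combine. As a hedged aid to reading it, and assuming those pre-revert JSTraceMonitor fields, a hypothetical classification helper (not in the tree) would look like this:

/* Hypothetical helper: names the states enumerated in the comment above.
 * Assumes the pre-revert fields onTrace, recorder and prohibitRecording. */
static const char *
DescribeTraceState(JSTraceMonitor *tm)
{
    if (tm->onTrace)
        return tm->recorder ? "recording a trace" : "executing a trace";
    if (!tm->recorder)
        return "not on trace";
    return tm->prohibitRecording
           ? "not on trace; deep-bailed, JITted code still on the stack"
           : "not on trace; deep-aborted while recording";
}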
typedef struct InterpStruct InterpStruct;
#ifdef JS_TRACER
# define JS_ON_TRACE(cx) (JS_TRACE_MONITOR(cx).onTrace)
#else
@ -283,14 +261,6 @@ typedef enum JSRuntimeState {
JSRTS_LANDING
} JSRuntimeState;
#ifdef JS_TRACER
typedef enum JSBuiltinStatus {
JSBUILTIN_OK = 0,
JSBUILTIN_BAILED = 1,
JSBUILTIN_ERROR = 2
} JSBuiltinStatus;
#endif
typedef enum JSBuiltinFunctionId {
JSBUILTIN_ObjectToIterator,
JSBUILTIN_CallIteratorNext,
@ -1014,23 +984,6 @@ struct JSContext {
/* Current bytecode location (or NULL if no hint was supplied). */
jsbytecode *pcHint;
#ifdef JS_TRACER
/*
* State for the current tree execution. bailExit is valid if the tree has
* called back into native code via a _FAIL builtin and has not yet bailed,
* else garbage (NULL in debug builds).
*/
InterpState *interpState;
VMSideExit *bailExit;
/*
* Used by _FAIL builtins; see jsbuiltins.h. The builtin sets the
* JSBUILTIN_BAILED bit if it bails off trace and the JSBUILTIN_ERROR bit
* if an error or exception occurred. Cleared on side exit.
*/
uint32 builtinStatus;
#endif
};
#define BEGIN_PC_HINT(pc) (cx->pcHint = (pc))
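The fields removed from JSContext above (interpState, bailExit, builtinStatus) carried the pre-revert _FAIL protocol: instead of returning a sentinel, a fallible builtin set JSBUILTIN_ERROR (or JSBUILTIN_BAILED) in cx->builtinStatus and the trace guarded on that word after the call. A hedged sketch of that removed style, with an invented builtin name:

/* Minimal sketch, hypothetical builtin, pre-revert FAIL_STATUS style:
 * the return value is ignored once JSBUILTIN_ERROR is set. */
static jsval FASTCALL
Example_getprop_tn(JSContext* cx, JSObject* obj, jsid id)
{
    jsval v;
    if (!OBJ_GET_PROPERTY(cx, obj, id, &v)) {
        cx->builtinStatus |= JSBUILTIN_ERROR;   /* error or exception pending */
        return JSVAL_VOID;
    }
    return v;
}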

View file

@ -2031,7 +2031,7 @@ static JSFunctionSpec date_static_methods[] = {
};
JS_DEFINE_TRCINFO_1(date_valueOf,
(3, (static, JSVAL_RETRY, date_valueOf_tn, CONTEXT, THIS, STRING, 0, 0)))
(3, (static, JSVAL_FAIL, date_valueOf_tn, CONTEXT, THIS, STRING, 0, 0)))
static JSFunctionSpec date_methods[] = {
JS_FN("getTime", date_getTime, 0,0),

View file

@ -4293,11 +4293,13 @@ js_Interpret(JSContext *cx)
LOAD_ATOM(i);
}
id = ATOM_TO_JSID(atom);
if (entry
? !js_GetPropertyHelper(cx, aobj, id, &rval, &entry)
: !OBJ_GET_PROPERTY(cx, obj, id, &rval)) {
goto error;
}
BEGIN_PC_HINT(regs.pc);
if (entry
? !js_GetPropertyHelper(cx, aobj, id, &rval, &entry)
: !OBJ_GET_PROPERTY(cx, obj, id, &rval)) {
goto error;
}
END_PC_HINT();
} while (0);
STORE_OPND(-1, rval);
@ -4401,17 +4403,21 @@ js_Interpret(JSContext *cx)
goto error;
} else
#endif
if (entry
? !js_GetPropertyHelper(cx, aobj, id, &rval, &entry)
: !OBJ_GET_PROPERTY(cx, obj, id, &rval)) {
goto error;
}
BEGIN_PC_HINT(regs.pc);
if (entry
? !js_GetPropertyHelper(cx, aobj, id, &rval, &entry)
: !OBJ_GET_PROPERTY(cx, obj, id, &rval)) {
goto error;
}
END_PC_HINT();
STORE_OPND(-1, OBJECT_TO_JSVAL(obj));
STORE_OPND(-2, rval);
} else {
JS_ASSERT(obj->map->ops->getProperty == js_GetProperty);
if (!js_GetPropertyHelper(cx, obj, id, &rval, &entry))
goto error;
BEGIN_PC_HINT(regs.pc);
if (!js_GetPropertyHelper(cx, obj, id, &rval, &entry))
goto error;
END_PC_HINT();
STORE_OPND(-1, lval);
STORE_OPND(-2, rval);
}

View file

@ -1932,11 +1932,11 @@ const char js_lookupSetter_str[] = "__lookupSetter__";
#endif
JS_DEFINE_TRCINFO_1(obj_valueOf,
(3, (static, JSVAL, Object_p_valueOf, CONTEXT, THIS, STRING, 0, 0)))
(3, (static, JSVAL, Object_p_valueOf, CONTEXT, THIS, STRING, 0, 0)))
JS_DEFINE_TRCINFO_1(obj_hasOwnProperty,
(3, (static, BOOL_RETRY, Object_p_hasOwnProperty, CONTEXT, THIS, STRING, 0, 0)))
(3, (static, BOOL_FAIL, Object_p_hasOwnProperty, CONTEXT, THIS, STRING, 0, 0)))
JS_DEFINE_TRCINFO_1(obj_propertyIsEnumerable,
(3, (static, BOOL_RETRY, Object_p_propertyIsEnumerable, CONTEXT, THIS, STRING, 0, 0)))
(3, (static, BOOL_FAIL, Object_p_propertyIsEnumerable, CONTEXT, THIS, STRING, 0, 0)))
static JSFunctionSpec object_methods[] = {
#if JS_HAS_TOSOURCE
@ -3904,7 +3904,7 @@ static jsbytecode*
js_GetCurrentBytecodePC(JSContext* cx)
{
jsbytecode *pc = cx->pcHint;
if (!pc || !JS_ON_TRACE(cx)) {
if (!pc) {
JSStackFrame* fp = js_GetTopStackFrame(cx);
if (fp && fp->regs) {
pc = fp->regs->pc;
@ -3912,8 +3912,6 @@ js_GetCurrentBytecodePC(JSContext* cx)
// JSOP_GETELEM imacro (bug 476559).
if (*pc == JSOP_CALL && fp->imacpc && *fp->imacpc == JSOP_GETELEM)
pc = fp->imacpc;
} else {
pc = NULL;
}
}
return pc;

View file

@ -64,7 +64,6 @@
#include "jsregexp.h"
#include "jsscan.h"
#include "jsscope.h"
#include "jsstaticcheck.h"
#include "jsstr.h"
#ifdef JS_TRACER
@ -3833,17 +3832,14 @@ MatchRegExp(REGlobalData *gData, REMatchState *x)
gData->skipped = (ptrdiff_t) x->cp;
#ifdef JS_JIT_SPEW
debug_only_v({
VOUCH_DOES_NOT_REQUIRE_STACK();
JSStackFrame *caller = (JS_ON_TRACE(gData->cx))
? NULL
: js_GetScriptedCaller(gData->cx, NULL);
printf("entering REGEXP trace at %s:%u@%u, code: %p\n",
caller ? caller->script->filename : "<unknown>",
caller ? js_FramePCToLineNumber(gData->cx, caller) : 0,
caller ? FramePCOffset(caller) : 0,
(void *) native);
})
{
JSStackFrame *caller = js_GetScriptedCaller(gData->cx, NULL);
debug_only_v(printf("entering REGEXP trace at %s:%u@%u, code: %p\n",
caller ? caller->script->filename : "<unknown>",
caller ? js_FramePCToLineNumber(gData->cx, caller) : 0,
caller ? FramePCOffset(caller) : 0,
(void *) native););
}
#endif
#if defined(JS_NO_FASTCALL) && defined(NANOJIT_IA32)
@ -4823,7 +4819,7 @@ Regexp_p_test(JSContext* cx, JSObject* regexp, JSString* str)
}
JS_DEFINE_TRCINFO_1(regexp_test,
(3, (static, BOOL_RETRY, Regexp_p_test, CONTEXT, THIS, STRING, 1, 1)))
(3, (static, BOOL_FAIL, Regexp_p_test, CONTEXT, THIS, STRING, 1, 1)))
#endif

View file

@ -1344,6 +1344,8 @@ match_or_replace(JSContext *cx,
destroy(cx, data);
}
} else {
jsval savedObject = JSVAL_NULL;
if (GET_MODE(data->flags) == MODE_REPLACE) {
test = JS_TRUE;
} else {
@ -1351,7 +1353,10 @@ match_or_replace(JSContext *cx,
* MODE_MATCH implies str_match is being called from a script or a
* scripted function. If the caller cares only about testing null
* vs. non-null return value, optimize away the array object that
* would normally be returned in *vp.
* would normally be returned in *vp. Instead return an arbitrary
* object (not JSVAL_TRUE, for type map integrity; see bug 453564).
* The caller provides the object in *vp and is responsible for
* rooting it elsewhere.
*
* Assume a full array result is required, then prove otherwise.
*/
@ -1365,12 +1370,16 @@ match_or_replace(JSContext *cx,
case JSOP_IFEQX:
case JSOP_IFNEX:
test = JS_TRUE;
savedObject = *vp;
JS_ASSERT(!JSVAL_IS_PRIMITIVE(savedObject));
break;
default:;
}
}
}
ok = js_ExecuteRegExp(cx, re, str, &index, test, vp);
if (ok && !JSVAL_IS_NULL(savedObject) && *vp == JSVAL_TRUE)
*vp = savedObject;
}
DROP_REGEXP(cx, re);
@ -1445,6 +1454,9 @@ str_match(JSContext *cx, uintN argc, jsval *vp)
for (fp = js_GetTopStackFrame(cx); fp && !fp->regs; fp = fp->down)
JS_ASSERT(!fp->script);
/* Root the object in vp[0]. See comment in match_or_replace. */
JSAutoTempValueRooter tvr(cx, vp[0]);
return StringMatchHelper(cx, argc, vp, fp ? fp->regs->pc : NULL);
}
@ -1452,22 +1464,22 @@ str_match(JSContext *cx, uintN argc, jsval *vp)
static jsval FASTCALL
String_p_match(JSContext* cx, JSString* str, jsbytecode *pc, JSObject* regexp)
{
jsval vp[3] = { JSVAL_NULL, STRING_TO_JSVAL(str), OBJECT_TO_JSVAL(regexp) };
if (!StringMatchHelper(cx, 1, vp, pc)) {
cx->builtinStatus |= JSBUILTIN_ERROR;
return JSVAL_VOID;
}
/* arbitrary object in vp[0] */
jsval vp[3] = { OBJECT_TO_JSVAL(regexp), STRING_TO_JSVAL(str), OBJECT_TO_JSVAL(regexp) };
if (!StringMatchHelper(cx, 1, vp, pc))
return JSVAL_ERROR_COOKIE;
JS_ASSERT(JSVAL_IS_OBJECT(vp[0]));
return vp[0];
}
static jsval FASTCALL
String_p_match_obj(JSContext* cx, JSObject* str, jsbytecode *pc, JSObject* regexp)
{
jsval vp[3] = { JSVAL_NULL, OBJECT_TO_JSVAL(str), OBJECT_TO_JSVAL(regexp) };
if (!StringMatchHelper(cx, 1, vp, pc)) {
cx->builtinStatus |= JSBUILTIN_ERROR;
return JSVAL_VOID;
}
/* arbitrary object in vp[0] */
jsval vp[3] = { OBJECT_TO_JSVAL(regexp), OBJECT_TO_JSVAL(str), OBJECT_TO_JSVAL(regexp) };
if (!StringMatchHelper(cx, 1, vp, pc))
return JSVAL_ERROR_COOKIE;
JS_ASSERT(JSVAL_IS_OBJECT(vp[0]));
return vp[0];
}
#endif
@ -2488,32 +2500,32 @@ JS_DEFINE_CALLINFO_2(extern, BOOL, js_EqualStrings, STRING, STRING,
JS_DEFINE_CALLINFO_2(extern, INT32, js_CompareStrings, STRING, STRING, 1, 1)
JS_DEFINE_TRCINFO_1(str_toString,
(2, (extern, STRING_FAIL, String_p_toString, CONTEXT, THIS, 1, 1)))
(2, (extern, STRING_FAIL, String_p_toString, CONTEXT, THIS, 1, 1)))
JS_DEFINE_TRCINFO_2(str_substring,
(4, (static, STRING_RETRY, String_p_substring, CONTEXT, THIS_STRING, INT32, INT32, 1, 1)),
(3, (static, STRING_RETRY, String_p_substring_1, CONTEXT, THIS_STRING, INT32, 1, 1)))
(4, (static, STRING_FAIL, String_p_substring, CONTEXT, THIS_STRING, INT32, INT32, 1, 1)),
(3, (static, STRING_FAIL, String_p_substring_1, CONTEXT, THIS_STRING, INT32, 1, 1)))
JS_DEFINE_TRCINFO_1(str_charAt,
(3, (extern, STRING_RETRY, js_String_getelem, CONTEXT, THIS_STRING, INT32, 1, 1)))
(3, (extern, STRING_FAIL, js_String_getelem, CONTEXT, THIS_STRING, INT32, 1, 1)))
JS_DEFINE_TRCINFO_1(str_charCodeAt,
(2, (extern, INT32_RETRY, js_String_p_charCodeAt, THIS_STRING, INT32, 1, 1)))
(2, (extern, INT32_FAIL, js_String_p_charCodeAt, THIS_STRING, INT32, 1, 1)))
JS_DEFINE_TRCINFO_4(str_concat,
(3, (static, STRING_RETRY, String_p_concat_1int, CONTEXT, THIS_STRING, INT32, 1, 1)),
(3, (extern, STRING_RETRY, js_ConcatStrings, CONTEXT, THIS_STRING, STRING, 1, 1)),
(4, (static, STRING_RETRY, String_p_concat_2str, CONTEXT, THIS_STRING, STRING, STRING, 1, 1)),
(5, (static, STRING_RETRY, String_p_concat_3str, CONTEXT, THIS_STRING, STRING, STRING, STRING, 1, 1)))
(3, (static, STRING_FAIL, String_p_concat_1int, CONTEXT, THIS_STRING, INT32, 1, 1)),
(3, (extern, STRING_FAIL, js_ConcatStrings, CONTEXT, THIS_STRING, STRING, 1, 1)),
(4, (static, STRING_FAIL, String_p_concat_2str, CONTEXT, THIS_STRING, STRING, STRING, 1, 1)),
(5, (static, STRING_FAIL, String_p_concat_3str, CONTEXT, THIS_STRING, STRING, STRING, STRING, 1, 1)))
JS_DEFINE_TRCINFO_2(str_match,
(4, (static, JSVAL_FAIL, String_p_match, CONTEXT, THIS_STRING, PC, REGEXP, 1, 1)),
(4, (static, JSVAL_FAIL, String_p_match_obj, CONTEXT, THIS, PC, REGEXP, 1, 1)))
(4, (static, JSVAL_FAIL, String_p_match, CONTEXT, THIS_STRING, PC, REGEXP, 1, 1)),
(4, (static, JSVAL_FAIL, String_p_match_obj, CONTEXT, THIS, PC, REGEXP, 1, 1)))
JS_DEFINE_TRCINFO_3(str_replace,
(4, (static, STRING_RETRY, String_p_replace_str, CONTEXT, THIS_STRING, REGEXP, STRING, 1, 1)),
(4, (static, STRING_RETRY, String_p_replace_str2, CONTEXT, THIS_STRING, STRING, STRING, 1, 1)),
(5, (static, STRING_RETRY, String_p_replace_str3, CONTEXT, THIS_STRING, STRING, STRING, STRING, 1, 1)))
(4, (static, STRING_FAIL, String_p_replace_str, CONTEXT, THIS_STRING, REGEXP, STRING, 1, 1)),
(4, (static, STRING_FAIL, String_p_replace_str2, CONTEXT, THIS_STRING, STRING, STRING, 1, 1)),
(5, (static, STRING_FAIL, String_p_replace_str3, CONTEXT, THIS_STRING, STRING, STRING, STRING, 1, 1)))
JS_DEFINE_TRCINFO_1(str_split,
(3, (static, OBJECT_RETRY_NULL, String_p_split, CONTEXT, THIS_STRING, STRING, 0, 0)))
(3, (static, OBJECT_FAIL_NULL, String_p_split, CONTEXT, THIS_STRING, STRING, 0, 0)))
JS_DEFINE_TRCINFO_1(str_toLowerCase,
(2, (extern, STRING_RETRY, js_toLowerCase, CONTEXT, THIS_STRING, 1, 1)))
(2, (extern, STRING_FAIL, js_toLowerCase, CONTEXT, THIS_STRING, 1, 1)))
JS_DEFINE_TRCINFO_1(str_toUpperCase,
(2, (extern, STRING_RETRY, js_toUpperCase, CONTEXT, THIS_STRING, 1, 1)))
(2, (extern, STRING_FAIL, js_toUpperCase, CONTEXT, THIS_STRING, 1, 1)))
#define GENERIC JSFUN_GENERIC_NATIVE
#define PRIMITIVE JSFUN_THISP_PRIMITIVE
@ -2650,7 +2662,7 @@ String_fromCharCode(JSContext* cx, int32 i)
#endif
JS_DEFINE_TRCINFO_1(str_fromCharCode,
(2, (static, STRING_RETRY, String_fromCharCode, CONTEXT, INT32, 1, 1)))
(2, (static, STRING_FAIL, String_fromCharCode, CONTEXT, INT32, 1, 1)))
static JSFunctionSpec string_static_methods[] = {
JS_TN("fromCharCode", str_fromCharCode, 1, 0, str_fromCharCode_trcinfo),

View file

@ -1218,7 +1218,7 @@ TraceRecorder::TraceRecorder(JSContext* cx, VMSideExit* _anchor, Fragment* _frag
lirbuf->sp = addName(lir->insLoad(LIR_ldp, lirbuf->state, (int)offsetof(InterpState, sp)), "sp");
lirbuf->rp = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, rp)), "rp");
cx_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, cx)), "cx");
gp_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, global)), "gp");
gp_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, gp)), "gp");
eos_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, eos)), "eos");
eor_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, eor)), "eor");
globalObj_ins = addName(lir->insLoad(LIR_ldp, lirbuf->state, offsetof(InterpState, globalObj)), "globalObj");
@ -2098,7 +2098,7 @@ TraceRecorder::snapshot(ExitType exitType)
/* WARNING: don't return before restoring the original pc if (resumeAfter). */
bool resumeAfter = (pendingTraceableNative &&
JSTN_ERRTYPE(pendingTraceableNative) == FAIL_STATUS);
JSTN_ERRTYPE(pendingTraceableNative) == FAIL_JSVAL);
if (resumeAfter) {
JS_ASSERT(*pc == JSOP_CALL || *pc == JSOP_APPLY);
pc += cs.length;
@ -2130,8 +2130,7 @@ TraceRecorder::snapshot(ExitType exitType)
/* If we are capturing the stack state on a specific instruction, the value on
the top of the stack is a boxed value. */
if (resumeAfter) {
if (pendingTraceableNative->flags & JSTN_UNBOX_AFTER)
typemap[stackSlots - 1] = JSVAL_BOXED;
typemap[stackSlots - 1] = JSVAL_BOXED;
/* Now restore the original pc (after which early returns are ok).
MUST_FLOW_LABEL(restore_pc);
@ -3040,9 +3039,6 @@ js_StartRecorder(JSContext* cx, VMSideExit* anchor, Fragment* f, TreeInfo* ti,
JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
JS_ASSERT(f->root != f || !cx->fp->imacpc);
if (JS_TRACE_MONITOR(cx).prohibitRecording)
return false;
/* start recording if no exception during construction */
tm->recorder = new (&gc) TraceRecorder(cx, anchor, f, ti,
stackSlots, ngslots, typeMap,
@ -3859,93 +3855,81 @@ js_FindVMCompatiblePeer(JSContext* cx, Fragment* f)
return NULL;
}
static void
LeaveTree(InterpState&, VMSideExit* lr);
/**
* Executes a tree.
*/
static JS_REQUIRES_STACK VMSideExit*
js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount,
static VMSideExit*
js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount,
VMSideExit** innermostNestedGuardp)
{
JS_ASSERT(f->code() && f->vmprivate);
JS_ASSERT(cx->builtinStatus == 0);
JSTraceMonitor* tm = &JS_TRACE_MONITOR(cx);
JSObject* globalObj = JS_GetGlobalForObject(cx, cx->fp->scopeChain);
TreeInfo* ti = (TreeInfo*)f->vmprivate;
unsigned ngslots = ti->globalSlots->length();
uint16* gslots = ti->globalSlots->data();
InterpState state;
state.cx = cx;
state.globalObj = globalObj;
state.inlineCallCountp = &inlineCallCount;
state.innermostNestedGuardp = innermostNestedGuardp;
state.outermostTree = ti;
state.lastTreeExitGuard = NULL;
state.lastTreeCallGuard = NULL;
state.rpAtLastTreeCall = NULL;
unsigned globalFrameSize = STOBJ_NSLOTS(globalObj);
double* global = (double*)alloca((globalFrameSize+1) * sizeof(double));
double stack_buffer[MAX_NATIVE_STACK_SLOTS];
double* stack = stack_buffer;
/* Make sure the global object is sane. */
JS_ASSERT(!ngslots || (OBJ_SHAPE(JS_GetGlobalForObject(cx, cx->fp->scopeChain)) == ti->globalShape));
JS_ASSERT(!ngslots || (OBJ_SHAPE(JS_GetGlobalForObject(cx, cx->fp->scopeChain)) == ti->globalShape));
/* Make sure our caller replenished the double pool. */
JS_ASSERT(tm->reservedDoublePoolPtr >= tm->reservedDoublePool + MAX_NATIVE_STACK_SLOTS);
/* Reserve objects and stack space now, to make leaving the tree infallible. */
void *reserve;
void *stackMark = JS_ARENA_MARK(&cx->stackPool);
if (!js_ReserveObjects(cx, MAX_CALL_STACK_ENTRIES))
return NULL;
/* Setup the native global frame. */
unsigned globalFrameSize = STOBJ_NSLOTS(globalObj);
state.global = (double*)alloca((globalFrameSize+1) * sizeof(double));
/* Setup the native stack frame. */
double stack_buffer[MAX_NATIVE_STACK_SLOTS];
state.stackBase = stack_buffer;
double* entry_sp = &stack_buffer[ti->nativeStackBase/sizeof(double)];
state.sp = entry_sp;
state.eos = state.sp + MAX_NATIVE_STACK_SLOTS;
/* Setup the native call stack frame. */
FrameInfo* callstack_buffer[MAX_CALL_STACK_ENTRIES];
state.callstackBase = callstack_buffer;
state.rp = callstack_buffer;
state.eor = callstack_buffer + MAX_CALL_STACK_ENTRIES;
void *reserve;
state.stackMark = JS_ARENA_MARK(&cx->stackPool);
JS_ARENA_ALLOCATE(reserve, &cx->stackPool, MAX_INTERP_STACK_BYTES);
if (!reserve)
return NULL;
#ifdef DEBUG
state.jsframe_pop_blocks_set_on_entry = bool(cx->fp->flags & JSFRAME_POP_BLOCKS);
bool jsframe_pop_blocks_set_on_entry = bool(cx->fp->flags & JSFRAME_POP_BLOCKS);
memset(stack_buffer, 0xCD, sizeof(stack_buffer));
memset(state.global, 0xCD, (globalFrameSize+1)*sizeof(double));
#endif
memset(global, 0xCD, (globalFrameSize+1)*sizeof(double));
#endif
debug_only(*(uint64*)&state.global[globalFrameSize] = 0xdeadbeefdeadbeefLL;)
debug_only(*(uint64*)&global[globalFrameSize] = 0xdeadbeefdeadbeefLL;)
debug_only_v(printf("entering trace at %s:%u@%u, native stack slots: %u code: %p\n",
cx->fp->script->filename,
js_FramePCToLineNumber(cx, cx->fp),
FramePCOffset(cx->fp),
ti->maxNativeStackSlots,
f->code());)
JS_ASSERT(ti->nGlobalTypes() == ngslots);
if (ngslots)
BuildNativeGlobalFrame(cx, ngslots, gslots, ti->globalTypeMap(), global);
BuildNativeStackFrame(cx, 0/*callDepth*/, ti->typeMap.data(), stack);
if (ngslots)
BuildNativeGlobalFrame(cx, ngslots, gslots, ti->globalTypeMap(), state.global);
BuildNativeStackFrame(cx, 0/*callDepth*/, ti->typeMap.data(), stack_buffer);
double* entry_sp = &stack[ti->nativeStackBase/sizeof(double)];
FrameInfo* callstack_buffer[MAX_CALL_STACK_ENTRIES];
FrameInfo** callstack = callstack_buffer;
InterpState state;
state.sp = (void*)entry_sp;
state.eos = ((double*)state.sp) + MAX_NATIVE_STACK_SLOTS;
state.rp = callstack;
state.eor = callstack + MAX_CALL_STACK_ENTRIES;
state.gp = global;
state.cx = cx;
state.globalObj = globalObj;
state.lastTreeExitGuard = NULL;
state.lastTreeCallGuard = NULL;
state.rpAtLastTreeCall = NULL;
union { NIns *code; GuardRecord* (FASTCALL *func)(InterpState*, Fragment*); } u;
u.code = f->code();
#ifdef EXECUTE_TREE_TIMER
state.startTime = rdtsc();
#ifdef JS_JIT_SPEW
#if defined(NANOJIT_IA32) || (defined(NANOJIT_AMD64) && defined(__GNUC__))
uint64 start = rdtsc();
#endif
#endif
/* Set a flag that indicates to the runtime system that we are running in native code
@ -3954,8 +3938,7 @@ js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount,
and eventually triggers the GC. */
JS_ASSERT(!tm->onTrace);
tm->onTrace = true;
cx->interpState = &state;
debug_only(fflush(NULL);)
GuardRecord* rec;
#if defined(JS_NO_FASTCALL) && defined(NANOJIT_IA32)
@ -3968,19 +3951,8 @@ js_ExecuteTree(JSContext* cx, Fragment* f, uintN& inlineCallCount,
AUDIT(traceTriggered);
JS_ASSERT(lr->exitType != LOOP_EXIT || !lr->calldepth);
tm->onTrace = false;
LeaveTree(state, lr);
return state.innermost;
}
static JS_FORCES_STACK void
LeaveTree(InterpState& state, VMSideExit* lr)
{
VOUCH_DOES_NOT_REQUIRE_STACK();
JSContext* cx = state.cx;
FrameInfo** callstack = state.callstackBase;
double* stack = state.stackBase;
/* Except if we find that this is a nested bailout, the guard the call returned is the
one we have to use to adjust pc and sp. */
@ -4013,56 +3985,15 @@ LeaveTree(InterpState& state, VMSideExit* lr)
rp = (FrameInfo**)state.rpAtLastTreeCall;
}
innermost = state.lastTreeExitGuard;
if (state.innermostNestedGuardp)
*state.innermostNestedGuardp = nested;
if (innermostNestedGuardp)
*innermostNestedGuardp = nested;
JS_ASSERT(nested);
JS_ASSERT(nested->exitType == NESTED_EXIT);
JS_ASSERT(state.lastTreeExitGuard);
JS_ASSERT(state.lastTreeExitGuard->exitType != NESTED_EXIT);
}
int32_t bs = cx->builtinStatus;
cx->builtinStatus = 0;
bool bailed = innermost->exitType == STATUS_EXIT && (bs & JSBUILTIN_BAILED);
if (bailed)
JS_TRACE_MONITOR(cx).prohibitRecording = false;
if (bailed && !(bs & JSBUILTIN_ERROR)) {
/*
* Deep-bail case.
*
* A _FAIL native already called LeaveTree. We already reconstructed
* the interpreter stack, in pre-call state, with pc pointing to the
* CALL/APPLY op, for correctness. Then we continued in native code.
* The native succeeded (no exception or error). After it returned, the
* trace stored the return value (at the top of the native stack) and
* then immediately flunked the guard on cx->builtinStatus.
*
* Now LeaveTree has been called again from the tail of
* js_ExecuteTree. We are about to return to the interpreter. Adjust
* the top stack frame to resume on the next op.
*/
JS_ASSERT(*cx->fp->regs->pc == JSOP_CALL || *cx->fp->regs->pc == JSOP_APPLY);
uintN argc = GET_ARGC(cx->fp->regs->pc);
cx->fp->regs->pc += JSOP_CALL_LENGTH;
cx->fp->regs->sp -= argc + 1;
JS_ASSERT_IF(!cx->fp->imacpc,
cx->fp->slots + cx->fp->script->nfixed +
js_ReconstructStackDepth(cx, cx->fp->script, cx->fp->regs->pc) ==
cx->fp->regs->sp);
/*
* The return value was not available when we reconstructed the stack,
* but we have it now. Box it.
*/
uint8* typeMap = getStackTypeMap(innermost);
NativeToValue(cx,
cx->fp->regs->sp[-1],
typeMap[innermost->numStackSlots - 1],
(jsdouble *) state.sp + innermost->sp_adj / sizeof(jsdouble) - 1);
return;
}
JS_ARENA_RELEASE(&cx->stackPool, state.stackMark);
JS_ARENA_RELEASE(&cx->stackPool, stackMark);
while (callstack < rp) {
/* Synthesize a stack frame and write out the values in it using the type map pointer
on the native call stack. */
@ -4078,7 +4009,7 @@ LeaveTree(InterpState& state, VMSideExit* lr)
#endif
/* Keep track of the additional frames we put on the interpreter stack and the native
stack slots we consumed. */
++*state.inlineCallCountp;
++inlineCallCount;
++callstack;
stack += slots;
}
@ -4090,8 +4021,8 @@ LeaveTree(InterpState& state, VMSideExit* lr)
unsigned calldepth_slots = 0;
for (unsigned n = 0; n < calldepth; ++n) {
calldepth_slots += js_SynthesizeFrame(cx, *callstack[n]);
++*state.inlineCallCountp;
#ifdef DEBUG
++inlineCallCount;
#ifdef DEBUG
JSStackFrame* fp = cx->fp;
debug_only_v(printf("synthesized shallow frame for %s:%u@%u\n",
fp->script->filename, js_FramePCToLineNumber(cx, fp),
@ -4106,7 +4037,7 @@ LeaveTree(InterpState& state, VMSideExit* lr)
JSStackFrame* fp = cx->fp;
JS_ASSERT_IF(fp->flags & JSFRAME_POP_BLOCKS,
calldepth == 0 && state.jsframe_pop_blocks_set_on_entry);
calldepth == 0 && jsframe_pop_blocks_set_on_entry);
fp->blockChain = innermost->block;
/* If we are not exiting from an inlined frame the state->sp is spbase, otherwise spbase
@ -4117,8 +4048,8 @@ LeaveTree(InterpState& state, VMSideExit* lr)
fp->slots + fp->script->nfixed +
js_ReconstructStackDepth(cx, fp->script, fp->regs->pc) == fp->regs->sp);
#ifdef EXECUTE_TREE_TIMER
uint64 cycles = rdtsc() - state.startTime;
#if defined(JS_JIT_SPEW) && (defined(NANOJIT_IA32) || (defined(NANOJIT_AMD64) && defined(__GNUC__)))
uint64 cycles = rdtsc() - start;
#elif defined(JS_JIT_SPEW)
uint64 cycles = 0;
#endif
@ -4139,21 +4070,19 @@ LeaveTree(InterpState& state, VMSideExit* lr)
which we don't have any type information available in the side exit. We merge in this
information from the entry type-map. See also comment in the constructor of TraceRecorder
why this is always safe to do. */
TreeInfo* outermostTree = state.outermostTree;
uint16* gslots = outermostTree->globalSlots->data();
unsigned ngslots = outermostTree->globalSlots->length();
JS_ASSERT(ngslots == outermostTree->nGlobalTypes());
unsigned exit_gslots = innermost->numGlobalSlots;
JS_ASSERT(exit_gslots <= ngslots);
JS_ASSERT(ngslots == ti->nGlobalTypes());
JS_ASSERT(ngslots >= exit_gslots);
uint8* globalTypeMap = getGlobalTypeMap(innermost);
if (exit_gslots < ngslots)
mergeTypeMaps(&globalTypeMap, &exit_gslots, outermostTree->globalTypeMap(), ngslots,
mergeTypeMaps(&globalTypeMap, &exit_gslots, ti->globalTypeMap(), ngslots,
(uint8*)alloca(sizeof(uint8) * ngslots));
JS_ASSERT(exit_gslots == outermostTree->globalSlots->length());
JS_ASSERT(exit_gslots == ti->nGlobalTypes());
/* write back interned globals */
FlushNativeGlobalFrame(cx, exit_gslots, gslots, globalTypeMap, state.global);
JS_ASSERT(*(uint64*)&state.global[STOBJ_NSLOTS(state.globalObj)] == 0xdeadbeefdeadbeefLL);
FlushNativeGlobalFrame(cx, exit_gslots, gslots, globalTypeMap, global);
JS_ASSERT_IF(ngslots != 0, globalFrameSize == STOBJ_NSLOTS(globalObj));
JS_ASSERT(*(uint64*)&global[globalFrameSize] == 0xdeadbeefdeadbeefLL);
/* write back native stack frame */
#ifdef DEBUG
@ -4178,7 +4107,7 @@ LeaveTree(InterpState& state, VMSideExit* lr)
AUDIT(timeoutIntoInterpreter);
#endif
state.innermost = innermost;
return innermost;
}
JS_REQUIRES_STACK bool
@ -4622,16 +4551,14 @@ JS_FORCES_STACK JSStackFrame *
js_GetTopStackFrame(JSContext *cx)
{
if (JS_ON_TRACE(cx)) {
/* It's a bug if a non-FAIL_STATUS builtin gets here. */
JS_ASSERT(cx->bailExit);
JS_TRACE_MONITOR(cx).onTrace = false;
JS_TRACE_MONITOR(cx).prohibitRecording = true;
LeaveTree(*cx->interpState, cx->bailExit);
#ifdef DEBUG
cx->bailExit = NULL;
/*
* TODO: If executing a tree, synthesize stack frames and bail off
* trace. See bug 462027.
*/
debug_only_v(printf("Internal error: getting top stack frame on trace.\n"));
#ifdef DEBUG_jason
JS_ASSERT(0);
#endif
cx->builtinStatus |= JSBUILTIN_BAILED;
}
return cx->fp;
}
@ -6672,21 +6599,6 @@ success:
JS_ASSERT(args[0] != (LIns *)0xcdcdcdcd);
#endif
if (JSTN_ERRTYPE(known) == FAIL_STATUS) {
// This needs to capture the pre-call state of the stack. So do not set
// pendingTraceableNative before taking this snapshot.
JS_ASSERT(!pendingTraceableNative);
// Take snapshot for deep LeaveTree and store it in cx->bailExit.
LIns* rec_ins = snapshot(DEEP_BAIL_EXIT);
GuardRecord* rec = (GuardRecord *) rec_ins->payload();
JS_ASSERT(rec->exit);
lir->insStorei(INS_CONSTPTR(rec->exit), cx_ins, offsetof(JSContext, bailExit));
// Tell nanojit not to discard or defer stack writes before this call.
lir->insGuard(LIR_xbarrier, rec_ins, rec_ins);
}
LIns* res_ins = lir->insCall(known->builtin, args);
if (!constructing)
rval_ins = res_ins;
@ -6705,9 +6617,6 @@ success:
case FAIL_VOID:
guard(false, lir->ins2i(LIR_eq, res_ins, JSVAL_TO_BOOLEAN(JSVAL_VOID)), OOM_EXIT);
break;
case FAIL_COOKIE:
guard(false, lir->ins2(LIR_eq, res_ins, INS_CONST(JSVAL_ERROR_COOKIE)), OOM_EXIT);
break;
default:;
}
set(&fval, res_ins);
@ -6991,7 +6900,6 @@ GetProperty_tn(JSContext *cx, jsbytecode *pc, JSObject *obj, JSString *name)
BEGIN_PC_HINT(pc);
if (!js_ValueToStringId(cx, STRING_TO_JSVAL(name), &id) ||
!OBJ_GET_PROPERTY(cx, obj, id, &v)) {
cx->builtinStatus |= JSBUILTIN_ERROR;
v = JSVAL_ERROR_COOKIE;
}
END_PC_HINT();
@ -7019,15 +6927,11 @@ GetElement_tn(JSContext* cx, jsbytecode *pc, JSObject* obj, int32 index)
jsval v;
jsid id;
if (!js_Int32ToId(cx, index, &id)) {
cx->builtinStatus |= JSBUILTIN_ERROR;
if (!js_Int32ToId(cx, index, &id))
return JSVAL_ERROR_COOKIE;
}
BEGIN_PC_HINT(pc);
if (!OBJ_GET_PROPERTY(cx, obj, id, &v)) {
cx->builtinStatus |= JSBUILTIN_ERROR;
if (!OBJ_GET_PROPERTY(cx, obj, id, &v))
v = JSVAL_ERROR_COOKIE;
}
END_PC_HINT();
return v;
}
@ -7135,9 +7039,9 @@ SetProperty_tn(JSContext* cx, JSObject* obj, JSString* idstr, jsval v)
if (!js_ValueToStringId(cx, STRING_TO_JSVAL(idstr), &id) ||
!OBJ_SET_PROPERTY(cx, obj, id, &v)) {
cx->builtinStatus |= JSBUILTIN_ERROR;
return JSVAL_TO_BOOLEAN(JSVAL_VOID);
}
return JSVAL_TO_BOOLEAN(JSVAL_VOID);
return JSVAL_TRUE;
}
static JSBool
@ -7164,8 +7068,8 @@ SetElement_tn(JSContext* cx, JSObject* obj, int32 index, jsval v)
jsid id;
if (!js_Int32ToId(cx, index, &id) || !OBJ_SET_PROPERTY(cx, obj, id, &v))
cx->builtinStatus |= JSBUILTIN_ERROR;
return JSVAL_TO_BOOLEAN(JSVAL_VOID);
return JSVAL_TO_BOOLEAN(JSVAL_VOID);
return JSVAL_TRUE;
}
JS_DEFINE_TRCINFO_1(SetProperty,
@ -7465,31 +7369,18 @@ JS_REQUIRES_STACK bool
TraceRecorder::record_FastNativeCallComplete()
{
JS_ASSERT(pendingTraceableNative);
/* At this point the generated code has already called the native function
and we can no longer fail back to the original pc location (JSOP_CALL)
because that would cause the interpreter to re-execute the native
function, which might have side effects.
Instead, snapshot(), which is invoked from unbox_jsval() below, will see
that we are currently parked on a traceable native's JSOP_CALL
instruction, and it will advance the pc to restore by the length of the
current opcode. If the native's return type is jsval, snapshot() will
also indicate in the type map that the element on top of the stack is a
boxed value which doesn't need to be boxed if the type guard generated
by unbox_jsval() fails. */
if (JSTN_ERRTYPE(pendingTraceableNative) == FAIL_STATUS) {
#ifdef DEBUG
// Keep cx->bailExit null when it's invalid.
lir->insStorei(INS_CONSTPTR(NULL), cx_ins, (int) offsetof(JSContext, bailExit));
#endif
guard(true,
lir->ins_eq0(
lir->insLoad(LIR_ld, cx_ins, (int) offsetof(JSContext, builtinStatus))),
STATUS_EXIT);
}
Instead, snapshot(), which is invoked from unbox_jsval(), will see that
we are currently parked on a traceable native's JSOP_CALL instruction,
and it will advance the pc to restore by the length of the current
opcode, and indicate in the type map that the element on top of the
stack is a boxed value which doesn't need to be boxed if the type guard
generated by unbox_jsval() fails. */
JS_ASSERT(*cx->fp->regs->pc == JSOP_CALL ||
*cx->fp->regs->pc == JSOP_APPLY);
@ -7497,13 +7388,16 @@ TraceRecorder::record_FastNativeCallComplete()
LIns* v_ins = get(&v);
bool ok = true;
if (pendingTraceableNative->flags & JSTN_UNBOX_AFTER) {
switch (JSTN_ERRTYPE(pendingTraceableNative)) {
case FAIL_JSVAL:
unbox_jsval(v, v_ins);
set(&v, v_ins);
} else if (JSTN_ERRTYPE(pendingTraceableNative) == FAIL_NEG) {
break;
case FAIL_NEG:
/* Already added i2f in functionCall. */
JS_ASSERT(JSVAL_IS_NUMBER(v));
} else {
break;
default:
/* Convert the result to double if the builtin returns int32. */
if (JSVAL_IS_NUMBER(v) &&
(pendingTraceableNative->builtin->_argtypes & 3) == nanojit::ARGSIZE_LO) {
@ -9073,14 +8967,8 @@ static JSObject* FASTCALL
ObjectToIterator_tn(JSContext* cx, JSObject *obj, int32 flags)
{
jsval v = OBJECT_TO_JSVAL(obj);
if (!js_ValueToIterator(cx, flags, &v)) {
cx->builtinStatus |= JSBUILTIN_ERROR;
if (!js_ValueToIterator(cx, flags, &v))
return NULL;
}
if (OBJ_GET_CLASS(cx, JSVAL_TO_OBJECT(v)) == &js_GeneratorClass) {
js_LeaveTrace(cx);
return NULL;
}
return JSVAL_TO_OBJECT(v);
}
@ -9094,17 +8982,15 @@ static jsval FASTCALL
CallIteratorNext_tn(JSContext* cx, JSObject* iterobj)
{
jsval v;
if (!js_CallIteratorNext(cx, iterobj, &v)) {
cx->builtinStatus |= JSBUILTIN_ERROR;
if (!js_CallIteratorNext(cx, iterobj, &v))
return JSVAL_ERROR_COOKIE;
}
return v;
}
JS_DEFINE_TRCINFO_1(ObjectToIterator,
(3, (static, OBJECT_FAIL, ObjectToIterator_tn, CONTEXT, THIS, INT32, 0, 0)))
(3, (static, OBJECT_FAIL_NULL, ObjectToIterator_tn, CONTEXT, THIS, INT32, 0, 0)))
JS_DEFINE_TRCINFO_1(CallIteratorNext,
(2, (static, JSVAL_FAIL, CallIteratorNext_tn, CONTEXT, THIS, 0, 0)))
(2, (static, JSVAL_FAIL, CallIteratorNext_tn, CONTEXT, THIS, 0, 0)))
static const struct BuiltinFunctionInfo {
JSTraceableNative *tn;

View file

@ -213,9 +213,7 @@ enum ExitType {
OOM_EXIT,
OVERFLOW_EXIT,
UNSTABLE_LOOP_EXIT,
TIMEOUT_EXIT,
DEEP_BAIL_EXIT,
STATUS_EXIT
TIMEOUT_EXIT
};
struct VMSideExit : public nanojit::SideExit
@ -246,18 +244,20 @@ static inline uint8* getFullTypeMap(nanojit::SideExit* exit)
return getStackTypeMap(exit);
}
struct FrameInfo {
JSObject* callee; // callee function object
JSObject* block; // caller block chain head
intptr_t ip_adj; // caller script-based pc index and imacro pc
union {
struct {
uint16 spdist; // distance from fp->slots to fp->regs->sp at JSOP_CALL
uint16 argc; // actual argument count, may be < fun->nargs
} s;
uint32 word; // for spdist/argc LIR store in record_JSOP_CALL
};
};
struct InterpState
{
void* sp; /* native stack pointer, stack[0] is spbase[0] */
void* rp; /* call stack pointer */
void* gp; /* global frame pointer */
JSContext *cx; /* current VM context handle */
void* eos; /* first unusable word after the native stack */
void* eor; /* first unusable word after the call stack */
VMSideExit* lastTreeExitGuard; /* guard we exited on during a tree call */
VMSideExit* lastTreeCallGuard; /* guard we want to grow from if the tree
call exit guard mismatched */
void* rpAtLastTreeCall; /* value of rp at innermost tree call guard */
JSObject* globalObj; /* pointer to the global object */
};
struct UnstableExit
{
@ -309,37 +309,18 @@ public:
}
};
#if defined(JS_JIT_SPEW) && (defined(NANOJIT_IA32) || (defined(NANOJIT_AMD64) && defined(__GNUC__)))
# define EXECUTE_TREE_TIMER
#endif
struct InterpState
{
double *sp; // native stack pointer, stack[0] is spbase[0]
double *global; // global frame pointer
void *rp; // call stack pointer
JSContext *cx; // current VM context handle
double *eos; // first unusable word after the native stack
void *eor; // first unusable word after the call stack
VMSideExit* lastTreeExitGuard; // guard we exited on during a tree call
VMSideExit* lastTreeCallGuard; // guard we want to grow from if the tree
// call exit guard mismatched
void* rpAtLastTreeCall; // value of rp at innermost tree call guard
TreeInfo* outermostTree; // the outermost tree we initially invoked
JSObject* globalObj; // pointer to the global object
double* stackBase; // native stack base
FrameInfo** callstackBase; // call stack base
uintN* inlineCallCountp; // inline call count counter
VMSideExit** innermostNestedGuardp;
void* stackMark;
VMSideExit* innermost;
#ifdef EXECUTE_TREE_TIMER
uint64 startTime;
#endif
#ifdef DEBUG
bool jsframe_pop_blocks_set_on_entry;
#endif
};
struct FrameInfo {
JSObject* callee; // callee function object
JSObject* block; // caller block chain head
intptr_t ip_adj; // caller script-based pc index and imacro pc
union {
struct {
uint16 spdist; // distance from fp->slots to fp->regs->sp at JSOP_CALL
uint16 argc; // actual argument count, may be < fun->nargs
} s;
uint32 word; // for spdist/argc LIR store in record_JSOP_CALL
};
};
enum JSMonitorRecordingStatus {
JSMRS_CONTINUE,

View file

@ -976,10 +976,8 @@ static jsval JS_FASTCALL
Print_tn(JSContext *cx, JSString *str)
{
char *bytes = JS_EncodeString(cx, str);
if (!bytes) {
cx->builtinStatus |= JSBUILTIN_ERROR;
return JSVAL_VOID;
}
if (!bytes)
return JSVAL_ERROR_COOKIE;
fprintf(gOutFile, "%s\n", bytes);
JS_free(cx, bytes);
fflush(gOutFile);

View file

@ -2094,84 +2094,6 @@ function testArrayPushPop() {
testArrayPushPop.expected = "55,45";
test(testArrayPushPop);
function testSlowArrayPop() {
var a = [];
for (var i = 0; i < RUNLOOP; i++)
a[i] = [0];
a[RUNLOOP-1].__defineGetter__("0", function () { return 'xyzzy'; });
var last;
for (var i = 0; i < RUNLOOP; i++)
last = a[i].pop(); // reenters interpreter in getter
return last;
}
testSlowArrayPop.expected = 'xyzzy';
test(testSlowArrayPop);
// Same thing but it needs to reconstruct multiple stack frames (so,
// multiple functions called inside the loop)
function testSlowArrayPopMultiFrame() {
var a = [];
for (var i = 0; i < RUNLOOP; i++)
a[i] = [0];
a[RUNLOOP-1].__defineGetter__("0", function () { return 23; });
function child(a, i) {
return a[i].pop(); // reenters interpreter in getter
}
function parent(a, i) {
return child(a, i);
}
function gramps(a, i) {
return parent(a, i);
}
var last;
for (var i = 0; i < RUNLOOP; i++)
last = gramps(a, i);
return last;
}
testSlowArrayPopMultiFrame.expected = 23;
test(testSlowArrayPopMultiFrame);
// Same thing but nested trees, each reconstructing one or more stack frames
// (so, several functions with loops, such that the loops end up being
// nested though they are not lexically nested)
function testSlowArrayPopNestedTrees() {
var a = [];
for (var i = 0; i < RUNLOOP; i++)
a[i] = [0];
a[RUNLOOP-1].__defineGetter__("0", function () { return 3.14159 });
function child(a, i, j, k) {
var last = 2.71828;
for (var l = 0; l < RUNLOOP; l++)
if (i == RUNLOOP-1 && j == RUNLOOP-1 && k == RUNLOOP-1)
last = a[l].pop(); // reenters interpreter in getter
return last;
}
function parent(a, i, j) {
var last;
for (var k = 0; k < RUNLOOP; k++)
last = child(a, i, j, k);
return last;
}
function gramps(a, i) {
var last;
for (var j = 0; j < RUNLOOP; j++)
last = parent(a, i, j);
return last;
}
var last;
for (var i = 0; i < RUNLOOP; i++)
last = gramps(a, i);
return last;
}
testSlowArrayPopNestedTrees.expected = 3.14159;
test(testSlowArrayPopNestedTrees);
function testResumeOp() {
var a = [1,"2",3,"4",5,"6",7,"8",9,"10",11,"12",13,"14",15,"16"];
var x = "";
@ -4202,6 +4124,7 @@ function testInterpreterReentry5() {
}
test(testInterpreterReentry5);
/* // These tests should pass but currently crash, pending bug 462027.
function testInterpreterReentry6() {
var obj = {a:1, b:1, c:1, d:1, set e(x) { this._e = x; }};
for (var p in obj)
@ -4223,6 +4146,7 @@ function testInterpreterReentry7() {
}
testInterpreterReentry7.expected = "grue bleen";
test(testInterpreterReentry7);
*/
/*****************************************************************************
* *