jaegermonkey. what. a=arewefastyet

Robert Sayre 2010-09-11 15:06:33 -04:00
Parents acb09a662d 2fb107ae39
Commit 411ce23490
518 changed files: 58134 additions and 11086 deletions

View file

@ -1 +1 @@
{"title":"","id":1,"dateAdded":1233157910552624,"lastModified":1233157955206833,"type":"text/x-moz-place-container","root":"placesRoot","children":[{"title":"Bookmarks Menu","id":2,"parent":1,"dateAdded":1233157910552624,"lastModified":1233157993171424,"type":"text/x-moz-place-container","root":"bookmarksMenuFolder","children":[{"title":"examplejson","id":27,"parent":2,"dateAdded":1233157972101126,"lastModified":1233157984999673,"type":"text/x-moz-place","uri":"http://example.com/"}]},{"index":1,"title":"Bookmarks Toolbar","id":3,"parent":1,"dateAdded":1233157910552624,"lastModified":1233157972101126,"annos":[{"name":"bookmarkProperties/description","flags":0,"expires":4,"mimeType":null,"type":3,"value":"Add bookmarks to this folder to see them displayed on the Bookmarks Toolbar"}],"type":"text/x-moz-place-container","root":"toolbarFolder","children":[{"title":"examplejson","id":26,"parent":3,"dateAdded":1233157972101126,"lastModified":1233157984999673,"type":"text/x-moz-place","uri":"http://example.com/"}]},{"index":2,"title":"Tags","id":4,"parent":1,"dateAdded":1233157910552624,"lastModified":1233157910582667,"type":"text/x-moz-place-container","root":"tagsFolder","children":[]},{"index":3,"title":"Unsorted Bookmarks","id":5,"parent":1,"dateAdded":1233157910552624,"lastModified":1233157911033315,"type":"text/x-moz-place-container","root":"unfiledBookmarksFolder","children":[]}]}
{"title":"","id":1,"dateAdded":1233157910552624,"lastModified":1233157955206833,"type":"text/x-moz-place-container","root":"placesRoot","children":[{"title":"Bookmarks Menu","id":2,"parent":1,"dateAdded":1233157910552624,"lastModified":1233157993171424,"type":"text/x-moz-place-container","root":"bookmarksMenuFolder","children":[{"title":"examplejson","id":27,"parent":2,"dateAdded":1233157972101126,"lastModified":1233157984999673,"type":"text/x-moz-place","uri":"http://example.com/"}]},{"index":1,"title":"Bookmarks Toolbar","id":3,"parent":1,"dateAdded":1233157910552624,"lastModified":1233157972101126,"annos":[{"name":"bookmarkProperties/description","flags":0,"expires":4,"mimeType":null,"type":3,"value":"Add bookmarks to this folder to see them displayed on the Bookmarks Toolbar"}],"type":"text/x-moz-place-container","root":"toolbarFolder","children":[{"title":"examplejson","id":26,"parent":3,"dateAdded":1233157972101126,"lastModified":1233157984999673,"type":"text/x-moz-place","uri":"http://example.com/"}]},{"index":2,"title":"Tags","id":4,"parent":1,"dateAdded":1233157910552624,"lastModified":1233157910582667,"type":"text/x-moz-place-container","root":"tagsFolder","children":[]},{"index":3,"title":"Unsorted Bookmarks","id":5,"parent":1,"dateAdded":1233157910552624,"lastModified":1233157911033315,"type":"text/x-moz-place-container","root":"unfiledBookmarksFolder","children":[]},]}

View file

@ -345,7 +345,8 @@ user_pref("accessibility.typeaheadfind.autostart", false);
user_pref("javascript.options.showInConsole", true);
user_pref("layout.debug.enable_data_xbl", true);
user_pref("browser.EULA.override", true);
user_pref("javascript.options.jit.content", true);
user_pref("javascript.options.tracejit.content", true);
user_pref("javascript.options.methodjit.content", true);
user_pref("gfx.color_management.force_srgb", true);
user_pref("network.manage-offline-status", false);
user_pref("test.mousescroll", true);

View file

@ -111,11 +111,14 @@ getUTF8StringArgument(JSContext *cx, JSObject *obj, PRUint16 argNum,
}
static JSBool
netscape_security_isPrivilegeEnabled(JSContext *cx, JSObject *obj, uintN argc,
jsval *argv, jsval *rval)
netscape_security_isPrivilegeEnabled(JSContext *cx, uintN argc, jsval *vp)
{
JSObject *obj = JS_THIS_OBJECT(cx, vp);
if (!obj)
return JS_FALSE;
JSBool result = JS_FALSE;
char *cap = getStringArgument(cx, obj, 0, argc, argv);
char *cap = getStringArgument(cx, obj, 0, argc, JS_ARGV(cx, vp));
if (cap) {
nsresult rv;
nsCOMPtr<nsIScriptSecurityManager> securityManager =
@ -128,16 +131,19 @@ netscape_security_isPrivilegeEnabled(JSContext *cx, JSObject *obj, uintN argc,
result = JS_FALSE;
}
}
*rval = BOOLEAN_TO_JSVAL(result);
JS_SET_RVAL(cx, vp, BOOLEAN_TO_JSVAL(result));
return JS_TRUE;
}
static JSBool
netscape_security_enablePrivilege(JSContext *cx, JSObject *obj, uintN argc,
jsval *argv, jsval *rval)
netscape_security_enablePrivilege(JSContext *cx, uintN argc, jsval *vp)
{
char *cap = getStringArgument(cx, obj, 0, argc, argv);
JSObject *obj = JS_THIS_OBJECT(cx, vp);
if (!obj)
return JS_FALSE;
char *cap = getStringArgument(cx, obj, 0, argc, JS_ARGV(cx, vp));
if (!cap)
return JS_FALSE;
@ -152,14 +158,18 @@ netscape_security_enablePrivilege(JSContext *cx, JSObject *obj, uintN argc,
rv = securityManager->EnableCapability(cap);
if (NS_FAILED(rv))
return JS_FALSE;
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return JS_TRUE;
}
static JSBool
netscape_security_disablePrivilege(JSContext *cx, JSObject *obj, uintN argc,
jsval *argv, jsval *rval)
netscape_security_disablePrivilege(JSContext *cx, uintN argc, jsval *vp)
{
char *cap = getStringArgument(cx, obj, 0, argc, argv);
JSObject *obj = JS_THIS_OBJECT(cx, vp);
if (!obj)
return JS_FALSE;
char *cap = getStringArgument(cx, obj, 0, argc, JS_ARGV(cx, vp));
if (!cap)
return JS_FALSE;
@ -174,14 +184,18 @@ netscape_security_disablePrivilege(JSContext *cx, JSObject *obj, uintN argc,
rv = securityManager->DisableCapability(cap);
if (NS_FAILED(rv))
return JS_FALSE;
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return JS_TRUE;
}
static JSBool
netscape_security_revertPrivilege(JSContext *cx, JSObject *obj, uintN argc,
jsval *argv, jsval *rval)
netscape_security_revertPrivilege(JSContext *cx, uintN argc, jsval *vp)
{
char *cap = getStringArgument(cx, obj, 0, argc, argv);
JSObject *obj = JS_THIS_OBJECT(cx, vp);
if (!obj)
return JS_FALSE;
char *cap = getStringArgument(cx, obj, 0, argc, JS_ARGV(cx, vp));
if (!cap)
return JS_FALSE;
@ -196,17 +210,21 @@ netscape_security_revertPrivilege(JSContext *cx, JSObject *obj, uintN argc,
rv = securityManager->RevertCapability(cap);
if (NS_FAILED(rv))
return JS_FALSE;
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return JS_TRUE;
}
static JSBool
netscape_security_setCanEnablePrivilege(JSContext *cx, JSObject *obj, uintN argc,
jsval *argv, jsval *rval)
netscape_security_setCanEnablePrivilege(JSContext *cx, uintN argc, jsval *vp)
{
JSObject *obj = JS_THIS_OBJECT(cx, vp);
if (!obj)
return JS_FALSE;
if (argc < 2) return JS_FALSE;
nsCAutoString principalFingerprint;
getUTF8StringArgument(cx, obj, 0, argc, argv, principalFingerprint);
char *cap = getStringArgument(cx, obj, 1, argc, argv);
getUTF8StringArgument(cx, obj, 0, argc, JS_ARGV(cx, vp), principalFingerprint);
char *cap = getStringArgument(cx, obj, 1, argc, JS_ARGV(cx, vp));
if (principalFingerprint.IsEmpty() || !cap)
return JS_FALSE;
@ -222,15 +240,19 @@ netscape_security_setCanEnablePrivilege(JSContext *cx, JSObject *obj, uintN argc
nsIPrincipal::ENABLE_GRANTED);
if (NS_FAILED(rv))
return JS_FALSE;
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return JS_TRUE;
}
static JSBool
netscape_security_invalidate(JSContext *cx, JSObject *obj, uintN argc,
jsval *argv, jsval *rval)
netscape_security_invalidate(JSContext *cx, uintN argc, jsval *vp)
{
JSObject *obj = JS_THIS_OBJECT(cx, vp);
if (!obj)
return JS_FALSE;
nsCAutoString principalFingerprint;
getUTF8StringArgument(cx, obj, 0, argc, argv, principalFingerprint);
getUTF8StringArgument(cx, obj, 0, argc, JS_ARGV(cx, vp), principalFingerprint);
if (principalFingerprint.IsEmpty())
return JS_FALSE;
@ -247,19 +269,20 @@ netscape_security_invalidate(JSContext *cx, JSObject *obj, uintN argc,
nsIPrincipal::ENABLE_GRANTED);
if (NS_FAILED(rv))
return JS_FALSE;
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return JS_TRUE;
}
static JSFunctionSpec PrivilegeManager_static_methods[] = {
{ "isPrivilegeEnabled", netscape_security_isPrivilegeEnabled, 1,0,0},
{ "enablePrivilege", netscape_security_enablePrivilege, 1,0,0},
{ "disablePrivilege", netscape_security_disablePrivilege, 1,0,0},
{ "revertPrivilege", netscape_security_revertPrivilege, 1,0,0},
{ "isPrivilegeEnabled", netscape_security_isPrivilegeEnabled, 1,0},
{ "enablePrivilege", netscape_security_enablePrivilege, 1,0},
{ "disablePrivilege", netscape_security_disablePrivilege, 1,0},
{ "revertPrivilege", netscape_security_revertPrivilege, 1,0},
//-- System Cert Functions
{ "setCanEnablePrivilege", netscape_security_setCanEnablePrivilege,
2,0,0},
{ "invalidate", netscape_security_invalidate, 1,0,0},
{nsnull,nsnull,0,0,0}
2,0},
{ "invalidate", netscape_security_invalidate, 1,0},
{nsnull,nsnull,0,0}
};
/*
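
Every hunk in this file applies the same mechanical conversion that recurs throughout the patch: the old five-parameter JSNative shape (cx, obj, argc, argv, rval) becomes the fast-native shape (cx, argc, vp), with the this-object, the argument vector, the callee (formerly argv[-2]) and the return value reached through JS_THIS_OBJECT, JS_ARGV, JS_CALLEE and JS_SET_RVAL, and with JSFunctionSpec rows dropping their fifth (extra-argument-count) initializer. A minimal sketch of the pattern, using a hypothetical SayHello native that is not part of the patch:

#include "jsapi.h"

// Post-merge native: |this|, the arguments and the return value all live in vp.
static JSBool
SayHello(JSContext *cx, uintN argc, jsval *vp)
{
    JSObject *obj = JS_THIS_OBJECT(cx, vp);   // replaces the old |obj| parameter
    if (!obj)
        return JS_FALSE;
    // Arguments, if any, are JS_ARGV(cx, vp)[0..argc-1]; the callee
    // (the old argv[-2]) is JS_CALLEE(cx, vp).
    JS_SET_RVAL(cx, vp, JSVAL_VOID);          // replaces |*rval = ...|
    return JS_TRUE;
}

// JSFunctionSpec initializers shrink from five fields to four; the trailing
// "extra arguments" count is gone.
static JSFunctionSpec sHelloFunctions[] = {
    { "sayHello", SayHello, 0, 0 },
    { NULL, NULL, 0, 0 }
};
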

View file

@ -1541,25 +1541,17 @@ nsIDOMDocument *
nsContentUtils::GetDocumentFromCaller()
{
JSContext *cx = nsnull;
sThreadJSContextStack->Peek(&cx);
JSObject *obj = nsnull;
sXPConnect->GetCaller(&cx, &obj);
NS_ASSERTION(cx && obj, "Caller ensures something is running");
nsIDOMDocument *doc = nsnull;
if (cx) {
JSObject *callee = nsnull;
JSStackFrame *fp = nsnull;
while (!callee && (fp = ::JS_FrameIterator(cx, &fp))) {
callee = ::JS_GetFrameCalleeObject(cx, fp);
}
nsCOMPtr<nsPIDOMWindow> win =
do_QueryInterface(nsJSUtils::GetStaticScriptGlobal(cx, callee));
if (win) {
doc = win->GetExtantDocument();
}
nsCOMPtr<nsPIDOMWindow> win =
do_QueryInterface(nsJSUtils::GetStaticScriptGlobal(cx, obj));
if (!win) {
return nsnull;
}
return doc;
return win->GetExtantDocument();
}
nsIDOMDocument *

content/canvas/test/webgl/00_testFIXME_list.txt Executable file → Normal file (0 line changes)
View file

View file

View file

@ -46,6 +46,8 @@ include $(topsrcdir)/config/rules.mk
# Disabled due to timeouts.
# test_bug563329.html
# Disabled due to lack of present support for JSD in JM
# test_bug448602.html
_TEST_FILES = \
test_bug226361.xhtml \
bug226361_iframe.xhtml \
@ -72,7 +74,6 @@ _TEST_FILES = \
test_bug426082.html \
test_bug443985.html \
test_bug447736.html \
test_bug448602.html \
test_bug450876.html \
test_bug456273.html \
test_bug457672.html \

View file

@ -25,7 +25,11 @@ var tokens = {
function gotPlayEvent(event) {
var v = event.target;
ok(tokens[v._state].indexOf(event.type) >= 0,
"Check expected event got " + event.type + " at " + v._state + " for " + v.src);
"Check expected event got " + event.type + " at " + v._state + " for " + v.src +
" uneval(event.type)=" + uneval(event.type) + " typeof(event.type)=" + typeof(event.type) +
" uneval(v._state)=" + uneval(v._state) + " typeof(v._state)=" + typeof(v._state) +
" tokens["+v._state+"]=" + tokens[v._state] +
" tokens["+v._state+"].indexOf(event.type)=" + tokens[v._state].indexOf(event.type));
v._state = event.type;
}

View file

@ -55,6 +55,7 @@
#include "nsContentUtils.h"
#include "nsDOMJSUtils.h"
#include "mozilla/Services.h"
#include "xpcpublic.h"
static NS_DEFINE_CID(kDOMScriptObjectFactoryCID, NS_DOM_SCRIPT_OBJECT_FACTORY_CID);
@ -324,9 +325,15 @@ nsXBLDocGlobalObject::EnsureScriptEnvironment(PRUint32 aLangID)
// we must apparently override that with our own (although it isn't clear
// why - see bug 339647)
JS_SetErrorReporter(cx, XBL_ProtoErrorReporter);
mJSObject = ::JS_NewGlobalObject(cx, &gSharedGlobalClass);
if (!mJSObject)
return nsnull;
nsIPrincipal *principal = GetPrincipal();
nsCString origin;
JSCompartment *compartment;
principal->GetOrigin(getter_Copies(origin));
rv = xpc_CreateGlobalObject(cx, &gSharedGlobalClass, origin, principal,
&mJSObject, &compartment);
NS_ENSURE_SUCCESS(rv, nsnull);
::JS_SetGlobalObject(cx, mJSObject);

View file

@ -65,6 +65,7 @@
#include "nsContentUtils.h"
#include "nsCCUncollectableMarker.h"
#include "nsDOMJSUtils.h" // for GetScriptContextFromJSContext
#include "xpcpublic.h"
static NS_DEFINE_CID(kDOMScriptObjectFactoryCID,
NS_DOM_SCRIPT_OBJECT_FACTORY_CID);
@ -729,9 +730,16 @@ nsXULPDGlobalObject::EnsureScriptEnvironment(PRUint32 lang_id)
// some special JS specific code we should abstract
JSContext *cx = (JSContext *)ctxNew->GetNativeContext();
JSAutoRequest ar(cx);
JSObject *newGlob = ::JS_NewGlobalObject(cx, &gSharedGlobalClass);
if (!newGlob)
return nsnull;
nsIPrincipal *principal = GetPrincipal();
nsCString origin;
JSObject *newGlob;
JSCompartment *compartment;
principal->GetOrigin(getter_Copies(origin));
rv = xpc_CreateGlobalObject(cx, &gSharedGlobalClass, origin, principal,
&newGlob, &compartment);
NS_ENSURE_SUCCESS(rv, nsnull);
::JS_SetGlobalObject(cx, newGlob);
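
This hunk and the nsXBLDocGlobalObject hunk above make the same substitution: a bare JS_NewGlobalObject call becomes xpc_CreateGlobalObject, which places the new global in a compartment keyed by the principal's origin. A condensed sketch of the shared shape of the two call sites, assuming only the xpc_CreateGlobalObject signature visible in these hunks (error handling trimmed):

#include "jsapi.h"
#include "xpcpublic.h"
#include "nsIPrincipal.h"
#include "nsString.h"

// Create a compartment-aware global for |principal| with class |clasp|,
// mirroring the two call sites above.
static JSObject*
CreatePrincipalGlobal(JSContext *cx, JSClass *clasp, nsIPrincipal *principal)
{
    nsCString origin;
    principal->GetOrigin(getter_Copies(origin));  // the origin keys the compartment

    JSObject *global = nsnull;
    JSCompartment *compartment = nsnull;
    nsresult rv = xpc_CreateGlobalObject(cx, clasp, origin, principal,
                                         &global, &compartment);
    if (NS_FAILED(rv))
        return nsnull;

    ::JS_SetGlobalObject(cx, global);
    return global;
}
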

View file

@ -6553,10 +6553,13 @@ nsCommonWindowSH::GlobalResolve(nsGlobalWindow *aWin, JSContext *cx,
// Native code for window._content getter, this simply maps
// window._content to window.content for backwards compatibility only.
static JSBool
ContentWindowGetter(JSContext *cx, JSObject *obj, uintN argc, jsval *argv,
jsval *rval)
ContentWindowGetter(JSContext *cx, uintN argc, jsval *vp)
{
return ::JS_GetProperty(cx, obj, "content", rval);
JSObject *obj = JS_THIS_OBJECT(cx, vp);
if (!obj)
return JS_FALSE;
return ::JS_GetProperty(cx, obj, "content", vp);
}
PRBool
@ -8749,16 +8752,20 @@ ResolveImpl(JSContext *cx, nsIXPConnectWrappedNative *wrapper, jsid id,
// static
JSBool
nsHTMLDocumentSH::DocumentOpen(JSContext *cx, JSObject *obj, uintN argc,
jsval *argv, jsval *rval)
nsHTMLDocumentSH::DocumentOpen(JSContext *cx, uintN argc, jsval *vp)
{
JSObject *obj = JS_THIS_OBJECT(cx, vp);
if (!obj)
return JS_FALSE;
jsval *argv = JS_ARGV(cx, vp);
if (argc > 2) {
JSObject *global = ::JS_GetGlobalForObject(cx, obj);
// DOM0 quirk that makes document.open() call window.open() if
// called with 3 or more arguments.
return ::JS_CallFunctionName(cx, global, "open", argc, argv, rval);
return ::JS_CallFunctionName(cx, global, "open", argc, JS_ARGV(cx, vp), vp);
}
nsCOMPtr<nsISupports> native = do_QueryWrapper(cx, obj);
@ -8811,7 +8818,7 @@ nsHTMLDocumentSH::DocumentOpen(JSContext *cx, JSObject *obj, uintN argc,
}
nsCOMPtr<nsIXPConnectJSObjectHolder> holder;
rv = WrapNative(cx, obj, retval, PR_FALSE, rval,
rv = WrapNative(cx, obj, retval, PR_FALSE, vp,
getter_AddRefs(holder));
NS_ASSERTION(NS_SUCCEEDED(rv), "Failed to wrap native!");
@ -9087,8 +9094,7 @@ nsHTMLDocumentSH::ReleaseDocument(JSContext *cx, JSObject *obj)
}
JSBool
nsHTMLDocumentSH::CallToGetPropMapper(JSContext *cx, JSObject *obj, uintN argc,
jsval *argv, jsval *rval)
nsHTMLDocumentSH::CallToGetPropMapper(JSContext *cx, uintN argc, jsval *vp)
{
// Handle document.all("foo") style access to document.all.
@ -9102,28 +9108,30 @@ nsHTMLDocumentSH::CallToGetPropMapper(JSContext *cx, JSObject *obj, uintN argc,
}
// Convert all types to string.
JSString *str = ::JS_ValueToString(cx, argv[0]);
JSString *str = ::JS_ValueToString(cx, JS_ARGV(cx, vp)[0]);
if (!str) {
return JS_FALSE;
}
JSObject *self;
if (::JS_TypeOfValue(cx, argv[-2]) == JSTYPE_FUNCTION) {
// If argv[-2] is a function, we're called through
if (::JS_TypeOfValue(cx, JS_CALLEE(cx, vp)) == JSTYPE_FUNCTION) {
// If the callee is a function, we're called through
// document.all.item() or something similar. In such a case, self
// is passed as obj.
self = obj;
self = JS_THIS_OBJECT(cx, vp);
if (!self)
return JS_FALSE;
} else {
// In other cases (i.e. document.all("foo")), self is passed as
// argv[-2].
// the callee
self = JSVAL_TO_OBJECT(argv[-2]);
self = JSVAL_TO_OBJECT(JS_CALLEE(cx, vp));
}
return ::JS_GetUCProperty(cx, self, ::JS_GetStringChars(str),
::JS_GetStringLength(str), rval);
::JS_GetStringLength(str), vp);
}

View file

@ -999,8 +999,7 @@ protected:
{
}
static JSBool DocumentOpen(JSContext *cx, JSObject *obj, uintN argc,
jsval *argv, jsval *rval);
static JSBool DocumentOpen(JSContext *cx, uintN argc, jsval *vp);
static JSBool GetDocumentAllNodeList(JSContext *cx, JSObject *obj,
nsDocument *doc,
nsContentList **nodeList);
@ -1011,8 +1010,7 @@ public:
static JSBool DocumentAllNewResolve(JSContext *cx, JSObject *obj, jsid id,
uintN flags, JSObject **objp);
static void ReleaseDocument(JSContext *cx, JSObject *obj);
static JSBool CallToGetPropMapper(JSContext *cx, JSObject *obj, uintN argc,
jsval *argv, jsval *rval);
static JSBool CallToGetPropMapper(JSContext *cx, uintN argc, jsval *vp);
static JSBool DocumentAllHelperGetProperty(JSContext *cx, JSObject *obj,
jsid id, jsval *vp);
static JSBool DocumentAllHelperNewResolve(JSContext *cx, JSObject *obj,

View file

@ -559,7 +559,7 @@ NS_ScriptErrorReporter(JSContext *cx,
if (!JSREPORT_IS_WARNING(report->flags)) {
JSStackFrame * fp = nsnull;
while ((fp = JS_FrameIterator(cx, &fp))) {
if (!JS_IsNativeFrame(cx, fp)) {
if (JS_IsScriptFrame(cx, fp)) {
return;
}
}
@ -1218,8 +1218,10 @@ static const char js_relimit_option_str[]= JS_OPTIONS_DOT_STR "relimit";
#ifdef JS_GC_ZEAL
static const char js_zeal_option_str[] = JS_OPTIONS_DOT_STR "gczeal";
#endif
static const char js_jit_content_str[] = JS_OPTIONS_DOT_STR "jit.content";
static const char js_jit_chrome_str[] = JS_OPTIONS_DOT_STR "jit.chrome";
static const char js_tracejit_content_str[] = JS_OPTIONS_DOT_STR "tracejit.content";
static const char js_tracejit_chrome_str[] = JS_OPTIONS_DOT_STR "tracejit.chrome";
static const char js_methodjit_content_str[] = JS_OPTIONS_DOT_STR "methodjit.content";
static const char js_methodjit_chrome_str[] = JS_OPTIONS_DOT_STR "methodjit.chrome";
int
nsJSContext::JSOptionChangedCallback(const char *pref, void *data)
@ -1239,22 +1241,32 @@ nsJSContext::JSOptionChangedCallback(const char *pref, void *data)
// XXX components be covered by the chrome pref instead of the content one?
nsCOMPtr<nsIDOMChromeWindow> chromeWindow(do_QueryInterface(global));
PRBool useJIT = nsContentUtils::GetBoolPref(chromeWindow ?
js_jit_chrome_str :
js_jit_content_str);
PRBool useTraceJIT = nsContentUtils::GetBoolPref(chromeWindow ?
js_tracejit_chrome_str :
js_tracejit_content_str);
PRBool useMethodJIT = nsContentUtils::GetBoolPref(chromeWindow ?
js_methodjit_chrome_str :
js_methodjit_content_str);
nsCOMPtr<nsIXULRuntime> xr = do_GetService(XULRUNTIME_SERVICE_CONTRACTID);
if (xr) {
PRBool safeMode = PR_FALSE;
xr->GetInSafeMode(&safeMode);
if (safeMode)
useJIT = PR_FALSE;
if (safeMode) {
useTraceJIT = PR_FALSE;
useMethodJIT = PR_FALSE;
}
}
if (useJIT)
if (useTraceJIT)
newDefaultJSOptions |= JSOPTION_JIT;
else
newDefaultJSOptions &= ~JSOPTION_JIT;
if (useMethodJIT)
newDefaultJSOptions |= JSOPTION_METHODJIT;
else
newDefaultJSOptions &= ~JSOPTION_METHODJIT;
#ifdef DEBUG
// In debug builds, warnings are enabled in chrome context if javascript.options.strict.debug is true
PRBool strictDebug = nsContentUtils::GetBoolPref(js_strict_debug_option_str);
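
The callback above now drives two independent engine options: JSOPTION_JIT for the trace JIT and the new JSOPTION_METHODJIT for the JaegerMonkey method JIT, each behind its own pref. Outside the pref machinery the same bits can be flipped directly on a context; a minimal standalone sketch, not part of the patch:

#include "jsapi.h"

// Enable both JITs on a context, mirroring what JSOptionChangedCallback
// computes from the tracejit/methodjit prefs above.
static void
EnableBothJits(JSContext *cx)
{
    uint32 options = JS_GetOptions(cx);
    options |= JSOPTION_JIT | JSOPTION_METHODJIT;
    JS_SetOptions(cx, options);
}
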
@ -3075,21 +3087,23 @@ static JSClass OptionsClass = {
#include "nsTraceMalloc.h"
static JSBool
TraceMallocDisable(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval)
TraceMallocDisable(JSContext *cx, uintN argc, jsval *vp)
{
NS_TraceMallocDisable();
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return JS_TRUE;
}
static JSBool
TraceMallocEnable(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval)
TraceMallocEnable(JSContext *cx, uintN argc, jsval *vp)
{
NS_TraceMallocEnable();
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return JS_TRUE;
}
static JSBool
TraceMallocOpenLogFile(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval)
TraceMallocOpenLogFile(JSContext *cx, uintN argc, jsval *vp)
{
int fd;
JSString *str;
@ -3098,7 +3112,7 @@ TraceMallocOpenLogFile(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, js
if (argc == 0) {
fd = -1;
} else {
str = JS_ValueToString(cx, argv[0]);
str = JS_ValueToString(cx, JS_ARGV(cx, vp)[0]);
if (!str)
return JS_FALSE;
filename = JS_GetStringBytes(str);
@ -3108,19 +3122,19 @@ TraceMallocOpenLogFile(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, js
return JS_FALSE;
}
}
*rval = INT_TO_JSVAL(fd);
JS_SET_RVAL(cx, vp, INT_TO_JSVAL(fd));
return JS_TRUE;
}
static JSBool
TraceMallocChangeLogFD(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval)
TraceMallocChangeLogFD(JSContext *cx, uintN argc, jsval *vp)
{
int32 fd, oldfd;
if (argc == 0) {
oldfd = -1;
} else {
if (!JS_ValueToECMAInt32(cx, argv[0], &fd))
if (!JS_ValueToECMAInt32(cx, JS_ARGV(cx, vp)[0], &fd))
return JS_FALSE;
oldfd = NS_TraceMallocChangeLogFD(fd);
if (oldfd == -2) {
@ -3128,44 +3142,46 @@ TraceMallocChangeLogFD(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, js
return JS_FALSE;
}
}
*rval = INT_TO_JSVAL(oldfd);
JS_SET_RVAL(cx, vp, INT_TO_JSVAL(oldfd));
return JS_TRUE;
}
static JSBool
TraceMallocCloseLogFD(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval)
TraceMallocCloseLogFD(JSContext *cx, uintN argc, jsval *vp)
{
int32 fd;
JS_SET_RVAL(cx, vp, JSVAL_VOID);
if (argc == 0)
return JS_TRUE;
if (!JS_ValueToECMAInt32(cx, argv[0], &fd))
if (!JS_ValueToECMAInt32(cx, JS_ARGV(cx, vp)[0], &fd))
return JS_FALSE;
NS_TraceMallocCloseLogFD((int) fd);
return JS_TRUE;
}
static JSBool
TraceMallocLogTimestamp(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval)
TraceMallocLogTimestamp(JSContext *cx, uintN argc, jsval *vp)
{
JSString *str;
const char *caption;
str = JS_ValueToString(cx, argv[0]);
str = JS_ValueToString(cx, argc ? JS_ARGV(cx, vp)[0] : JSVAL_VOID);
if (!str)
return JS_FALSE;
caption = JS_GetStringBytes(str);
NS_TraceMallocLogTimestamp(caption);
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return JS_TRUE;
}
static JSBool
TraceMallocDumpAllocations(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval)
TraceMallocDumpAllocations(JSContext *cx, uintN argc, jsval *vp)
{
JSString *str;
const char *pathname;
str = JS_ValueToString(cx, argv[0]);
str = JS_ValueToString(cx, argc ? JS_ARGV(cx, vp)[0] : JSVAL_VOID);
if (!str)
return JS_FALSE;
pathname = JS_GetStringBytes(str);
@ -3173,18 +3189,19 @@ TraceMallocDumpAllocations(JSContext *cx, JSObject *obj, uintN argc, jsval *argv
JS_ReportError(cx, "can't dump to %s: %s", pathname, strerror(errno));
return JS_FALSE;
}
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return JS_TRUE;
}
static JSFunctionSpec TraceMallocFunctions[] = {
{"TraceMallocDisable", TraceMallocDisable, 0, 0, 0},
{"TraceMallocEnable", TraceMallocEnable, 0, 0, 0},
{"TraceMallocOpenLogFile", TraceMallocOpenLogFile, 1, 0, 0},
{"TraceMallocChangeLogFD", TraceMallocChangeLogFD, 1, 0, 0},
{"TraceMallocCloseLogFD", TraceMallocCloseLogFD, 1, 0, 0},
{"TraceMallocLogTimestamp", TraceMallocLogTimestamp, 1, 0, 0},
{"TraceMallocDumpAllocations", TraceMallocDumpAllocations, 1, 0, 0},
{nsnull, nsnull, 0, 0, 0}
{"TraceMallocDisable", TraceMallocDisable, 0, 0},
{"TraceMallocEnable", TraceMallocEnable, 0, 0},
{"TraceMallocOpenLogFile", TraceMallocOpenLogFile, 1, 0},
{"TraceMallocChangeLogFD", TraceMallocChangeLogFD, 1, 0},
{"TraceMallocCloseLogFD", TraceMallocCloseLogFD, 1, 0},
{"TraceMallocLogTimestamp", TraceMallocLogTimestamp, 1, 0},
{"TraceMallocDumpAllocations", TraceMallocDumpAllocations, 1, 0},
{nsnull, nsnull, 0, 0}
};
#endif /* NS_TRACE_MALLOC */
@ -3267,11 +3284,11 @@ static JSFunctionSpec JProfFunctions[] = {
#ifdef MOZ_SHARK
static JSFunctionSpec SharkFunctions[] = {
{"startShark", js_StartShark, 0, 0, 0},
{"stopShark", js_StopShark, 0, 0, 0},
{"connectShark", js_ConnectShark, 0, 0, 0},
{"disconnectShark", js_DisconnectShark, 0, 0, 0},
{nsnull, nsnull, 0, 0, 0}
{"startShark", js_StartShark, 0, 0},
{"stopShark", js_StopShark, 0, 0},
{"connectShark", js_ConnectShark, 0, 0},
{"disconnectShark", js_DisconnectShark, 0, 0},
{nsnull, nsnull, 0, 0}
};
#endif

View file

@ -52,7 +52,7 @@ interface nsIScriptGlobalObject;
/**
* Encode and decode JSON text.
*/
[scriptable, uuid(6fcf09ee-87d0-42ec-a72a-8d60114e974f)]
[scriptable, uuid(a4d68b4e-0c0b-4c7c-b540-ef2f9834171f)]
interface nsIJSON : nsISupports
{
AString encode(/* in JSObject value */);
@ -71,4 +71,25 @@ interface nsIJSON : nsISupports
// Make sure you GCroot the result of this function before using it.
[noscript] jsval decodeToJSVal(in AString str, in JSContext cx);
/*
* Decode a JSON string, but also accept some strings in non-JSON format, as
* the decoding methods here did previously before tightening.
*
* This method is provided only as a temporary transition path for users of
* the old code who depended on the ability to decode leniently; new users
* should use the non-legacy decoding methods.
*
* @param str the string to parse
*/
void /* JSObject */ legacyDecode(in AString str);
/* Identical to legacyDecode, but decode the contents of stream. */
void /* JSObject */ legacyDecodeFromStream(in nsIInputStream stream,
in long contentLength);
/* Identical to legacyDecode, but decode into a jsval. */
// Make sure you GCroot the result of this function before using it.
[noscript] jsval legacyDecodeToJSVal(in AString str, in JSContext cx);
};
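
The implementation of these additions is in nsJSON.cpp below, where LegacyDecodeToJSVal ends up with the C++ signature (const nsAString&, JSContext*, jsval*). A sketch of a C++ caller for the legacy-tolerant path, assuming the usual nsIJSON contract ID (an assumption, not shown in this patch) and leaving GC-rooting of the result to the caller as the comment above requires:

#include "jsapi.h"
#include "nsCOMPtr.h"
#include "nsComponentManagerUtils.h"
#include "nsString.h"
#include "nsIJSON.h"

// Parse a string that strict JSON now rejects (trailing comma) via the
// legacy-tolerant decoder added in this patch.
static nsresult
ParseLegacyJson(JSContext *cx, jsval *result)
{
    // Contract ID assumed; adjust to however nsJSON is registered.
    nsCOMPtr<nsIJSON> json = do_CreateInstance("@mozilla.org/dom/json;1");
    if (!json)
        return NS_ERROR_FAILURE;

    JSAutoRequest ar(cx);
    // The caller must GC-root |*result| before using it.
    return json->LegacyDecodeToJSVal(NS_LITERAL_STRING("[1, 2, 3,]"),
                                     cx, result);
}
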

View file

@ -417,7 +417,7 @@ nsJSON::DecodeToJSVal(const nsAString &str, JSContext *cx, jsval *result)
// Since we've called JS_BeginJSONParse, we have to call JS_FinishJSONParse,
// even if JS_ConsumeJSONText fails. But if either fails, we'll report an
// error.
ok = ok && JS_FinishJSONParse(cx, parser, JSVAL_NULL);
ok &= JS_FinishJSONParse(cx, parser, JSVAL_NULL);
if (!ok) {
return NS_ERROR_UNEXPECTED;
@ -429,7 +429,8 @@ nsJSON::DecodeToJSVal(const nsAString &str, JSContext *cx, jsval *result)
nsresult
nsJSON::DecodeInternal(nsIInputStream *aStream,
PRInt32 aContentLength,
PRBool aNeedsConverter)
PRBool aNeedsConverter,
DecodingMode mode /* = STRICT */)
{
nsresult rv;
nsIXPConnect *xpc = nsContentUtils::XPConnect();
@ -464,7 +465,7 @@ nsJSON::DecodeInternal(nsIInputStream *aStream,
return NS_ERROR_FAILURE;
nsRefPtr<nsJSONListener>
jsonListener(new nsJSONListener(cx, retvalPtr, aNeedsConverter));
jsonListener(new nsJSONListener(cx, retvalPtr, aNeedsConverter, mode));
if (!jsonListener)
return NS_ERROR_OUT_OF_MEMORY;
@ -514,6 +515,52 @@ nsJSON::DecodeInternal(nsIInputStream *aStream,
return NS_OK;
}
NS_IMETHODIMP
nsJSON::LegacyDecode(const nsAString& json)
{
const PRUnichar *data;
PRUint32 len = NS_StringGetData(json, &data);
nsCOMPtr<nsIInputStream> stream;
nsresult rv = NS_NewByteInputStream(getter_AddRefs(stream),
(const char*) data,
len * sizeof(PRUnichar),
NS_ASSIGNMENT_DEPEND);
NS_ENSURE_SUCCESS(rv, rv);
return DecodeInternal(stream, len, PR_FALSE, LEGACY);
}
NS_IMETHODIMP
nsJSON::LegacyDecodeFromStream(nsIInputStream *aStream, PRInt32 aContentLength)
{
return DecodeInternal(aStream, aContentLength, PR_TRUE, LEGACY);
}
NS_IMETHODIMP
nsJSON::LegacyDecodeToJSVal(const nsAString &str, JSContext *cx, jsval *result)
{
JSAutoRequest ar(cx);
JSONParser *parser = JS_BeginJSONParse(cx, result);
NS_ENSURE_TRUE(parser, NS_ERROR_UNEXPECTED);
JSBool ok = js_ConsumeJSONText(cx, parser,
(jschar*)PromiseFlatString(str).get(),
(uint32)str.Length(),
LEGACY);
// Since we've called JS_BeginJSONParse, we have to call JS_FinishJSONParse,
// even if js_ConsumeJSONText fails. But if either fails, we'll report an
// error.
ok &= JS_FinishJSONParse(cx, parser, JSVAL_NULL);
if (!ok) {
return NS_ERROR_UNEXPECTED;
}
return NS_OK;
}
nsresult
NS_NewJSON(nsISupports* aOuter, REFNSIID aIID, void** aResult)
{
@ -528,11 +575,13 @@ NS_NewJSON(nsISupports* aOuter, REFNSIID aIID, void** aResult)
}
nsJSONListener::nsJSONListener(JSContext *cx, jsval *rootVal,
PRBool needsConverter)
PRBool needsConverter,
DecodingMode mode /* = STRICT */)
: mNeedsConverter(needsConverter),
mJSONParser(nsnull),
mCx(cx),
mRootVal(rootVal)
mRootVal(rootVal),
mDecodingMode(mode)
{
}
@ -706,7 +755,8 @@ nsJSONListener::Consume(const PRUnichar* aBuffer, PRUint32 aByteLength)
if (!mJSONParser)
return NS_ERROR_FAILURE;
if (!JS_ConsumeJSONText(mCx, mJSONParser, (jschar*) aBuffer, aByteLength)) {
if (!js_ConsumeJSONText(mCx, mJSONParser, (jschar*) aBuffer, aByteLength,
mDecodingMode)) {
Cleanup();
return NS_ERROR_FAILURE;
}

View file

@ -40,6 +40,7 @@
#define nsJSON_h__
#include "jsapi.h"
#include "json.h"
#include "nsIJSON.h"
#include "nsString.h"
#include "nsCOMPtr.h"
@ -86,9 +87,11 @@ public:
protected:
nsresult EncodeInternal(nsJSONWriter *writer);
nsresult DecodeInternal(nsIInputStream *aStream,
PRInt32 aContentLength,
PRBool aNeedsConverter);
PRBool aNeedsConverter,
DecodingMode mode = STRICT);
nsCOMPtr<nsIURI> mURI;
};
@ -98,7 +101,8 @@ NS_NewJSON(nsISupports* aOuter, REFNSIID aIID, void** aResult);
class nsJSONListener : public nsIStreamListener
{
public:
nsJSONListener(JSContext *cx, jsval *rootVal, PRBool needsConverter);
nsJSONListener(JSContext *cx, jsval *rootVal, PRBool needsConverter,
DecodingMode mode);
virtual ~nsJSONListener();
NS_DECL_ISUPPORTS
@ -112,6 +116,7 @@ protected:
jsval *mRootVal;
nsCOMPtr<nsIUnicodeDecoder> mDecoder;
nsCString mSniffBuffer;
DecodingMode mDecodingMode;
nsresult ProcessBytes(const char* aBuffer, PRUint32 aByteLength);
nsresult ConsumeConverted(const char* aBuffer, PRUint32 aByteLength);
nsresult Consume(const PRUnichar *data, PRUint32 len);

View file

@ -167,14 +167,7 @@ function test_files() {
try {
dump(path +"\n");
x = read_file(path);
if (i == 4) {
// ["extra comma",]
do_check_eq(x[0], "extra comma");
do_check_eq(x.length, 1);
} else if (i == 9) {
// {"Extra comma": true,}
do_check_eq(x["Extra comma"], true);
} else if (i == 13) {
if (i == 13) {
// {"Numbers cannot have leading zeroes": 013}
do_check_eq(x["Numbers cannot have leading zeroes"], 13);
} else if (i == 18) {
@ -189,7 +182,7 @@ function test_files() {
} catch (ex) {
// expected from parsing invalid JSON
if (i == 4 || i == 9 || i == 13 || i == 18) {
if (i == 13 || i == 18) {
do_throw("Unexpected pass in " + path);
}
}

View file

@ -395,35 +395,41 @@ public:
JS_TriggerOperationCallback(cx);
PRBool killWorkerWhenDone;
{
nsLazyAutoRequest ar;
JSAutoCrossCompartmentCall axcc;
// Tell the worker which context it will be using
if (mWorker->SetGlobalForContext(cx)) {
RunQueue(cx, &killWorkerWhenDone);
// Tell the worker which context it will be using
if (mWorker->SetGlobalForContext(cx, &ar, &axcc)) {
NS_ASSERTION(ar.entered(), "SetGlobalForContext must enter request on success");
NS_ASSERTION(axcc.entered(), "SetGlobalForContext must enter xcc on success");
// Code in XPConnect assumes that the context's global object won't be
// replaced outside of a request.
JSAutoRequest ar(cx);
RunQueue(cx, &killWorkerWhenDone);
// Remove the global object from the context so that it might be garbage
// collected.
JS_SetGlobalObject(cx, NULL);
JS_SetContextPrivate(cx, NULL);
}
else {
{
// Code in XPConnect assumes that the context's global object won't be
// replaced outside of a request.
JSAutoRequest ar(cx);
// This is usually due to a parse error in the worker script...
// Remove the global object from the context so that it might be garbage
// collected.
JS_SetGlobalObject(cx, NULL);
JS_SetContextPrivate(cx, NULL);
}
else {
NS_ASSERTION(!ar.entered(), "SetGlobalForContext must not enter request on failure");
NS_ASSERTION(!axcc.entered(), "SetGlobalForContext must not enter xcc on failure");
nsAutoMonitor mon(gDOMThreadService->mMonitor);
killWorkerWhenDone = mKillWorkerWhenDone;
gDOMThreadService->WorkerComplete(this);
mon.NotifyAll();
{
// Code in XPConnect assumes that the context's global object won't be
// replaced outside of a request.
JSAutoRequest ar2(cx);
// This is usually due to a parse error in the worker script...
JS_SetGlobalObject(cx, NULL);
JS_SetContextPrivate(cx, NULL);
}
nsAutoMonitor mon(gDOMThreadService->mMonitor);
killWorkerWhenDone = mKillWorkerWhenDone;
gDOMThreadService->WorkerComplete(this);
mon.NotifyAll();
}
}
if (killWorkerWhenDone) {

View file

@ -75,45 +75,38 @@ public:
// Same as window.dump().
static JSBool
Dump(JSContext* aCx, JSObject* aObj, uintN aArgc, jsval* aArgv, jsval* aRval);
Dump(JSContext* aCx, uintN aArgc, jsval* aVp);
// Same as window.setTimeout().
static JSBool
SetTimeout(JSContext* aCx, JSObject* aObj, uintN aArgc, jsval* aArgv,
jsval* aRval) {
return MakeTimeout(aCx, aObj, aArgc, aArgv, aRval, PR_FALSE);
SetTimeout(JSContext* aCx, uintN aArgc, jsval* aVp) {
return MakeTimeout(aCx, aArgc, aVp, PR_FALSE);
}
// Same as window.setInterval().
static JSBool
SetInterval(JSContext* aCx, JSObject* aObj, uintN aArgc, jsval* aArgv,
jsval* aRval) {
return MakeTimeout(aCx, aObj, aArgc, aArgv, aRval, PR_TRUE);
SetInterval(JSContext* aCx, uintN aArgc, jsval* aVp) {
return MakeTimeout(aCx, aArgc, aVp, PR_TRUE);
}
// Used for both clearTimeout() and clearInterval().
static JSBool
KillTimeout(JSContext* aCx, JSObject* aObj, uintN aArgc, jsval* aArgv,
jsval* aRval);
KillTimeout(JSContext* aCx, uintN aArgc, jsval* aVp);
static JSBool
LoadScripts(JSContext* aCx, JSObject* aObj, uintN aArgc, jsval* aArgv,
jsval* aRval);
LoadScripts(JSContext* aCx, uintN aArgc, jsval* aVp);
static JSBool
NewXMLHttpRequest(JSContext* aCx, JSObject* aObj, uintN aArgc, jsval* aArgv,
jsval* aRval);
NewXMLHttpRequest(JSContext* aCx, uintN aArgc, jsval* aVp);
static JSBool
NewWorker(JSContext* aCx, JSObject* aObj, uintN aArgc, jsval* aArgv,
jsval* aRval) {
return MakeNewWorker(aCx, aObj, aArgc, aArgv, aRval, nsDOMWorker::CONTENT);
NewWorker(JSContext* aCx, uintN aArgc, jsval* aVp) {
return MakeNewWorker(aCx, aArgc, aVp, nsDOMWorker::CONTENT);
}
// Chrome-only functions
static JSBool
NewChromeWorker(JSContext* aCx, JSObject* aObj, uintN aArgc, jsval* aArgv,
jsval* aRval);
NewChromeWorker(JSContext* aCx, uintN aArgc, jsval* aVp);
#ifdef BUILD_CTYPES
static JSBool
@ -123,27 +116,25 @@ public:
private:
// Internal helper for SetTimeout and SetInterval.
static JSBool
MakeTimeout(JSContext* aCx, JSObject* aObj, uintN aArgc, jsval* aArgv,
jsval* aRval, PRBool aIsInterval);
MakeTimeout(JSContext* aCx, uintN aArgc, jsval* aVp, PRBool aIsInterval);
static JSBool
MakeNewWorker(JSContext* aCx, JSObject* aObj, uintN aArgc, jsval* aArgv,
jsval* aRval, WorkerPrivilegeModel aPrivilegeModel);
MakeNewWorker(JSContext* aCx, uintN aArgc, jsval* aVp,
WorkerPrivilegeModel aPrivilegeModel);
};
JSBool
nsDOMWorkerFunctions::Dump(JSContext* aCx,
JSObject* /* aObj */,
uintN aArgc,
jsval* aArgv,
jsval* /* aRval */)
jsval* aVp)
{
JS_SET_RVAL(aCx, aVp, JSVAL_VOID);
if (!nsGlobalWindow::DOMWindowDumpEnabled()) {
return JS_TRUE;
}
JSString* str;
if (aArgc && (str = JS_ValueToString(aCx, aArgv[0])) && str) {
if (aArgc && (str = JS_ValueToString(aCx, JS_ARGV(aCx, aVp)[0])) && str) {
nsDependentJSString string(str);
fputs(NS_ConvertUTF16toUTF8(nsDependentJSString(str)).get(), stderr);
fflush(stderr);
@ -153,10 +144,8 @@ nsDOMWorkerFunctions::Dump(JSContext* aCx,
JSBool
nsDOMWorkerFunctions::MakeTimeout(JSContext* aCx,
JSObject* /* aObj */,
uintN aArgc,
jsval* aArgv,
jsval* aRval,
jsval* aVp,
PRBool aIsInterval)
{
nsDOMWorker* worker = static_cast<nsDOMWorker*>(JS_GetContextPrivate(aCx));
@ -170,7 +159,7 @@ nsDOMWorkerFunctions::MakeTimeout(JSContext* aCx,
if (worker->IsClosing()) {
// Timeouts won't run in the close handler, fake success and bail.
*aRval = INT_TO_JSVAL(id);
JS_SET_RVAL(aCx, aVp, INT_TO_JSVAL(id));
return JS_TRUE;
}
@ -180,7 +169,7 @@ nsDOMWorkerFunctions::MakeTimeout(JSContext* aCx,
return JS_FALSE;
}
nsresult rv = timeout->Init(aCx, aArgc, aArgv, aIsInterval);
nsresult rv = timeout->Init(aCx, aArgc, JS_ARGV(aCx, aVp), aIsInterval);
if (NS_FAILED(rv)) {
JS_ReportError(aCx, "Failed to initialize timeout!");
return JS_FALSE;
@ -198,16 +187,14 @@ nsDOMWorkerFunctions::MakeTimeout(JSContext* aCx,
return JS_FALSE;
}
*aRval = INT_TO_JSVAL(id);
JS_SET_RVAL(aCx, aVp, INT_TO_JSVAL(id));
return JS_TRUE;
}
JSBool
nsDOMWorkerFunctions::KillTimeout(JSContext* aCx,
JSObject* /* aObj */,
uintN aArgc,
jsval* aArgv,
jsval* /* aRval */)
jsval* aVp)
{
nsDOMWorker* worker = static_cast<nsDOMWorker*>(JS_GetContextPrivate(aCx));
NS_ASSERTION(worker, "This should be set by the DOM thread service!");
@ -222,21 +209,20 @@ nsDOMWorkerFunctions::KillTimeout(JSContext* aCx,
}
uint32 id;
if (!JS_ValueToECMAUint32(aCx, aArgv[0], &id)) {
if (!JS_ValueToECMAUint32(aCx, JS_ARGV(aCx, aVp)[0], &id)) {
JS_ReportError(aCx, "First argument must be a timeout id");
return JS_FALSE;
}
worker->CancelTimeoutWithId(PRUint32(id));
JS_SET_RVAL(aCx, aVp, JSVAL_VOID);
return JS_TRUE;
}
JSBool
nsDOMWorkerFunctions::LoadScripts(JSContext* aCx,
JSObject* /* aObj */,
uintN aArgc,
jsval* aArgv,
jsval* /* aRval */)
jsval* aVp)
{
nsDOMWorker* worker = static_cast<nsDOMWorker*>(JS_GetContextPrivate(aCx));
NS_ASSERTION(worker, "This should be set by the DOM thread service!");
@ -257,8 +243,9 @@ nsDOMWorkerFunctions::LoadScripts(JSContext* aCx,
return JS_FALSE;
}
jsval* argv = JS_ARGV(aCx, aVp);
for (uintN index = 0; index < aArgc; index++) {
jsval val = aArgv[index];
jsval val = argv[index];
if (!JSVAL_IS_STRING(val)) {
JS_ReportError(aCx, "Argument %d must be a string", index);
@ -298,16 +285,20 @@ nsDOMWorkerFunctions::LoadScripts(JSContext* aCx,
return JS_FALSE;
}
JS_SET_RVAL(aCx, aVp, JSVAL_VOID);
return JS_TRUE;
}
JSBool
nsDOMWorkerFunctions::NewXMLHttpRequest(JSContext* aCx,
JSObject* aObj,
uintN aArgc,
jsval* /* aArgv */,
jsval* aRval)
jsval* aVp)
{
JSObject *obj = JS_THIS_OBJECT(aCx, aVp);
if (!obj) {
return JS_FALSE;
}
nsDOMWorker* worker = static_cast<nsDOMWorker*>(JS_GetContextPrivate(aCx));
NS_ASSERTION(worker, "This should be set by the DOM thread service!");
@ -340,7 +331,7 @@ nsDOMWorkerFunctions::NewXMLHttpRequest(JSContext* aCx,
nsCOMPtr<nsIXPConnectJSObjectHolder> xhrWrapped;
jsval v;
rv = nsContentUtils::WrapNative(aCx, aObj,
rv = nsContentUtils::WrapNative(aCx, obj,
static_cast<nsIXMLHttpRequest*>(xhr), &v,
getter_AddRefs(xhrWrapped));
if (NS_FAILED(rv)) {
@ -348,16 +339,14 @@ nsDOMWorkerFunctions::NewXMLHttpRequest(JSContext* aCx,
return JS_FALSE;
}
*aRval = v;
JS_SET_RVAL(aCx, aVp, v);
return JS_TRUE;
}
JSBool
nsDOMWorkerFunctions::NewChromeWorker(JSContext* aCx,
JSObject* aObj,
uintN aArgc,
jsval* aArgv,
jsval* aRval)
jsval* aVp)
{
nsDOMWorker* worker = static_cast<nsDOMWorker*>(JS_GetContextPrivate(aCx));
NS_ASSERTION(worker, "This should be set by the DOM thread service!");
@ -367,17 +356,20 @@ nsDOMWorkerFunctions::NewChromeWorker(JSContext* aCx,
return JS_FALSE;
}
return MakeNewWorker(aCx, aObj, aArgc, aArgv, aRval, nsDOMWorker::CHROME);
return MakeNewWorker(aCx, aArgc, aVp, nsDOMWorker::CHROME);
}
JSBool
nsDOMWorkerFunctions::MakeNewWorker(JSContext* aCx,
JSObject* aObj,
uintN aArgc,
jsval* aArgv,
jsval* aRval,
jsval* aVp,
WorkerPrivilegeModel aPrivilegeModel)
{
JSObject *obj = JS_THIS_OBJECT(aCx, aVp);
if (!obj) {
return JS_FALSE;
}
nsDOMWorker* worker = static_cast<nsDOMWorker*>(JS_GetContextPrivate(aCx));
NS_ASSERTION(worker, "This should be set by the DOM thread service!");
@ -412,7 +404,8 @@ nsDOMWorkerFunctions::MakeNewWorker(JSContext* aCx,
return JS_FALSE;
}
nsresult rv = newWorker->InitializeInternal(owner, aCx, aObj, aArgc, aArgv);
nsresult rv = newWorker->InitializeInternal(owner, aCx, obj, aArgc,
JS_ARGV(aCx, aVp));
if (NS_FAILED(rv)) {
JS_ReportError(aCx, "Couldn't initialize new worker!");
return JS_FALSE;
@ -420,14 +413,14 @@ nsDOMWorkerFunctions::MakeNewWorker(JSContext* aCx,
nsCOMPtr<nsIXPConnectJSObjectHolder> workerWrapped;
jsval v;
rv = nsContentUtils::WrapNative(aCx, aObj, static_cast<nsIWorker*>(newWorker),
rv = nsContentUtils::WrapNative(aCx, obj, static_cast<nsIWorker*>(newWorker),
&v, getter_AddRefs(workerWrapped));
if (NS_FAILED(rv)) {
JS_ReportError(aCx, "Failed to wrap new worker!");
return JS_FALSE;
}
*aRval = v;
JS_SET_RVAL(aCx, aVp, v);
return JS_TRUE;
}
@ -461,26 +454,26 @@ nsDOMWorkerFunctions::CTypesLazyGetter(JSContext* aCx,
#endif
JSFunctionSpec gDOMWorkerFunctions[] = {
{ "dump", nsDOMWorkerFunctions::Dump, 1, 0, 0 },
{ "setTimeout", nsDOMWorkerFunctions::SetTimeout, 1, 0, 0 },
{ "clearTimeout", nsDOMWorkerFunctions::KillTimeout, 1, 0, 0 },
{ "setInterval", nsDOMWorkerFunctions::SetInterval, 1, 0, 0 },
{ "clearInterval", nsDOMWorkerFunctions::KillTimeout, 1, 0, 0 },
{ "importScripts", nsDOMWorkerFunctions::LoadScripts, 1, 0, 0 },
{ "XMLHttpRequest", nsDOMWorkerFunctions::NewXMLHttpRequest, 0, 0, 0 },
{ "Worker", nsDOMWorkerFunctions::NewWorker, 1, 0, 0 },
{ "dump", nsDOMWorkerFunctions::Dump, 1, 0 },
{ "setTimeout", nsDOMWorkerFunctions::SetTimeout, 1, 0 },
{ "clearTimeout", nsDOMWorkerFunctions::KillTimeout, 1, 0 },
{ "setInterval", nsDOMWorkerFunctions::SetInterval, 1, 0 },
{ "clearInterval", nsDOMWorkerFunctions::KillTimeout, 1, 0 },
{ "importScripts", nsDOMWorkerFunctions::LoadScripts, 1, 0 },
{ "XMLHttpRequest", nsDOMWorkerFunctions::NewXMLHttpRequest, 0, 0 },
{ "Worker", nsDOMWorkerFunctions::NewWorker, 1, 0 },
#ifdef MOZ_SHARK
{ "startShark", js_StartShark, 0, 0, 0 },
{ "stopShark", js_StopShark, 0, 0, 0 },
{ "connectShark", js_ConnectShark, 0, 0, 0 },
{ "disconnectShark", js_DisconnectShark, 0, 0, 0 },
{ "startShark", js_StartShark, 0, 0 },
{ "stopShark", js_StopShark, 0, 0 },
{ "connectShark", js_ConnectShark, 0, 0 },
{ "disconnectShark", js_DisconnectShark, 0, 0 },
#endif
{ nsnull, nsnull, 0, 0, 0 }
{ nsnull, nsnull, 0, 0 }
};
JSFunctionSpec gDOMWorkerChromeFunctions[] = {
{ "ChromeWorker", nsDOMWorkerFunctions::NewChromeWorker, 1, 0, 0 },
{ nsnull, nsnull, 0, 0, 0 }
{ "ChromeWorker", nsDOMWorkerFunctions::NewChromeWorker, 1, 0 },
{ nsnull, nsnull, 0, 0 }
};
nsDOMWorkerScope::nsDOMWorkerScope(nsDOMWorker* aWorker)
@ -1567,26 +1560,41 @@ nsDOMWorker::PostMessageInternal(PRBool aToInner)
}
PRBool
nsDOMWorker::SetGlobalForContext(JSContext* aCx)
nsDOMWorker::SetGlobalForContext(JSContext* aCx, nsLazyAutoRequest *aRequest,
JSAutoCrossCompartmentCall *aCall)
{
NS_ASSERTION(!NS_IsMainThread(), "Wrong thread!");
if (!CompileGlobalObject(aCx)) {
if (!CompileGlobalObject(aCx, aRequest, aCall)) {
return PR_FALSE;
}
JSAutoRequest ar(aCx);
JS_SetGlobalObject(aCx, mGlobal);
return PR_TRUE;
}
PRBool
nsDOMWorker::CompileGlobalObject(JSContext* aCx)
nsDOMWorker::CompileGlobalObject(JSContext* aCx, nsLazyAutoRequest *aRequest,
JSAutoCrossCompartmentCall *aCall)
{
NS_ASSERTION(!NS_IsMainThread(), "Wrong thread!");
// On success, we enter a request and a cross-compartment call that both
// belong to the caller. But on failure, we must not remain in a request or
// cross-compartment call. So we enter both only locally at first. On
// failure, the local request and call will automatically get cleaned
// up. Once success is certain, we swap them into *aRequest and *aCall.
nsLazyAutoRequest localRequest;
JSAutoCrossCompartmentCall localCall;
localRequest.enter(aCx);
PRBool success;
if (mGlobal) {
success = localCall.enter(aCx, mGlobal);
NS_ENSURE_TRUE(success, PR_FALSE);
aRequest->swap(localRequest);
aCall->swap(localCall);
return PR_TRUE;
}
@ -1598,8 +1606,6 @@ nsDOMWorker::CompileGlobalObject(JSContext* aCx)
NS_ASSERTION(!mScriptURL.IsEmpty(), "Must have a url here!");
JSAutoRequest ar(aCx);
NS_ASSERTION(!JS_GetGlobalObject(aCx), "Global object should be unset!");
nsRefPtr<nsDOMWorkerScope> scope = new nsDOMWorkerScope(this);
@ -1629,6 +1635,9 @@ nsDOMWorker::CompileGlobalObject(JSContext* aCx)
NS_ASSERTION(JS_GetGlobalObject(aCx) == global, "Global object mismatch!");
success = localCall.enter(aCx, global);
NS_ENSURE_TRUE(success, PR_FALSE);
#ifdef DEBUG
{
jsval components;
@ -1640,7 +1649,7 @@ nsDOMWorker::CompileGlobalObject(JSContext* aCx)
#endif
// Set up worker thread functions.
PRBool success = JS_DefineFunctions(aCx, global, gDOMWorkerFunctions);
success = JS_DefineFunctions(aCx, global, gDOMWorkerFunctions);
NS_ENSURE_TRUE(success, PR_FALSE);
if (mPrivilegeModel == CHROME) {
@ -1697,6 +1706,8 @@ nsDOMWorker::CompileGlobalObject(JSContext* aCx)
NS_ASSERTION(mPrincipal && mURI, "Script loader didn't set our principal!");
aRequest->swap(localRequest);
aCall->swap(localCall);
return PR_TRUE;
}

View file

@ -105,6 +105,33 @@ private:
PRPackedBool mHasOnerror;
};
class nsLazyAutoRequest
{
public:
nsLazyAutoRequest() : mCx(nsnull) {}
~nsLazyAutoRequest() {
if (mCx)
JS_EndRequest(mCx);
}
void enter(JSContext *aCx) {
JS_BeginRequest(aCx);
mCx = aCx;
}
bool entered() const { return mCx != nsnull; }
void swap(nsLazyAutoRequest &other) {
JSContext *tmp = mCx;
mCx = other.mCx;
other.mCx = tmp;
}
private:
JSContext *mCx;
};
class nsDOMWorker : public nsDOMWorkerMessageHandler,
public nsIChromeWorker,
public nsITimerCallback,
@ -174,7 +201,7 @@ public:
PRBool IsClosing();
PRBool IsSuspended();
PRBool SetGlobalForContext(JSContext* aCx);
PRBool SetGlobalForContext(JSContext* aCx, nsLazyAutoRequest *aRequest, JSAutoCrossCompartmentCall *aCall);
void SetPool(nsDOMWorkerPool* aPool);
@ -258,7 +285,7 @@ private:
nsresult PostMessageInternal(PRBool aToInner);
PRBool CompileGlobalObject(JSContext* aCx);
PRBool CompileGlobalObject(JSContext* aCx, nsLazyAutoRequest *aRequest, JSAutoCrossCompartmentCall *aCall);
PRUint32 NextTimeoutId() {
return ++mNextTimeoutId;
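
The nsLazyAutoRequest helper added above exists so that CompileGlobalObject can enter a JS request (and a cross-compartment call) locally and hand ownership to the caller only once success is certain; on any early return the local RAII objects unwind by themselves. A minimal sketch of that enter-then-swap idiom in isolation, with the fallible setup step left as a placeholder:

#include "jsapi.h"
#include "nsDOMWorker.h"  // for nsLazyAutoRequest, declared above

static PRBool
EnterOnSuccess(JSContext *cx, JSObject *global,
               nsLazyAutoRequest *aRequest, JSAutoCrossCompartmentCall *aCall)
{
    // Enter locally first: if anything below fails, these unwind on return.
    nsLazyAutoRequest localRequest;
    JSAutoCrossCompartmentCall localCall;
    localRequest.enter(cx);
    if (!localCall.enter(cx, global))
        return PR_FALSE;

    // ... fallible setup (script compilation etc.) would go here ...

    // Success is certain: transfer the entered state to the caller's objects.
    aRequest->swap(localRequest);
    aCall->swap(localCall);
    return PR_TRUE;
}
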

View file

@ -140,7 +140,7 @@ ScriptErrorReporter(JSContext *cx,
// Don't report an exception from inner JS frames as the callers may intend
// to handle it.
while ((fp = JS_FrameIterator(cx, &fp))) {
if (!JS_IsNativeFrame(cx, fp)) {
if (JS_IsScriptFrame(cx, fp)) {
return;
}
}
@ -243,14 +243,13 @@ ContextCallback(JSContext *cx,
static JSBool
Print(JSContext *cx,
JSObject *obj,
uintN argc,
jsval *argv,
jsval *rval)
jsval *vp)
{
uintN i, n;
JSString *str;
jsval *argv = JS_ARGV(cx, vp);
for (i = n = 0; i < argc; i++) {
str = JS_ValueToString(cx, argv[i]);
if (!str)
@ -261,6 +260,7 @@ Print(JSContext *cx,
n++;
if (n)
fputc('\n', stdout);
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return JS_TRUE;
}
@ -280,16 +280,16 @@ GetLine(char *bufp,
static JSBool
Dump(JSContext *cx,
JSObject *obj,
uintN argc,
jsval *argv,
jsval *rval)
jsval *vp)
{
JS_SET_RVAL(cx, vp, JSVAL_VOID);
JSString *str;
if (!argc)
return JS_TRUE;
str = JS_ValueToString(cx, argv[0]);
str = JS_ValueToString(cx, JS_ARGV(cx, vp)[0]);
if (!str)
return JS_FALSE;
@ -300,10 +300,8 @@ Dump(JSContext *cx,
static JSBool
Load(JSContext *cx,
JSObject *obj,
uintN argc,
jsval *argv,
jsval *rval)
jsval *vp)
{
uintN i;
JSString *str;
@ -313,6 +311,11 @@ Load(JSContext *cx,
jsval result;
FILE *file;
JSObject *obj = JS_THIS_OBJECT(cx, vp);
if (!obj)
return JS_FALSE;
jsval *argv = JS_ARGV(cx, vp);
for (i = 0; i < argc; i++) {
str = JS_ValueToString(cx, argv[i]);
if (!str)
@ -337,25 +340,25 @@ Load(JSContext *cx,
if (!ok)
return JS_FALSE;
}
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return JS_TRUE;
}
static JSBool
Version(JSContext *cx,
JSObject *obj,
uintN argc,
jsval *argv,
jsval *rval)
jsval *vp)
{
jsval *argv = JS_ARGV(cx, vp);
if (argc > 0 && JSVAL_IS_INT(argv[0]))
*rval = INT_TO_JSVAL(JS_SetVersion(cx, JSVersion(JSVAL_TO_INT(argv[0]))));
JS_SET_RVAL(cx, vp, INT_TO_JSVAL(JS_SetVersion(cx, JSVersion(JSVAL_TO_INT(argv[0])))));
else
*rval = INT_TO_JSVAL(JS_GetVersion(cx));
JS_SET_RVAL(cx, vp, INT_TO_JSVAL(JS_GetVersion(cx)));
return JS_TRUE;
}
static JSBool
BuildDate(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval)
BuildDate(JSContext *cx, uintN argc, jsval *vp)
{
fprintf(stdout, "built on %s at %s\n", __DATE__, __TIME__);
return JS_TRUE;
@ -363,13 +366,11 @@ BuildDate(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval)
static JSBool
Quit(JSContext *cx,
JSObject *obj,
uintN argc,
jsval *argv,
jsval *rval)
jsval *vp)
{
int exitCode = 0;
JS_ConvertArguments(cx, argc, argv, "/ i", &exitCode);
JS_ConvertArguments(cx, argc, JS_ARGV(cx, vp), "/ i", &exitCode);
XPCShellEnvironment* env = Environment(cx);
env->SetExitCode(exitCode);
@ -380,30 +381,27 @@ Quit(JSContext *cx,
static JSBool
DumpXPC(JSContext *cx,
JSObject *obj,
uintN argc,
jsval *argv,
jsval *rval)
jsval *vp)
{
int32 depth = 2;
if (argc > 0) {
if (!JS_ValueToInt32(cx, argv[0], &depth))
if (!JS_ValueToInt32(cx, JS_ARGV(cx, vp)[0], &depth))
return JS_FALSE;
}
nsCOMPtr<nsIXPConnect> xpc = do_GetService(nsIXPConnect::GetCID());
if(xpc)
xpc->DebugDump((int16)depth);
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return JS_TRUE;
}
static JSBool
GC(JSContext *cx,
JSObject *obj,
uintN argc,
jsval *argv,
jsval *rval)
jsval *vp)
{
JSRuntime *rt;
uint32 preBytes;
@ -422,6 +420,7 @@ GC(JSContext *cx,
#ifdef JS_GCMETER
js_DumpGCStats(rt, stdout);
#endif
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return JS_TRUE;
}
@ -429,10 +428,8 @@ GC(JSContext *cx,
static JSBool
DumpHeap(JSContext *cx,
JSObject *obj,
uintN argc,
jsval *argv,
jsval *rval)
jsval *vp)
{
char *fileName = NULL;
void* startThing = NULL;
@ -440,12 +437,14 @@ DumpHeap(JSContext *cx,
void *thingToFind = NULL;
size_t maxDepth = (size_t)-1;
void *thingToIgnore = NULL;
jsval *vp;
FILE *dumpFile;
JSBool ok;
vp = &argv[0];
if (*vp != JSVAL_NULL && *vp != JSVAL_VOID) {
jsval *argv = JS_ARGV(cx, vp);
JS_SET_RVAL(cx, vp, JSVAL_VOID);
vp = argv + 0;
if (argc > 0 && *vp != JSVAL_NULL && *vp != JSVAL_VOID) {
JSString *str;
str = JS_ValueToString(cx, *vp);
@ -455,23 +454,23 @@ DumpHeap(JSContext *cx,
fileName = JS_GetStringBytes(str);
}
vp = &argv[1];
if (*vp != JSVAL_NULL && *vp != JSVAL_VOID) {
vp = argv + 1;
if (argc > 1 && *vp != JSVAL_NULL && *vp != JSVAL_VOID) {
if (!JSVAL_IS_TRACEABLE(*vp))
goto not_traceable_arg;
startThing = JSVAL_TO_TRACEABLE(*vp);
startTraceKind = JSVAL_TRACE_KIND(*vp);
}
vp = &argv[2];
if (*vp != JSVAL_NULL && *vp != JSVAL_VOID) {
vp = argv + 2;
if (argc > 2 && *vp != JSVAL_NULL && *vp != JSVAL_VOID) {
if (!JSVAL_IS_TRACEABLE(*vp))
goto not_traceable_arg;
thingToFind = JSVAL_TO_TRACEABLE(*vp);
}
vp = &argv[3];
if (*vp != JSVAL_NULL && *vp != JSVAL_VOID) {
vp = argv + 3;
if (argc > 3 && *vp != JSVAL_NULL && *vp != JSVAL_VOID) {
uint32 depth;
if (!JS_ValueToECMAUint32(cx, *vp, &depth))
@ -479,8 +478,8 @@ DumpHeap(JSContext *cx,
maxDepth = depth;
}
vp = &argv[4];
if (*vp != JSVAL_NULL && *vp != JSVAL_VOID) {
vp = argv + 4;
if (argc > 4 && *vp != JSVAL_NULL && *vp != JSVAL_VOID) {
if (!JSVAL_IS_TRACEABLE(*vp))
goto not_traceable_arg;
thingToIgnore = JSVAL_TO_TRACEABLE(*vp);
@ -514,46 +513,46 @@ DumpHeap(JSContext *cx,
static JSBool
Clear(JSContext *cx,
JSObject *obj,
uintN argc,
jsval *argv,
jsval *rval)
jsval *vp)
{
jsval *argv = JS_ARGV(cx, vp);
if (argc > 0 && !JSVAL_IS_PRIMITIVE(argv[0])) {
JS_ClearScope(cx, JSVAL_TO_OBJECT(argv[0]));
} else {
JS_ReportError(cx, "'clear' requires an object");
return JS_FALSE;
}
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return JS_TRUE;
}
JSFunctionSpec gGlobalFunctions[] =
{
{"print", Print, 0,0,0},
{"load", Load, 1,0,0},
{"quit", Quit, 0,0,0},
{"version", Version, 1,0,0},
{"build", BuildDate, 0,0,0},
{"dumpXPC", DumpXPC, 1,0,0},
{"dump", Dump, 1,0,0},
{"gc", GC, 0,0,0},
{"clear", Clear, 1,0,0},
{"print", Print, 0,0},
{"load", Load, 1,0},
{"quit", Quit, 0,0},
{"version", Version, 1,0},
{"build", BuildDate, 0,0},
{"dumpXPC", DumpXPC, 1,0},
{"dump", Dump, 1,0},
{"gc", GC, 0,0},
{"clear", Clear, 1,0},
#ifdef DEBUG
{"dumpHeap", DumpHeap, 5,0,0},
{"dumpHeap", DumpHeap, 5,0},
#endif
#ifdef MOZ_SHARK
{"startShark", js_StartShark, 0,0,0},
{"stopShark", js_StopShark, 0,0,0},
{"connectShark", js_ConnectShark, 0,0,0},
{"disconnectShark", js_DisconnectShark, 0,0,0},
{"startShark", js_StartShark, 0,0},
{"stopShark", js_StopShark, 0,0},
{"connectShark", js_ConnectShark, 0,0},
{"disconnectShark", js_DisconnectShark, 0,0},
#endif
#ifdef MOZ_CALLGRIND
{"startCallgrind", js_StartCallgrind, 0,0,0},
{"stopCallgrind", js_StopCallgrind, 0,0,0},
{"dumpCallgrind", js_DumpCallgrind, 1,0,0},
{"startCallgrind", js_StartCallgrind, 0,0},
{"stopCallgrind", js_StopCallgrind, 0,0},
{"dumpCallgrind", js_DumpCallgrind, 1,0},
#endif
{nsnull,nsnull,0,0,0}
{nsnull,nsnull,0,0}
};
typedef enum JSShellErrNum
@ -566,25 +565,6 @@ typedef enum JSShellErrNum
#undef MSGDEF
} JSShellErrNum;
JSErrorFormatString gErrorFormatString[JSErr_Limit] =
{
#define MSG_DEF(name, number, count, exception, format) \
{ format, count } ,
#include "jsshell.msg"
#undef MSG_DEF
};
static const JSErrorFormatString *
GetErrorMessage(void *userRef,
const char *locale,
const uintN errorNumber)
{
if ((errorNumber > 0) && (errorNumber < JSShellErr_Limit))
return &gErrorFormatString[errorNumber];
return NULL;
}
static void
ProcessFile(JSContext *cx,
JSObject *obj,

View file

@ -642,19 +642,22 @@ ObjectWrapperParent::CPOW_Finalize(JSContext* cx, JSObject* obj)
}
/*static*/ JSBool
ObjectWrapperParent::CPOW_Call(JSContext* cx, JSObject* obj, uintN argc,
jsval* argv, jsval* rval)
ObjectWrapperParent::CPOW_Call(JSContext* cx, uintN argc, jsval* vp)
{
CPOW_LOG(("Calling CPOW_Call..."));
JSObject* thisobj = JS_THIS_OBJECT(cx, vp);
if (!thisobj)
return JS_FALSE;
ObjectWrapperParent* function =
Unwrap(cx, JSVAL_TO_OBJECT(JS_ARGV_CALLEE(argv)));
Unwrap(cx, JSVAL_TO_OBJECT(JS_CALLEE(cx, vp)));
if (!function)
return with_error(cx, JS_FALSE, "Could not unwrap CPOW function");
AutoCheckOperation aco(cx, function);
ObjectWrapperParent* receiver = Unwrap(cx, obj);
ObjectWrapperParent* receiver = Unwrap(cx, thisobj);
if (!receiver) {
// Substitute child global for parent global object.
// TODO First make sure we're really replacing the global object?
@ -664,6 +667,7 @@ ObjectWrapperParent::CPOW_Call(JSContext* cx, JSObject* obj, uintN argc,
}
nsTArray<JSVariant> in_argv(argc);
jsval* argv = JS_ARGV(cx, vp);
for (uintN i = 0; i < argc; i++)
if (!jsval_to_JSVariant(cx, argv[i], in_argv.AppendElement()))
return JS_FALSE;
@ -674,23 +678,22 @@ ObjectWrapperParent::CPOW_Call(JSContext* cx, JSObject* obj, uintN argc,
function->CallCall(receiver, in_argv,
aco.StatusPtr(), &out_rval) &&
aco.Ok() &&
jsval_from_JSVariant(cx, out_rval, rval));
jsval_from_JSVariant(cx, out_rval, vp));
}
/*static*/ JSBool
ObjectWrapperParent::CPOW_Construct(JSContext *cx, JSObject *obj, uintN argc,
jsval *argv, jsval *rval)
ObjectWrapperParent::CPOW_Construct(JSContext* cx, uintN argc, jsval* vp)
{
CPOW_LOG(("Calling CPOW_Construct..."));
ObjectWrapperParent* constructor =
Unwrap(cx, JSVAL_TO_OBJECT(JS_ARGV_CALLEE(argv)));
ObjectWrapperParent* constructor = Unwrap(cx, JSVAL_TO_OBJECT(JS_CALLEE(cx, vp)));
if (!constructor)
return with_error(cx, JS_FALSE, "Could not unwrap CPOW constructor function");
AutoCheckOperation aco(cx, constructor);
nsTArray<JSVariant> in_argv(argc);
jsval* argv = JS_ARGV(cx, vp);
for (uintN i = 0; i < argc; i++)
if (!jsval_to_JSVariant(cx, argv[i], in_argv.AppendElement()))
return JS_FALSE;
@ -698,10 +701,9 @@ ObjectWrapperParent::CPOW_Construct(JSContext *cx, JSObject *obj, uintN argc,
PObjectWrapperParent* out_powp;
return (constructor->Manager()->RequestRunToCompletion() &&
constructor->CallConstruct(in_argv,
aco.StatusPtr(), &out_powp) &&
constructor->CallConstruct(in_argv, aco.StatusPtr(), &out_powp) &&
aco.Ok() &&
jsval_from_PObjectWrapperParent(cx, out_powp, rval));
jsval_from_PObjectWrapperParent(cx, out_powp, vp));
}
/*static*/ JSBool

View file

@ -118,12 +118,10 @@ private:
CPOW_Finalize(JSContext* cx, JSObject* obj);
static JSBool
CPOW_Call(JSContext* cx, JSObject* obj, uintN argc, jsval* argv,
jsval* rval);
CPOW_Call(JSContext* cx, uintN argc, jsval* vp);
static JSBool
CPOW_Construct(JSContext *cx, JSObject *obj, uintN argc, jsval *argv,
jsval *rval);
CPOW_Construct(JSContext *cx, uintN argc, jsval *vp);
static JSBool
CPOW_HasInstance(JSContext *cx, JSObject *obj, const jsval *v, JSBool *bp);

View file

@ -347,8 +347,7 @@ Handle<BaseType>::sHandle_Properties[] = {
#undef HANDLE_PROP_FLAGS
#define HANDLE_FUN_FLAGS (JSFUN_FAST_NATIVE | \
JSPROP_READONLY | \
#define HANDLE_FUN_FLAGS (JSPROP_READONLY | \
JSPROP_PERMANENT)
template <class BaseType>

View file

@ -57,8 +57,7 @@ JetpackChild::~JetpackChild()
{
}
#define IMPL_METHOD_FLAGS (JSFUN_FAST_NATIVE | \
JSPROP_ENUMERATE | \
#define IMPL_METHOD_FLAGS (JSPROP_ENUMERATE | \
JSPROP_READONLY | \
JSPROP_PERMANENT)
const JSFunctionSpec

View file

@ -792,10 +792,6 @@ interface jsdIStackFrame : jsdIEphemeral
/** Internal use only. */
[noscript] readonly attribute JSDStackFrameInfo JSDStackFrameInfo;
/**
* True if stack frame represents a native frame.
*/
readonly attribute boolean isNative;
/**
* True if stack frame represents a frame created as a result of a debugger
* evaluation.

View file

@ -707,11 +707,6 @@ jsd_GetScopeChainForStackFrame(JSDContext* jsdc,
JSDThreadState* jsdthreadstate,
JSDStackFrameInfo* jsdframe);
extern JSBool
jsd_IsStackFrameNative(JSDContext* jsdc,
JSDThreadState* jsdthreadstate,
JSDStackFrameInfo* jsdframe);
extern JSBool
jsd_IsStackFrameDebugger(JSDContext* jsdc,
JSDThreadState* jsdthreadstate,

View file

@ -144,7 +144,7 @@ jsd_Constructing(JSDContext* jsdc, JSContext *cx, JSObject *obj,
JSD_LOCK_OBJECTS(jsdc);
jsdobj = jsd_GetJSDObjectForJSObject(jsdc, obj);
if( jsdobj && !jsdobj->ctorURL && !JS_IsNativeFrame(cx, fp) )
if( jsdobj && !jsdobj->ctorURL && JS_IsScriptFrame(cx, fp) )
{
script = JS_GetFrameScript(cx, fp);
if( script )

View file

@ -65,7 +65,7 @@ _addNewFrame(JSDContext* jsdc,
JSDStackFrameInfo* jsdframe;
JSDScript* jsdscript = NULL;
if (!JS_IsNativeFrame(jsdthreadstate->context, fp))
if (JS_IsScriptFrame(jsdthreadstate->context, fp))
{
JSD_LOCK_SCRIPTS(jsdc);
jsdscript = jsd_FindJSDScript(jsdc, script);
@ -133,7 +133,7 @@ jsd_NewThreadState(JSDContext* jsdc, JSContext *cx )
*/
if (JS_GetFrameThis(cx, fp) &&
((jsdc->flags & JSD_INCLUDE_NATIVE_FRAMES) ||
!JS_IsNativeFrame(cx, fp)))
JS_IsScriptFrame(cx, fp)))
{
JSDStackFrameInfo *frame;
@ -374,28 +374,6 @@ jsd_GetNameForStackFrame(JSDContext* jsdc,
return rv;
}
JSBool
jsd_IsStackFrameNative(JSDContext* jsdc,
JSDThreadState* jsdthreadstate,
JSDStackFrameInfo* jsdframe)
{
JSBool rv;
JSD_LOCK_THREADSTATES(jsdc);
if( jsd_IsValidFrameInThreadState(jsdc, jsdthreadstate, jsdframe) )
{
rv = JS_IsNativeFrame(jsdthreadstate->context, jsdframe->fp);
}
else
{
rv = JS_FALSE;
}
JSD_UNLOCK_THREADSTATES(jsdc);
return rv;
}
JSBool
jsd_IsStackFrameDebugger(JSDContext* jsdc,
JSDThreadState* jsdthreadstate,


@ -1840,14 +1840,6 @@ jsdStackFrame::GetFunctionName(nsACString &_rval)
return NS_OK;
}
NS_IMETHODIMP
jsdStackFrame::GetIsNative(PRBool *_rval)
{
ASSERT_VALID_EPHEMERAL;
*_rval = JSD_IsStackFrameNative (mCx, mThreadState, mStackFrameInfo);
return NS_OK;
}
NS_IMETHODIMP
jsdStackFrame::GetIsDebugger(PRBool *_rval)
{
@ -1902,9 +1894,7 @@ jsdStackFrame::GetLine(PRUint32 *_rval)
jsuword pc = JSD_GetPCForStackFrame (mCx, mThreadState, mStackFrameInfo);
*_rval = JSD_GetClosestLine (mCx, script, pc);
} else {
if (!JSD_IsStackFrameNative(mCx, mThreadState, mStackFrameInfo))
return NS_ERROR_FAILURE;
*_rval = 1;
return NS_ERROR_FAILURE;
}
return NS_OK;
}


@ -750,15 +750,6 @@ JSD_GetNameForStackFrame(JSDContext* jsdc,
return jsd_GetNameForStackFrame(jsdc, jsdthreadstate, jsdframe);
}
JSD_PUBLIC_API(JSBool)
JSD_IsStackFrameNative(JSDContext* jsdc,
JSDThreadState* jsdthreadstate,
JSDStackFrameInfo* jsdframe)
{
JSD_ASSERT_VALID_CONTEXT(jsdc);
return jsd_IsStackFrameNative(jsdc, jsdthreadstate, jsdframe);
}
JSD_PUBLIC_API(JSBool)
JSD_IsStackFrameDebugger(JSDContext* jsdc,
JSDThreadState* jsdthreadstate,


@ -950,14 +950,6 @@ JSD_GetNameForStackFrame(JSDContext* jsdc,
JSDThreadState* jsdthreadstate,
JSDStackFrameInfo* jsdframe);
/*
* True if stack frame represents a native frame.
*/
extern JSD_PUBLIC_API(JSBool)
JSD_IsStackFrameNative(JSDContext* jsdc,
JSDThreadState* jsdthreadstate,
JSDStackFrameInfo* jsdframe);
/*
* True if stack frame represents a frame created as a result of a debugger
* evaluation.


@ -44,10 +44,36 @@
* done by SpiderMonkey.
*/
Narcissus = {
options: { version: 185 },
hostGlobal: this
};
(function() {
var builderTypes = Object.create(null, {
"default": { value: function() {
return new narcissus.parser.DefaultBuilder;
} },
"ssa": { value: function() {
return new narcissus.parser.SSABuilder;
} }
});
var builderType;
var narcissus = {
options: {
version: 185,
get builderType() { return builderType },
set builderType(type) {
var ctor = builderTypes[type];
if (!ctor)
throw new Error("expected builder type ('default' or 'ssa'), got " + type);
builderType = type;
narcissus.definitions.Builder = ctor;
}
},
hostGlobal: this
};
Narcissus = narcissus;
})();
Narcissus.definitions = (function() {
@ -270,7 +296,11 @@ Narcissus.definitions = (function() {
defineGetter: defineGetter,
defineProperty: defineProperty,
isNativeCode: isNativeCode,
makePassthruHandler: makePassthruHandler
makePassthruHandler: makePassthruHandler,
Builder: function() {
throw new Error("no Builder type selected");
}
};
}());
Narcissus.options.builderType = "default";


@ -85,7 +85,7 @@ Narcissus.interpreter = (function() {
x2.callee = x.callee;
x2.scope = x.scope;
try {
x2.execute(parser.parse(new parser.DefaultBuilder, s));
x2.execute(parser.parse(new definitions.Builder, s));
return x2.result;
} catch (e if e instanceof SyntaxError || isStackOverflow(e)) {
/*
@ -119,7 +119,7 @@ Narcissus.interpreter = (function() {
// NB: Use the STATEMENT_FORM constant since we don't want to push this
// function onto the fake compilation context.
var x = { builder: new parser.DefaultBuilder };
var x = { builder: new definitions.Builder };
var f = parser.FunctionDefinition(t, x, false, parser.STATEMENT_FORM);
var s = {object: global, parent: null};
return newFunction(f,{scope:s});
@ -347,7 +347,8 @@ Narcissus.interpreter = (function() {
u.filename, u.lineno);
}
if (u.readOnly || !hasDirectProperty(t, s)) {
definitions.defineProperty(t, s, undefined, x.type !== EVAL_CODE, u.readOnly);
// Does not correctly handle 'const x;' -- see bug 592335.
definitions.defineProperty(t, s, undefined, x.type !== EVAL_CODE, false);
}
}
// FALL THROUGH
@ -498,7 +499,8 @@ Narcissus.interpreter = (function() {
throw THROW;
case RETURN:
x.result = getValue(execute(n.value, x));
// Check for returns with no return value
x.result = n.value ? getValue(execute(n.value, x)) : undefined;
throw RETURN;
case WITH:
@ -1023,7 +1025,7 @@ Narcissus.interpreter = (function() {
return s;
var x = new ExecutionContext(GLOBAL_CODE);
x.execute(parser.parse(new parser.DefaultBuilder, s, f, l));
x.execute(parser.parse(new definitions.Builder, s, f, l));
return x.result;
}
@ -1059,7 +1061,7 @@ Narcissus.interpreter = (function() {
}
}
var b = new parser.DefaultBuilder;
var b = new definitions.Builder;
var x = new ExecutionContext(GLOBAL_CODE);
ExecutionContext.current = x;


@ -105,6 +105,17 @@ Narcissus.parser = (function() {
bindSubBuilders(this, DefaultBuilder.prototype);
}
function pushDestructuringVarDecls(n, x) {
for (var i in n) {
var sub = n[i];
if (sub.type === IDENTIFIER) {
x.varDecls.push(sub);
} else {
pushDestructuringVarDecls(sub, x);
}
}
}
function mkBinopBuilder(type) {
return {
build: !type ? function(t) { return new Node(t); }
@ -381,7 +392,9 @@ Narcissus.parser = (function() {
RETURN: {
build: function(t) {
return new Node(t, RETURN);
var n = new Node(t, RETURN);
n.value = undefined;
return n;
},
setValue: function(n, e) {
@ -508,8 +521,14 @@ Narcissus.parser = (function() {
return new Node(t, VAR);
},
addDestructuringDecl: function(n, n2, x) {
n.push(n2);
pushDestructuringVarDecls(n2.name.destructuredNames, x);
},
addDecl: function(n, n2, x) {
n.push(n2);
x.varDecls.push(n2);
},
finish: function(n) {
@ -518,11 +537,17 @@ Narcissus.parser = (function() {
CONST: {
build: function(t) {
return new Node(t, VAR);
return new Node(t, CONST);
},
addDestructuringDecl: function(n, n2, x) {
n.push(n2);
pushDestructuringVarDecls(n2.name.destructuredNames, x);
},
addDecl: function(n, n2, x) {
n.push(n2);
x.varDecls.push(n2);
},
finish: function(n) {
@ -534,8 +559,14 @@ Narcissus.parser = (function() {
return new Node(t, LET);
},
addDestructuringDecl: function(n, n2, x) {
n.push(n2);
pushDestructuringVarDecls(n2.name.destructuredNames, x);
},
addDecl: function(n, n2, x) {
n.push(n2);
x.varDecls.push(n2);
},
finish: function(n) {
@ -565,7 +596,7 @@ Narcissus.parser = (function() {
LET_BLOCK: {
build: function(t) {
var n = Node(t, LET_BLOCK);
var n = new Node(t, LET_BLOCK);
n.varDecls = [];
return n;
},
@ -605,19 +636,6 @@ Narcissus.parser = (function() {
}
},
EXPRESSION: {
build: function(t, tt) {
return new Node(t, tt);
},
addOperand: function(n, n2) {
n.push(n2);
},
finish: function(n) {
}
},
ASSIGN: {
build: function(t) {
return new Node(t, ASSIGN);
@ -710,8 +728,8 @@ Narcissus.parser = (function() {
PRIMARY: {
build: function(t, tt) {
// NB t.token.type must be NULL, THIS, TRUIE, FALSE, IDENTIFIER,
// NUMBER, STRING, or REGEXP.
// NULL | THIS | TRUE | FALSE | IDENTIFIER | NUMBER
// STRING | REGEXP.
return new Node(t, tt);
},
@ -779,6 +797,15 @@ Narcissus.parser = (function() {
}
},
PROPERTY_NAME: {
build: function(t) {
return new Node(t, IDENTIFIER);
},
finish: function(n) {
}
},
PROPERTY_INIT: {
build: function(t) {
return new Node(t, PROPERTY_INIT);
@ -1106,15 +1133,32 @@ Narcissus.parser = (function() {
x.inForLoopInit = false;
}
if (n2 && t.match(IN)) {
// for-ins always get a for block to help desugaring.
if (!forBlock) {
var forBlock = builder.BLOCK.build(t, x.blockId++);
forBlock.isInternalForInBlock = true;
x.stmtStack.push(forBlock);
}
b.rebuildForIn(n);
b.setObject(n, Expression(t, x), forBlock);
b.setObject(n, Expression(t, x));
if (n2.type === VAR || n2.type === LET) {
if (n2.length !== 1) {
// Destructuring turns one decl into multiple decls, so there
// must be either exactly one destructuring pattern or exactly
// one decl.
if (n2.length !== 1 && n2.destructurings.length !== 1) {
throw new SyntaxError("Invalid for..in left-hand side",
t.filename, n2.lineno);
}
b.setIterator(n, n2[0], n2, forBlock);
if (n2.destructurings.length > 0) {
b.setIterator(n, n2.destructurings[0], n2, forBlock);
} else {
b.setIterator(n, n2[0], n2, forBlock);
}
} else {
if (n2.type === ARRAY_INIT || n2.type === OBJECT_INIT) {
n2.destructuredNames = checkDestructuring(t, x, n2);
}
b.setIterator(n, n2, null, forBlock);
}
} else {
@ -1127,16 +1171,22 @@ Narcissus.parser = (function() {
: Expression(t, x));
t.mustMatch(SEMICOLON);
b.setUpdate(n, (t.peek() === RIGHT_PAREN)
? null
: Expression(t, x));
? null
: Expression(t, x));
}
t.mustMatch(RIGHT_PAREN);
b.setBody(n, nest(t, x, n, Statement));
b.finish(n);
// In case desugaring statements were added to the imaginary
// block.
if (forBlock) {
builder.BLOCK.finish(forBlock);
x.stmtStack.pop();
for (var i = 0, j = forBlock.length; i < j; i++) {
n.body.unshift(forBlock[i]);
}
}
b.finish(n);
return n;
case WHILE:
@ -1197,6 +1247,9 @@ Narcissus.parser = (function() {
i++;
if (i < ss.length - 1 && ss[i+1].isLoop)
i++;
else if (i < ss.length - 1 && ss[i+1].isInternalForInBlock
&& ss[i+2].isLoop)
i++;
else if (tt === CONTINUE)
throw t.newSyntaxError("Invalid continue");
} else {
@ -1225,7 +1278,8 @@ Narcissus.parser = (function() {
case LEFT_CURLY:
// Destructured catch identifiers.
t.unget();
b2.setVarName(n2, DestructuringExpression(t, x, true));
b2.setVarName(n2, DestructuringExpressionNoHoist(t, x, true));
break;
case IDENTIFIER:
b2.setVarName(n2, t.token.value);
break;
@ -1355,7 +1409,7 @@ Narcissus.parser = (function() {
if (!x.inFunction)
throw t.newSyntaxError("Return not in function");
b = x.builder.RETURN;
} else /* (tt === YIELD) */ {
} else /* if (tt === YIELD) */ {
if (!x.inFunction)
throw t.newSyntaxError("Yield not in function");
x.isGenerator = true;
@ -1364,7 +1418,8 @@ Narcissus.parser = (function() {
n = b.build(t);
tt2 = t.peek(true);
if (tt2 !== END && tt2 !== NEWLINE && tt2 !== SEMICOLON && tt2 !== RIGHT_CURLY
if (tt2 !== END && tt2 !== NEWLINE &&
tt2 !== SEMICOLON && tt2 !== RIGHT_CURLY
&& (tt !== YIELD ||
(tt2 !== tt && tt2 !== RIGHT_BRACKET && tt2 !== RIGHT_PAREN &&
tt2 !== COLON && tt2 !== COMMA))) {
@ -1496,10 +1551,8 @@ Narcissus.parser = (function() {
* Statements.
*/
if (x2.needsHoisting) {
/*
* Order is important here! Builders expect funDecls to come after
* varDecls!
*/
// Order is important here! Builders expect funDecls to come
// after varDecls!
builder.setHoists(f.body.id, x2.varDecls.concat(x2.funDecls));
if (x.inFunction) {
@ -1541,10 +1594,9 @@ Narcissus.parser = (function() {
* initializations).
*/
function Variables(t, x, letBlock) {
var b, bDecl, bAssign, n, n2, n3, ss, i, s, tt, id, data;
var b, n, n2, ss, i, s, tt;
var builder = x.builder;
var bDecl = builder.DECL;
var bAssign = builder.ASSIGN;
switch (t.token.type) {
case VAR:
@ -1579,25 +1631,24 @@ Narcissus.parser = (function() {
}
n = b.build(t);
initializers = [];
n.destructurings = [];
do {
tt = t.get();
/*
* FIXME Should have a special DECLARATION node instead of overloading
* IDENTIFIER to mean both identifier declarations and destructured
* declarations.
*/
n2 = bDecl.build(t);
if (tt === LEFT_BRACKET || tt === LEFT_CURLY) {
// Pass in s if we need to add each pattern matched into
// its varDecls, else pass in x.
data = null;
// Need to unget to parse the full destructured expression.
t.unget();
bDecl.setName(n2, DestructuringExpression(t, x, true, s));
var dexp = DestructuringExpressionNoHoist(t, x, true, s);
n2 = bDecl.build(t);
bDecl.setName(n2, dexp);
bDecl.setReadOnly(n2, n.type === CONST);
b.addDestructuringDecl(n, n2, s);
n.destructurings.push({ exp: dexp, decl: n2 });
if (x.inForLoopInit && t.peek() === IN) {
b.addDecl(n, n2, s);
continue;
}
@ -1605,22 +1656,16 @@ Narcissus.parser = (function() {
if (t.token.assignOp)
throw t.newSyntaxError("Invalid variable initialization");
// Parse the init as a normal assignment.
n3 = bAssign.build(t);
bAssign.addOperand(n3, n2.name);
bAssign.addOperand(n3, AssignExpression(t, x));
bAssign.finish(n3);
// But only add the rhs as the initializer.
bDecl.setInitializer(n2, n3[1]);
bDecl.setInitializer(n2, AssignExpression(t, x));
bDecl.finish(n2);
b.addDecl(n, n2, s);
continue;
}
if (tt !== IDENTIFIER)
throw t.newSyntaxError("missing variable name");
n2 = bDecl.build(t);
bDecl.setName(n2, t.token.value);
bDecl.setReadOnly(n2, n.type === CONST);
b.addDecl(n, n2, s);
@ -1629,21 +1674,10 @@ Narcissus.parser = (function() {
if (t.token.assignOp)
throw t.newSyntaxError("Invalid variable initialization");
// Parse the init as a normal assignment.
id = new Node(n2.tokenizer, IDENTIFIER);
n3 = bAssign.build(t);
id.name = id.value = n2.name;
bAssign.addOperand(n3, id);
bAssign.addOperand(n3, AssignExpression(t, x));
bAssign.finish(n3);
initializers.push(n3);
// But only add the rhs as the initializer.
bDecl.setInitializer(n2, n3[1]);
bDecl.setInitializer(n2, AssignExpression(t, x));
}
bDecl.finish(n2);
s.varDecls.push(n2);
} while (t.match(COMMA));
b.finish(n);
return n;
@ -1697,40 +1731,59 @@ Narcissus.parser = (function() {
if (n.type !== ARRAY_INIT && n.type !== OBJECT_INIT)
return;
var nn, n2, lhs, rhs, b = x.builder.DECL;
var lhss = {};
var nn, n2, idx, sub;
for (var i = 0, j = n.length; i < j; i++) {
nn = n[i];
if (!nn)
if (!(nn = n[i]))
continue;
if (nn.type === PROPERTY_INIT)
lhs = nn[0], rhs = nn[1];
else
lhs = null, rhs = null;
if (rhs && (rhs.type === ARRAY_INIT || rhs.type === OBJECT_INIT))
checkDestructuring(t, x, rhs, simpleNamesOnly, data);
if (lhs && simpleNamesOnly) {
// In declarations, lhs must be simple names
if (lhs.type !== IDENTIFIER) {
if (nn.type === PROPERTY_INIT) {
sub = nn[1];
idx = nn[0].value;
} else if (n.type === OBJECT_INIT) {
// Do we have destructuring shorthand {foo, bar}?
sub = nn;
idx = nn.value;
} else {
sub = nn;
idx = i;
}
if (sub.type === ARRAY_INIT || sub.type === OBJECT_INIT) {
lhss[idx] = checkDestructuring(t, x, sub,
simpleNamesOnly, data);
} else {
if (simpleNamesOnly && sub.type !== IDENTIFIER) {
// In declarations, lhs must be simple names
throw t.newSyntaxError("missing name in pattern");
} else if (data) {
n2 = b.build(t);
b.setName(n2, lhs.value);
// Don't need to set initializer because it's just for
// hoisting anyways.
b.finish(n2);
// Each pattern needs to be added to varDecls.
data.varDecls.push(n2);
}
lhss[idx] = sub;
}
}
return lhss;
}
function DestructuringExpression(t, x, simpleNamesOnly, data) {
var n = PrimaryExpression(t, x);
checkDestructuring(t, x, n, simpleNamesOnly, data);
// Keep the list of lefthand sides in case the builder wants to
// desugar.
n.destructuredNames = checkDestructuring(t, x, n,
simpleNamesOnly, data);
return n;
}
function DestructuringExpressionNoHoist(t, x, simpleNamesOnly, data) {
// Sometimes we don't want to flag the pattern as possible hoists, so
// pretend it's the second pass.
var builder = x.builder;
var oldSP = builder.secondPass;
builder.secondPass = true;
var dexp = DestructuringExpression(t, x, simpleNamesOnly, data);
builder.secondPass = oldSP;
return dexp;
}
function GeneratorExpression(t, x, e) {
var n, b = x.builder.GENERATOR;
@ -1770,7 +1823,7 @@ Narcissus.parser = (function() {
case LEFT_CURLY:
t.unget();
// Destructured left side of for in comprehension tails.
b2.setIterator(n, DestructuringExpression(t, x), null);
b2.setIterator(n, DestructuringExpressionNoHoist(t, x), null);
break;
case IDENTIFIER:
@ -1778,14 +1831,12 @@ Narcissus.parser = (function() {
bDecl.setName(n3, n3.value);
bDecl.finish(n3);
var n2 = bVar.build(t);
bVar.addDecl(n2, n3);
bVar.addDecl(n2, n3, x);
bVar.finish(n2);
bFor.setIterator(n, n3, n2);
/*
* Don't add to varDecls since the semantics of comprehensions is
* such that the variables are in their own function when
* desugared.
*/
// Don't add to varDecls since the semantics of comprehensions is
// such that the variables are in their own function when
// desugared.
break;
default:
@ -1876,7 +1927,7 @@ Narcissus.parser = (function() {
switch (lhs.type) {
case OBJECT_INIT:
case ARRAY_INIT:
checkDestructuring(t, x, lhs);
lhs.destructuredNames = checkDestructuring(t, x, lhs);
// FALL THROUGH
case IDENTIFIER: case DOT: case INDEX: case CALL:
break;
@ -2119,7 +2170,10 @@ Narcissus.parser = (function() {
}
function MemberExpression(t, x, allowCallSyntax) {
var n, n2, tt, b = x.builder.MEMBER;
var n, n2, name, tt;
var builder = x.builder;
var b = builder.MEMBER;
var b2 = builder.PROPERTY_NAME;
if (t.match(NEW)) {
n = b.build(t);
@ -2139,7 +2193,9 @@ Narcissus.parser = (function() {
n2 = b.build(t);
b.addOperand(n2, n);
t.mustMatch(IDENTIFIER);
b.addOperand(n2, b.build(t));
name = b2.build(t);
b2.finish(name);
b.addOperand(n2, name);
break;
case LEFT_BRACKET:
@ -2200,6 +2256,7 @@ Narcissus.parser = (function() {
var bArrayInit = builder.ARRAY_INIT;
var bArrayComp = builder.ARRAY_COMP;
var bPrimary = builder.PRIMARY;
var bPropName = builder.PROPERTY_NAME;
var bObjInit = builder.OBJECT_INIT;
var bPropInit = builder.PROPERTY_INIT;
@ -2230,7 +2287,7 @@ Narcissus.parser = (function() {
n = n2;
}
t.mustMatch(RIGHT_BRACKET);
bPrimary.finish(n);
bArrayInit.finish(n);
break;
case LEFT_CURLY:
@ -2250,8 +2307,8 @@ Narcissus.parser = (function() {
} else {
switch (tt) {
case IDENTIFIER: case NUMBER: case STRING:
id = bPrimary.build(t, IDENTIFIER);
bPrimary.finish(id);
id = bPropName.build(t);
bPropName.finish(id);
break;
case RIGHT_CURLY:
if (x.ecma3OnlyMode)
@ -2259,8 +2316,8 @@ Narcissus.parser = (function() {
break object_init;
default:
if (t.token.value in definitions.keywords) {
id = bPrimary.build(t, IDENTIFIER);
bPrimary.finish(id);
id = bPropName.build(t);
bPropName.finish(id);
break;
}
throw t.newSyntaxError("Invalid property name");
@ -2328,6 +2385,9 @@ Narcissus.parser = (function() {
parse: parse,
Node: Node,
DefaultBuilder: DefaultBuilder,
get SSABuilder() {
throw new Error("SSA builder not yet supported");
},
bindSubBuilders: bindSubBuilders,
DECLARED_FORM: DECLARED_FORM,
EXPRESSED_FORM: EXPRESSED_FORM,


@ -150,6 +150,7 @@ CPPSRCS = \
jsparse.cpp \
jsproxy.cpp \
jsprf.cpp \
jsprobes.cpp \
jspropertycache.cpp \
jspropertytree.cpp \
jsreflect.cpp \
@ -158,7 +159,6 @@ CPPSRCS = \
jsscope.cpp \
jsscript.cpp \
jsstr.cpp \
jstask.cpp \
jstypedarray.cpp \
jsutil.cpp \
jswrapper.cpp \
@ -167,11 +167,6 @@ CPPSRCS = \
prmjtime.cpp \
$(NULL)
ifdef HAVE_DTRACE
CPPSRCS += \
jsdtracef.cpp
endif
INSTALLED_HEADERS = \
js-config.h \
jsautocfg.h \
@ -211,6 +206,7 @@ INSTALLED_HEADERS = \
jsparse.h \
jsproxy.h \
jsprf.h \
jsprobes.h \
jspropertycache.h \
jspropertycacheinlines.h \
jspropertytree.h \
@ -226,7 +222,6 @@ INSTALLED_HEADERS = \
jsstaticcheck.h \
jsstdint.h \
jsstr.h \
jstask.h \
jstracer.h \
jstypedarray.h \
jstypes.h \
@ -287,8 +282,56 @@ ASFILES += jswince.asm
endif
endif # ENABLE_TRACEJIT
ifdef ENABLE_METHODJIT
ifeq ($(TARGET_CPU), powerpc)
###############################################
# BEGIN include sources for the method JIT
#
VPATH += $(srcdir)/methodjit
CPPSRCS += MethodJIT.cpp \
BytecodeAnalyzer.cpp \
StubCalls.cpp \
Compiler.cpp \
FrameState.cpp \
FastArithmetic.cpp \
FastOps.cpp \
StubCompiler.cpp \
MonoIC.cpp \
PolyIC.cpp \
ImmutableSync.cpp \
InvokeHelpers.cpp \
Retcon.cpp \
TrampolineCompiler.cpp \
$(NULL)
# PICStubCompiler.cpp \
ifeq (86, $(findstring 86,$(TARGET_CPU)))
ifeq (x86_64, $(TARGET_CPU))
ifdef _MSC_VER
ASFILES += TrampolineMasmX64.asm
endif
ifdef SOLARIS_SUNPRO_CXX
ASFILES += TrampolineSUNWX64.s
endif
#CPPSRCS += only_on_x86_64.cpp
else
#CPPSRCS += only_on_x86.cpp
ifdef SOLARIS_SUNPRO_CXX
ASFILES += TrampolineSUNWX86.s
endif
endif
endif
ifeq (arm, $(TARGET_CPU))
#CPPSRCS += only_on_arm.cpp
endif
#
# END include sources for the method JIT
#############################################
endif
ifeq (,$(filter-out powerpc sparc,$(TARGET_CPU)))
VPATH += $(srcdir)/assembler \
$(srcdir)/assembler/wtf \
@ -377,7 +420,6 @@ endif # JS_HAS_CTYPES
ifdef HAVE_DTRACE
INSTALLED_HEADERS += \
jsdtracef.h \
$(CURDIR)/javascript-trace.h \
$(NULL)
endif
@ -781,7 +823,7 @@ $(CURDIR)/javascript-trace.h: $(srcdir)/javascript-trace.d
# We can't automatically generate dependencies on auto-generated headers;
# we have to list them explicitly.
$(addsuffix .$(OBJ_SUFFIX),jsdtracef jsinterp jsobj): $(CURDIR)/javascript-trace.h
$(addsuffix .$(OBJ_SUFFIX),jsprobes jsinterp jsobj): $(CURDIR)/javascript-trace.h
endif
ifdef ENABLE_TRACEJIT
@ -830,6 +872,24 @@ CXXFLAGS += -DUSE_SYSTEM_MALLOC=1 -DENABLE_ASSEMBLER=1 -DENABLE_JIT=1
INCLUDES += -I$(srcdir)/assembler -I$(srcdir)/yarr
ifdef ENABLE_METHODJIT
# Build a standalone test program that exercises the assembler
# sources a bit.
TESTMAIN_OBJS = \
Assertions.$(OBJ_SUFFIX) \
ExecutableAllocatorPosix.$(OBJ_SUFFIX) \
ExecutableAllocatorWin.$(OBJ_SUFFIX) \
ExecutableAllocator.$(OBJ_SUFFIX) \
ARMAssembler.$(OBJ_SUFFIX) \
MacroAssemblerARM.$(OBJ_SUFFIX) \
TestMain.$(OBJ_SUFFIX) \
jsutil.$(OBJ_SUFFIX) \
jslog2.$(OBJ_SUFFIX)
TestMain$(HOST_BIN_SUFFIX): $(TESTMAIN_OBJS)
$(CXX) -o TestMain$(HOST_BIN_SUFFIX) $(TESTMAIN_OBJS)
endif
#
# END kludges for the Nitro assembler
###############################################


@ -0,0 +1,929 @@
// A short test program with which to experiment with the assembler.
//satisfies CPU(X86_64)
//#define WTF_CPU_X86_64
// satisfies ENABLE(ASSEMBLER)
#define ENABLE_ASSEMBLER 1
// satisfies ENABLE(JIT)
#define ENABLE_JIT 1
#define USE_SYSTEM_MALLOC 1
// leads to FORCE_SYSTEM_MALLOC in wtf/FastMalloc.cpp
#include <jit/ExecutableAllocator.h>
#include <assembler/LinkBuffer.h>
#include <assembler/CodeLocation.h>
#include <assembler/RepatchBuffer.h>
#include <assembler/MacroAssembler.h>
#include <stdio.h>
/////////////////////////////////////////////////////////////////
// Temporary scaffolding for selecting the arch
#undef ARCH_x86
#undef ARCH_amd64
#undef ARCH_arm
#if defined(__APPLE__) && defined(__i386__)
# define ARCH_x86 1
#elif defined(__APPLE__) && defined(__x86_64__)
# define ARCH_amd64 1
#elif defined(__linux__) && defined(__i386__)
# define ARCH_x86 1
#elif defined(__linux__) && defined(__x86_64__)
# define ARCH_amd64 1
#elif defined(__linux__) && defined(__arm__)
# define ARCH_arm 1
#elif defined(_MSC_VER) && defined(_M_IX86)
# define ARCH_x86 1
#endif
/////////////////////////////////////////////////////////////////
// just somewhere convenient to put a breakpoint, before
// running gdb
#if WTF_COMPILER_GCC
__attribute__((noinline))
#endif
void pre_run ( void ) { }
/////////////////////////////////////////////////////////////////
//// test1 (simple straight line code)
#if WTF_COMPILER_GCC
void test1 ( void )
{
printf("\n------------ Test 1 (straight line code) ------------\n\n" );
// Create new assembler
JSC::MacroAssembler* am = new JSC::MacroAssembler();
#if defined(ARCH_amd64)
JSC::X86Registers::RegisterID areg = JSC::X86Registers::r15;
// dump some instructions into it
// xor %r15,%r15
// add $0x7b,%r15
// add $0x141,%r15
// retq
am->xorPtr(areg,areg);
am->addPtr(JSC::MacroAssembler::Imm32(123), areg);
am->addPtr(JSC::MacroAssembler::Imm32(321), areg);
am->ret();
#endif
#if defined(ARCH_x86)
JSC::X86Registers::RegisterID areg = JSC::X86Registers::edi;
// dump some instructions into it
// xor %edi,%edi
// add $0x7b,%edi
// add $0x141,%edi
// ret
am->xorPtr(areg,areg);
am->addPtr(JSC::MacroAssembler::Imm32(123), areg);
am->addPtr(JSC::MacroAssembler::Imm32(321), areg);
am->ret();
#endif
#if defined(ARCH_arm)
JSC::ARMRegisters::RegisterID areg = JSC::ARMRegisters::r8;
// eors r8, r8, r8
// adds r8, r8, #123 ; 0x7b
// mov r3, #256 ; 0x100
// orr r3, r3, #65 ; 0x41
// adds r8, r8, r3
// mov pc, lr
am->xorPtr(areg,areg);
am->addPtr(JSC::MacroAssembler::Imm32(123), areg);
am->addPtr(JSC::MacroAssembler::Imm32(321), areg);
am->ret();
#endif
// prepare a link buffer, into which we can copy the completed insns
JSC::ExecutableAllocator* eal = new JSC::ExecutableAllocator();
// intermediate step .. get the pool suited for the size of code in 'am'
//WTF::PassRefPtr<JSC::ExecutablePool> ep = eal->poolForSize( am->size() );
JSC::ExecutablePool* ep = eal->poolForSize( am->size() );
// constructor for LinkBuffer asks ep to allocate r-x memory,
// then copies it there.
JSC::LinkBuffer patchBuffer(am, ep);
// finalize
JSC::MacroAssemblerCodeRef cr = patchBuffer.finalizeCode();
// cr now holds a pointer to the final runnable code.
void* entry = cr.m_code.executableAddress();
printf("disas %p %p\n",
entry, (char*)entry + cr.m_size);
pre_run();
unsigned long result = 0x55555555;
#if defined(ARCH_amd64)
// call the generated piece of code. It puts its result in r15.
__asm__ __volatile__(
"callq *%1" "\n\t"
"movq %%r15, %0" "\n"
:/*out*/ "=r"(result)
:/*in*/ "r"(entry)
:/*trash*/ "r15","cc"
);
#endif
#if defined(ARCH_x86)
// call the generated piece of code. It puts its result in edi.
__asm__ __volatile__(
"calll *%1" "\n\t"
"movl %%edi, %0" "\n"
:/*out*/ "=r"(result)
:/*in*/ "r"(entry)
:/*trash*/ "edi","cc"
);
#endif
#if defined(ARCH_arm)
// call the generated piece of code. It puts its result in r8.
__asm__ __volatile__(
"blx %1" "\n\t"
"mov %0, %%r8" "\n"
:/*out*/ "=r"(result)
:/*in*/ "r"(entry)
:/*trash*/ "r8","cc"
);
#endif
printf("\n");
printf("value computed is %lu (expected 444)\n", result);
printf("\n");
delete eal;
delete am;
}
#endif /* WTF_COMPILER_GCC */
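
Each test in this file repeats the same assemble-and-publish sequence. A small helper, given as a sketch rather than part of the patch, makes that pipeline explicit:

    // Size a pool to the assembler's output, copy the code into executable
    // memory via LinkBuffer, finalize, and hand back the entry point. The
    // pool is intentionally leaked, mirroring the tests in this file.
    static void* finalizeToEntry(JSC::MacroAssembler* am, JSC::ExecutableAllocator* eal)
    {
        JSC::ExecutablePool* ep = eal->poolForSize(am->size());
        JSC::LinkBuffer patchBuffer(am, ep);
        JSC::MacroAssemblerCodeRef cr = patchBuffer.finalizeCode();
        return cr.m_code.executableAddress();
    }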
/////////////////////////////////////////////////////////////////
//// test2 (a simple counting-down loop)
#if WTF_COMPILER_GCC
void test2 ( void )
{
printf("\n------------ Test 2 (mini loop) ------------\n\n" );
// Create new assembler
JSC::MacroAssembler* am = new JSC::MacroAssembler();
#if defined(ARCH_amd64)
JSC::X86Registers::RegisterID areg = JSC::X86Registers::r15;
// xor %r15,%r15
// add $0x7b,%r15
// add $0x141,%r15
// sub $0x1,%r15
// mov $0x0,%r11
// cmp %r11,%r15
// jne 0x7ff6d3e6a00e
// retq
// so r15 always winds up being zero
am->xorPtr(areg,areg);
am->addPtr(JSC::MacroAssembler::Imm32(123), areg);
am->addPtr(JSC::MacroAssembler::Imm32(321), areg);
JSC::MacroAssembler::Label loopHeadLabel(am);
am->subPtr(JSC::MacroAssembler::Imm32(1), areg);
JSC::MacroAssembler::Jump j
= am->branchPtr(JSC::MacroAssembler::NotEqual,
areg, JSC::MacroAssembler::ImmPtr(0));
j.linkTo(loopHeadLabel, am);
am->ret();
#endif
#if defined(ARCH_x86)
JSC::X86Registers::RegisterID areg = JSC::X86Registers::edi;
// xor %edi,%edi
// add $0x7b,%edi
// add $0x141,%edi
// sub $0x1,%edi
// test %edi,%edi
// jne 0xf7f9700b
// ret
// so edi always winds up being zero
am->xorPtr(areg,areg);
am->addPtr(JSC::MacroAssembler::Imm32(123), areg);
am->addPtr(JSC::MacroAssembler::Imm32(321), areg);
JSC::MacroAssembler::Label loopHeadLabel(am);
am->subPtr(JSC::MacroAssembler::Imm32(1), areg);
JSC::MacroAssembler::Jump j
= am->branchPtr(JSC::MacroAssembler::NotEqual,
areg, JSC::MacroAssembler::ImmPtr(0));
j.linkTo(loopHeadLabel, am);
am->ret();
#endif
#if defined(ARCH_arm)
JSC::ARMRegisters::RegisterID areg = JSC::ARMRegisters::r8;
// eors r8, r8, r8
// adds r8, r8, #123 ; 0x7b
// mov r3, #256 ; 0x100
// orr r3, r3, #65 ; 0x41
// adds r8, r8, r3
// subs r8, r8, #1 ; 0x1
// ldr r3, [pc, #8] ; 0x40026028
// cmp r8, r3
// bne 0x40026014
// mov pc, lr
// andeq r0, r0, r0 // DATA (0)
// andeq r0, r0, r4, lsl r0 // DATA (?? what's this for?)
// so r8 always winds up being zero
am->xorPtr(areg,areg);
am->addPtr(JSC::MacroAssembler::Imm32(123), areg);
am->addPtr(JSC::MacroAssembler::Imm32(321), areg);
JSC::MacroAssembler::Label loopHeadLabel(am);
am->subPtr(JSC::MacroAssembler::Imm32(1), areg);
JSC::MacroAssembler::Jump j
= am->branchPtr(JSC::MacroAssembler::NotEqual,
areg, JSC::MacroAssembler::ImmPtr(0));
j.linkTo(loopHeadLabel, am);
am->ret();
#endif
// prepare a link buffer, into which we can copy the completed insns
JSC::ExecutableAllocator* eal = new JSC::ExecutableAllocator();
// intermediate step .. get the pool suited for the size of code in 'am'
//WTF::PassRefPtr<JSC::ExecutablePool> ep = eal->poolForSize( am->size() );
JSC::ExecutablePool* ep = eal->poolForSize( am->size() );
// constructor for LinkBuffer asks ep to allocate r-x memory,
// then copies it there.
JSC::LinkBuffer patchBuffer(am, ep);
// finalize
JSC::MacroAssemblerCodeRef cr = patchBuffer.finalizeCode();
// cr now holds a pointer to the final runnable code.
void* entry = cr.m_code.executableAddress();
printf("disas %p %p\n",
entry, (char*)entry + cr.m_size);
pre_run();
unsigned long result = 0x55555555;
#if defined(ARCH_amd64)
// call the generated piece of code. It puts its result in r15.
__asm__ __volatile__(
"callq *%1" "\n\t"
"movq %%r15, %0" "\n"
:/*out*/ "=r"(result)
:/*in*/ "r"(entry)
:/*trash*/ "r15","cc"
);
#endif
#if defined(ARCH_x86)
// call the generated piece of code. It puts its result in edi.
__asm__ __volatile__(
"calll *%1" "\n\t"
"movl %%edi, %0" "\n"
:/*out*/ "=r"(result)
:/*in*/ "r"(entry)
:/*trash*/ "edi","cc"
);
#endif
#if defined(ARCH_arm)
// call the generated piece of code. It puts its result in r8.
__asm__ __volatile__(
"blx %1" "\n\t"
"mov %0, %%r8" "\n"
:/*out*/ "=r"(result)
:/*in*/ "r"(entry)
:/*trash*/ "r8","cc"
);
#endif
printf("\n");
printf("value computed is %lu (expected 0)\n", result);
printf("\n");
delete eal;
delete am;
}
#endif /* WTF_COMPILER_GCC */
/////////////////////////////////////////////////////////////////
//// test3 (if-then-else)
#if WTF_COMPILER_GCC
void test3 ( void )
{
printf("\n------------ Test 3 (if-then-else) ------------\n\n" );
// Create new assembler
JSC::MacroAssembler* am = new JSC::MacroAssembler();
#if defined(ARCH_amd64)
JSC::X86Registers::RegisterID areg = JSC::X86Registers::r15;
// mov $0x64,%r15d
// mov $0x0,%r11
// cmp %r11,%r15
// jne 0x7ff6d3e6a024
// mov $0x40,%r15d
// jmpq 0x7ff6d3e6a02a
// mov $0x4,%r15d
// retq
// so r15 ends up being 4
// put a value in reg
am->move(JSC::MacroAssembler::Imm32(100), areg);
// test, and conditionally jump to 'else' branch
JSC::MacroAssembler::Jump jToElse
= am->branchPtr(JSC::MacroAssembler::NotEqual,
areg, JSC::MacroAssembler::ImmPtr(0));
// 'then' branch
am->move(JSC::MacroAssembler::Imm32(64), areg);
JSC::MacroAssembler::Jump jToAfter
= am->jump();
// 'else' branch
JSC::MacroAssembler::Label elseLbl(am);
am->move(JSC::MacroAssembler::Imm32(4), areg);
// after
JSC::MacroAssembler::Label afterLbl(am);
am->ret();
#endif
#if defined(ARCH_x86)
JSC::X86Registers::RegisterID areg = JSC::X86Registers::edi;
// mov $0x64,%edi
// test %edi,%edi
// jne 0xf7f22017
// mov $0x40,%edi
// jmp 0xf7f2201c
// mov $0x4,%edi
// ret
// so edi ends up being 4
// put a value in reg
am->move(JSC::MacroAssembler::Imm32(100), areg);
// test, and conditionally jump to 'else' branch
JSC::MacroAssembler::Jump jToElse
= am->branchPtr(JSC::MacroAssembler::NotEqual,
areg, JSC::MacroAssembler::ImmPtr(0));
// 'then' branch
am->move(JSC::MacroAssembler::Imm32(64), areg);
JSC::MacroAssembler::Jump jToAfter
= am->jump();
// 'else' branch
JSC::MacroAssembler::Label elseLbl(am);
am->move(JSC::MacroAssembler::Imm32(4), areg);
// after
JSC::MacroAssembler::Label afterLbl(am);
am->ret();
#endif
#if defined(ARCH_arm)
JSC::ARMRegisters::RegisterID areg = JSC::ARMRegisters::r8;
// mov r8, #100 ; 0x64
// ldr r3, [pc, #20] ; 0x40026020
// cmp r8, r3
// bne 0x40026018
// mov r8, #64 ; 0x40
// b 0x4002601c
// mov r8, #4 ; 0x4
// mov pc, lr
// andeq r0, r0, r0 // DATA
// andeq r0, r0, r8, lsl r0 // DATA
// andeq r0, r0, r12, lsl r0 // DATA
// ldr r3, [r3, -r3] // DATA
// so r8 ends up being 4
// put a value in reg
am->move(JSC::MacroAssembler::Imm32(100), areg);
// test, and conditionally jump to 'else' branch
JSC::MacroAssembler::Jump jToElse
= am->branchPtr(JSC::MacroAssembler::NotEqual,
areg, JSC::MacroAssembler::ImmPtr(0));
// 'then' branch
am->move(JSC::MacroAssembler::Imm32(64), areg);
JSC::MacroAssembler::Jump jToAfter
= am->jump();
// 'else' branch
JSC::MacroAssembler::Label elseLbl(am);
am->move(JSC::MacroAssembler::Imm32(4), areg);
// after
JSC::MacroAssembler::Label afterLbl(am);
am->ret();
#endif
// set branch targets appropriately
jToElse.linkTo(elseLbl, am);
jToAfter.linkTo(afterLbl, am);
// prepare a link buffer, into which we can copy the completed insns
JSC::ExecutableAllocator* eal = new JSC::ExecutableAllocator();
// intermediate step .. get the pool suited for the size of code in 'am'
//WTF::PassRefPtr<JSC::ExecutablePool> ep = eal->poolForSize( am->size() );
JSC::ExecutablePool* ep = eal->poolForSize( am->size() );
// constructor for LinkBuffer asks ep to allocate r-x memory,
// then copies it there.
JSC::LinkBuffer patchBuffer(am, ep);
// finalize
JSC::MacroAssemblerCodeRef cr = patchBuffer.finalizeCode();
// cr now holds a pointer to the final runnable code.
void* entry = cr.m_code.executableAddress();
printf("disas %p %p\n",
entry, (char*)entry + cr.m_size);
pre_run();
unsigned long result = 0x55555555;
#if defined(ARCH_amd64)
// call the generated piece of code. It puts its result in r15.
__asm__ __volatile__(
"callq *%1" "\n\t"
"movq %%r15, %0" "\n"
:/*out*/ "=r"(result)
:/*in*/ "r"(entry)
:/*trash*/ "r15","cc"
);
#endif
#if defined(ARCH_x86)
// call the generated piece of code. It puts its result in edi.
__asm__ __volatile__(
"calll *%1" "\n\t"
"movl %%edi, %0" "\n"
:/*out*/ "=r"(result)
:/*in*/ "r"(entry)
:/*trash*/ "edi","cc"
);
#endif
#if defined(ARCH_arm)
// call the generated piece of code. It puts its result in r8.
__asm__ __volatile__(
"blx %1" "\n\t"
"mov %0, %%r8" "\n"
:/*out*/ "=r"(result)
:/*in*/ "r"(entry)
:/*trash*/ "r8","cc"
);
#endif
printf("\n");
printf("value computed is %lu (expected 4)\n", result);
printf("\n");
delete eal;
delete am;
}
#endif /* WTF_COMPILER_GCC */
/////////////////////////////////////////////////////////////////
//// test4 (callable function)
void test4 ( void )
{
printf("\n------------ Test 4 (callable fn) ------------\n\n" );
// Create new assembler
JSC::MacroAssembler* am = new JSC::MacroAssembler();
#if defined(ARCH_amd64)
// ADD FN PROLOGUE/EPILOGUE so as to make a mini-function
// push %rbp
// mov %rsp,%rbp
// push %rbx
// push %r12
// push %r13
// push %r14
// push %r15
// xor %rax,%rax
// add $0x7b,%rax
// add $0x141,%rax
// pop %r15
// pop %r14
// pop %r13
// pop %r12
// pop %rbx
// mov %rbp,%rsp
// pop %rbp
// retq
// callable as a normal function, returns 444
JSC::X86Registers::RegisterID rreg = JSC::X86Registers::eax;
am->push(JSC::X86Registers::ebp);
am->move(JSC::X86Registers::esp, JSC::X86Registers::ebp);
am->push(JSC::X86Registers::ebx);
am->push(JSC::X86Registers::r12);
am->push(JSC::X86Registers::r13);
am->push(JSC::X86Registers::r14);
am->push(JSC::X86Registers::r15);
am->xorPtr(rreg,rreg);
am->addPtr(JSC::MacroAssembler::Imm32(123), rreg);
am->addPtr(JSC::MacroAssembler::Imm32(321), rreg);
am->pop(JSC::X86Registers::r15);
am->pop(JSC::X86Registers::r14);
am->pop(JSC::X86Registers::r13);
am->pop(JSC::X86Registers::r12);
am->pop(JSC::X86Registers::ebx);
am->move(JSC::X86Registers::ebp, JSC::X86Registers::esp);
am->pop(JSC::X86Registers::ebp);
am->ret();
#endif
#if defined(ARCH_x86)
// ADD FN PROLOGUE/EPILOGUE so as to make a mini-function
// push %ebp
// mov %esp,%ebp
// push %ebx
// push %esi
// push %edi
// xor %eax,%eax
// add $0x7b,%eax
// add $0x141,%eax
// pop %edi
// pop %esi
// pop %ebx
// mov %ebp,%esp
// pop %ebp
// ret
// callable as a normal function, returns 444
JSC::X86Registers::RegisterID rreg = JSC::X86Registers::eax;
am->push(JSC::X86Registers::ebp);
am->move(JSC::X86Registers::esp, JSC::X86Registers::ebp);
am->push(JSC::X86Registers::ebx);
am->push(JSC::X86Registers::esi);
am->push(JSC::X86Registers::edi);
am->xorPtr(rreg,rreg);
am->addPtr(JSC::MacroAssembler::Imm32(123), rreg);
am->addPtr(JSC::MacroAssembler::Imm32(321), rreg);
am->pop(JSC::X86Registers::edi);
am->pop(JSC::X86Registers::esi);
am->pop(JSC::X86Registers::ebx);
am->move(JSC::X86Registers::ebp, JSC::X86Registers::esp);
am->pop(JSC::X86Registers::ebp);
am->ret();
#endif
#if defined(ARCH_arm)
// ADD FN PROLOGUE/EPILOGUE so as to make a mini-function
// push {r4} ; (str r4, [sp, #-4]!)
// push {r5} ; (str r5, [sp, #-4]!)
// push {r6} ; (str r6, [sp, #-4]!)
// push {r7} ; (str r7, [sp, #-4]!)
// push {r8} ; (str r8, [sp, #-4]!)
// push {r9} ; (str r9, [sp, #-4]!)
// push {r10} ; (str r10, [sp, #-4]!)
// push {r11} ; (str r11, [sp, #-4]!)
// eors r0, r0, r0
// adds r0, r0, #123 ; 0x7b
// mov r3, #256 ; 0x100
// orr r3, r3, #65 ; 0x41
// adds r0, r0, r3
// pop {r11} ; (ldr r11, [sp], #4)
// pop {r10} ; (ldr r10, [sp], #4)
// pop {r9} ; (ldr r9, [sp], #4)
// pop {r8} ; (ldr r8, [sp], #4)
// pop {r7} ; (ldr r7, [sp], #4)
// pop {r6} ; (ldr r6, [sp], #4)
// pop {r5} ; (ldr r5, [sp], #4)
// pop {r4} ; (ldr r4, [sp], #4)
// mov pc, lr
// callable as a normal function, returns 444
JSC::ARMRegisters::RegisterID rreg = JSC::ARMRegisters::r0;
am->push(JSC::ARMRegisters::r4);
am->push(JSC::ARMRegisters::r5);
am->push(JSC::ARMRegisters::r6);
am->push(JSC::ARMRegisters::r7);
am->push(JSC::ARMRegisters::r8);
am->push(JSC::ARMRegisters::r9);
am->push(JSC::ARMRegisters::r10);
am->push(JSC::ARMRegisters::r11);
am->xorPtr(rreg,rreg);
am->addPtr(JSC::MacroAssembler::Imm32(123), rreg);
am->addPtr(JSC::MacroAssembler::Imm32(321), rreg);
am->pop(JSC::ARMRegisters::r11);
am->pop(JSC::ARMRegisters::r10);
am->pop(JSC::ARMRegisters::r9);
am->pop(JSC::ARMRegisters::r8);
am->pop(JSC::ARMRegisters::r7);
am->pop(JSC::ARMRegisters::r6);
am->pop(JSC::ARMRegisters::r5);
am->pop(JSC::ARMRegisters::r4);
am->ret();
#endif
// prepare a link buffer, into which we can copy the completed insns
JSC::ExecutableAllocator* eal = new JSC::ExecutableAllocator();
// intermediate step .. get the pool suited for the size of code in 'am'
//WTF::PassRefPtr<JSC::ExecutablePool> ep = eal->poolForSize( am->size() );
JSC::ExecutablePool* ep = eal->poolForSize( am->size() );
// constructor for LinkBuffer asks ep to allocate r-x memory,
// then copies it there.
JSC::LinkBuffer patchBuffer(am, ep);
// now fix up any branches/calls
//JSC::FunctionPtr target = JSC::FunctionPtr::FunctionPtr( &cube );
// finalize
JSC::MacroAssemblerCodeRef cr = patchBuffer.finalizeCode();
// cr now holds a pointer to the final runnable code.
void* entry = cr.m_code.executableAddress();
printf("disas %p %p\n",
entry, (char*)entry + cr.m_size);
pre_run();
// call the function
unsigned long (*fn)(void) = (unsigned long (*)())entry;
unsigned long result = fn();
printf("\n");
printf("value computed is %lu (expected 444)\n", result);
printf("\n");
delete eal;
delete am;
}
/////////////////////////////////////////////////////////////////
//// test5 (call in, out, repatch)
// a function which we will call from the JIT generated code
unsigned long cube ( unsigned long x ) { return x * x * x; }
unsigned long square ( unsigned long x ) { return x * x; }
void test5 ( void )
{
printf("\n--------- Test 5 (call in, out, repatch) ---------\n\n" );
// Create new assembler
JSC::MacroAssembler* am = new JSC::MacroAssembler();
JSC::MacroAssembler::Call cl;
ptrdiff_t offset_of_call_insn;
#if defined(ARCH_amd64)
// ADD FN PROLOGUE/EPILOGUE so as to make a mini-function
// and then call a non-JIT-generated helper from within
// this code
// push %rbp
// mov %rsp,%rbp
// push %rbx
// push %r12
// push %r13
// push %r14
// push %r15
// mov $0x9,%edi
// mov $0x40187e,%r11
// callq *%r11
// pop %r15
// pop %r14
// pop %r13
// pop %r12
// pop %rbx
// mov %rbp,%rsp
// pop %rbp
// retq
JSC::MacroAssembler::Label startOfFnLbl(am);
am->push(JSC::X86Registers::ebp);
am->move(JSC::X86Registers::esp, JSC::X86Registers::ebp);
am->push(JSC::X86Registers::ebx);
am->push(JSC::X86Registers::r12);
am->push(JSC::X86Registers::r13);
am->push(JSC::X86Registers::r14);
am->push(JSC::X86Registers::r15);
// let's compute cube(9). Move $9 to the first arg reg.
am->move(JSC::MacroAssembler::Imm32(9), JSC::X86Registers::edi);
cl = am->JSC::MacroAssembler::call();
// result is now in %rax. Leave it there and just return.
am->pop(JSC::X86Registers::r15);
am->pop(JSC::X86Registers::r14);
am->pop(JSC::X86Registers::r13);
am->pop(JSC::X86Registers::r12);
am->pop(JSC::X86Registers::ebx);
am->move(JSC::X86Registers::ebp, JSC::X86Registers::esp);
am->pop(JSC::X86Registers::ebp);
am->ret();
offset_of_call_insn
= am->JSC::MacroAssembler::differenceBetween(startOfFnLbl, cl);
if (0) printf("XXXXXXXX offset = %lu\n", offset_of_call_insn);
#endif
#if defined(ARCH_x86)
// ADD FN PROLOGUE/EPILOGUE so as to make a mini-function
// and then call a non-JIT-generated helper from within
// this code
// push %ebp
// mov %esp,%ebp
// push %ebx
// push %esi
// push %edi
// push $0x9
// call 0x80490e9 <_Z4cubem>
// add $0x4,%esp
// pop %edi
// pop %esi
// pop %ebx
// mov %ebp,%esp
// pop %ebp
// ret
JSC::MacroAssembler::Label startOfFnLbl(am);
am->push(JSC::X86Registers::ebp);
am->move(JSC::X86Registers::esp, JSC::X86Registers::ebp);
am->push(JSC::X86Registers::ebx);
am->push(JSC::X86Registers::esi);
am->push(JSC::X86Registers::edi);
// let's compute cube(9). Push $9 on the stack.
am->push(JSC::MacroAssembler::Imm32(9));
cl = am->JSC::MacroAssembler::call();
am->addPtr(JSC::MacroAssembler::Imm32(4), JSC::X86Registers::esp);
// result is now in %eax. Leave it there and just return.
am->pop(JSC::X86Registers::edi);
am->pop(JSC::X86Registers::esi);
am->pop(JSC::X86Registers::ebx);
am->move(JSC::X86Registers::ebp, JSC::X86Registers::esp);
am->pop(JSC::X86Registers::ebp);
am->ret();
offset_of_call_insn
= am->JSC::MacroAssembler::differenceBetween(startOfFnLbl, cl);
if (0) printf("XXXXXXXX offset = %lu\n",
(unsigned long)offset_of_call_insn);
#endif
#if defined(ARCH_arm)
// ADD FN PROLOGUE/EPILOGUE so as to make a mini-function
// push {r4} ; (str r4, [sp, #-4]!)
// push {r5} ; (str r5, [sp, #-4]!)
// push {r6} ; (str r6, [sp, #-4]!)
// push {r7} ; (str r7, [sp, #-4]!)
// push {r8} ; (str r8, [sp, #-4]!)
// push {r9} ; (str r9, [sp, #-4]!)
// push {r10} ; (str r10, [sp, #-4]!)
// push {r11} ; (str r11, [sp, #-4]!)
// eors r0, r0, r0
// adds r0, r0, #123 ; 0x7b
// mov r3, #256 ; 0x100
// orr r3, r3, #65 ; 0x41
// adds r0, r0, r3
// pop {r11} ; (ldr r11, [sp], #4)
// pop {r10} ; (ldr r10, [sp], #4)
// pop {r9} ; (ldr r9, [sp], #4)
// pop {r8} ; (ldr r8, [sp], #4)
// pop {r7} ; (ldr r7, [sp], #4)
// pop {r6} ; (ldr r6, [sp], #4)
// pop {r5} ; (ldr r5, [sp], #4)
// pop {r4} ; (ldr r4, [sp], #4)
// mov pc, lr
// callable as a normal function, returns 444
JSC::MacroAssembler::Label startOfFnLbl(am);
am->push(JSC::ARMRegisters::r4);
am->push(JSC::ARMRegisters::r5);
am->push(JSC::ARMRegisters::r6);
am->push(JSC::ARMRegisters::r7);
am->push(JSC::ARMRegisters::r8);
am->push(JSC::ARMRegisters::r9);
am->push(JSC::ARMRegisters::r10);
am->push(JSC::ARMRegisters::r11);
am->push(JSC::ARMRegisters::lr);
// let's compute cube(9). Get $9 into r0.
am->move(JSC::MacroAssembler::Imm32(9), JSC::ARMRegisters::r0);
cl = am->JSC::MacroAssembler::call();
// result is now in r0. Leave it there and just return.
am->pop(JSC::ARMRegisters::lr);
am->pop(JSC::ARMRegisters::r11);
am->pop(JSC::ARMRegisters::r10);
am->pop(JSC::ARMRegisters::r9);
am->pop(JSC::ARMRegisters::r8);
am->pop(JSC::ARMRegisters::r7);
am->pop(JSC::ARMRegisters::r6);
am->pop(JSC::ARMRegisters::r5);
am->pop(JSC::ARMRegisters::r4);
am->ret();
offset_of_call_insn
= am->JSC::MacroAssembler::differenceBetween(startOfFnLbl, cl);
if (0) printf("XXXXXXXX offset = %lu\n",
(unsigned long)offset_of_call_insn);
#endif
// prepare a link buffer, into which we can copy the completed insns
JSC::ExecutableAllocator* eal = new JSC::ExecutableAllocator();
// intermediate step .. get the pool suited for the size of code in 'am'
//WTF::PassRefPtr<JSC::ExecutablePool> ep = eal->poolForSize( am->size() );
JSC::ExecutablePool* ep = eal->poolForSize( am->size() );
// constructor for LinkBuffer asks ep to allocate r-x memory,
// then copies it there.
JSC::LinkBuffer patchBuffer(am, ep);
// now fix up any branches/calls
JSC::FunctionPtr target = JSC::FunctionPtr::FunctionPtr( &cube );
patchBuffer.link(cl, target);
JSC::MacroAssemblerCodeRef cr = patchBuffer.finalizeCode();
// cr now holds a pointer to the final runnable code.
void* entry = cr.m_code.executableAddress();
printf("disas %p %p\n",
entry, (char*)entry + cr.m_size);
pre_run();
printf("\n");
unsigned long (*fn)() = (unsigned long(*)())entry;
unsigned long result = fn();
printf("value computed is %lu (expected 729)\n", result);
printf("\n");
// now repatch the call in the JITted code to go elsewhere
JSC::JITCode jc = JSC::JITCode::JITCode(entry, cr.m_size);
JSC::CodeBlock cb = JSC::CodeBlock::CodeBlock(jc);
// the address of the call insn, that we want to prod
JSC::MacroAssemblerCodePtr cp
= JSC::MacroAssemblerCodePtr( ((char*)entry) + offset_of_call_insn );
JSC::RepatchBuffer repatchBuffer(&cb);
repatchBuffer.relink( JSC::CodeLocationCall(cp),
JSC::FunctionPtr::FunctionPtr( &square ));
result = fn();
printf("value computed is %lu (expected 81)\n", result);
printf("\n\n");
delete eal;
delete am;
}
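
The repatch half of test5 can be read as a recipe; a condensed sketch follows, with a hypothetical helper name and calls that mirror the ones used above:

    // Retarget an existing call instruction inside already-finalized code.
    static void retargetCall(void* entry, size_t codeSize, ptrdiff_t callOffset,
                             unsigned long (*newTarget)(unsigned long))
    {
        JSC::JITCode jc = JSC::JITCode::JITCode(entry, codeSize);
        JSC::CodeBlock cb = JSC::CodeBlock::CodeBlock(jc);
        // Address of the call instruction to prod.
        JSC::MacroAssemblerCodePtr cp((char*)entry + callOffset);
        JSC::RepatchBuffer repatchBuffer(&cb);
        repatchBuffer.relink(JSC::CodeLocationCall(cp), JSC::FunctionPtr(newTarget));
    }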
/////////////////////////////////////////////////////////////////
int main ( void )
{
#if WTF_COMPILER_GCC
test1();
test2();
test3();
#endif
test4();
test5();
return 0;
}


@ -1,4 +1,7 @@
/*
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=8 sw=4 et tw=79:
*
* ***** BEGIN LICENSE BLOCK *****
* Copyright (C) 2009 University of Szeged
* All rights reserved.
*
@ -22,7 +25,8 @@
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
*
* ***** END LICENSE BLOCK ***** */
#include "assembler/wtf/Platform.h"
@ -271,8 +275,8 @@ void ARMAssembler::dataTransfer32(bool isLoad, RegisterID srcDst, RegisterID bas
add_r(ARMRegisters::S0, base, OP2_IMM | (offset >> 12) | (10 << 8));
dtr_u(isLoad, srcDst, ARMRegisters::S0, (offset & 0xfff));
} else {
ARMWord reg = getImm(offset, ARMRegisters::S0);
dtr_ur(isLoad, srcDst, base, reg);
moveImm(offset, ARMRegisters::S0);
dtr_ur(isLoad, srcDst, base, ARMRegisters::S0);
}
} else {
offset = -offset;
@ -282,8 +286,8 @@ void ARMAssembler::dataTransfer32(bool isLoad, RegisterID srcDst, RegisterID bas
sub_r(ARMRegisters::S0, base, OP2_IMM | (offset >> 12) | (10 << 8));
dtr_d(isLoad, srcDst, ARMRegisters::S0, (offset & 0xfff));
} else {
ARMWord reg = getImm(offset, ARMRegisters::S0);
dtr_dr(isLoad, srcDst, base, reg);
moveImm(offset, ARMRegisters::S0);
dtr_dr(isLoad, srcDst, base, ARMRegisters::S0);
}
}
}
@ -297,8 +301,8 @@ void ARMAssembler::dataTransfer8(bool isLoad, RegisterID srcDst, RegisterID base
add_r(ARMRegisters::S0, base, OP2_IMM | (offset >> 12) | (10 << 8));
dtrb_u(isLoad, srcDst, ARMRegisters::S0, (offset & 0xfff));
} else {
ARMWord reg = getImm(offset, ARMRegisters::S0);
dtrb_ur(isLoad, srcDst, base, reg);
moveImm(offset, ARMRegisters::S0);
dtrb_ur(isLoad, srcDst, base, ARMRegisters::S0);
}
} else {
offset = -offset;
@ -308,8 +312,8 @@ void ARMAssembler::dataTransfer8(bool isLoad, RegisterID srcDst, RegisterID base
sub_r(ARMRegisters::S0, base, OP2_IMM | (offset >> 12) | (10 << 8));
dtrb_d(isLoad, srcDst, ARMRegisters::S0, (offset & 0xfff));
} else {
ARMWord reg = getImm(offset, ARMRegisters::S0);
dtrb_dr(isLoad, srcDst, base, reg);
moveImm(offset, ARMRegisters::S0);
dtrb_dr(isLoad, srcDst, base, ARMRegisters::S0);
}
}
}
@ -386,6 +390,10 @@ inline void ARMAssembler::fixUpOffsets(void * buffer)
ARMWord* ldrAddr = reinterpret_cast<ARMWord*>(data + pos);
ARMWord* addr = getLdrImmAddress(ldrAddr);
if (*addr != InvalidBranchTarget) {
// The following is disabled for JM because we patch some branches after
// calling fixUpOffsets, and the branch patcher doesn't know how to handle 'B'
// instructions.
#if 0
if (!(*iter & 1)) {
int diff = reinterpret_cast<ARMWord*>(data + *addr) - (ldrAddr + DefaultPrefetching);
@ -394,6 +402,7 @@ inline void ARMAssembler::fixUpOffsets(void * buffer)
continue;
}
}
#endif
*addr = reinterpret_cast<ARMWord>(data + *addr);
}
}


@ -1,4 +1,7 @@
/*
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=8 sw=4 et tw=79:
*
* ***** BEGIN LICENSE BLOCK *****
* Copyright (C) 2009, 2010 University of Szeged
* All rights reserved.
*
@ -22,7 +25,8 @@
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
*
* ***** END LICENSE BLOCK ***** */
#ifndef ARMAssembler_h
#define ARMAssembler_h
@ -205,6 +209,7 @@ namespace JSC {
FMRS = 0x0e100a10,
FSITOD = 0x0eb80bc0,
FTOSID = 0x0ebd0b40,
FTOSIZD = 0x0ebd0bc0,
FMSTAT = 0x0ef1fa10
#if WTF_ARM_ARCH_VERSION >= 5
,CLZ = 0x016f0f10,
@ -426,6 +431,12 @@ namespace JSC {
emitInst(static_cast<ARMWord>(cc) | CMP | SET_CC, 0, rn, op2);
}
void cmn_r(int rn, ARMWord op2, Condition cc = AL)
{
spewInsWithOp2("cmn", cc, rn, op2);
emitInst(static_cast<ARMWord>(cc) | CMN | SET_CC, 0, rn, op2);
}
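
CMN is the add-based counterpart of CMP: it sets the condition flags on rn + op2 without writing a result, so an equality test against a negated constant needs no scratch register. A one-line restatement of the flag this patch relies on later (the helper name is hypothetical):

    #include <stdint.h>

    // Z is set after "CMN rn, op2" exactly when rn + op2 wraps to zero,
    // i.e. when rn equals the two's-complement negation of op2.
    static inline bool zeroFlagAfterCMN(uint32_t rn, uint32_t op2)
    {
        return (uint32_t)(rn + op2) == 0;
    }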
void orr_r(int rd, int rn, ARMWord op2, Condition cc = AL)
{
spewInsWithOp2("orr", cc, rd, rn, op2);
@ -717,16 +728,18 @@ namespace JSC {
void fdtr_u(bool isLoad, int dd, int rn, ARMWord offset, Condition cc = AL)
{
char const * ins = isLoad ? "vldr.f64" : "vstr.f64";
js::JaegerSpew(js::JSpew_Insns,
IPFX "%-15s %s, [%s, #+%u]\n", MAYBE_PAD, "vldr.f64", nameFpRegD(dd), nameGpReg(rn), offset);
IPFX "%-15s %s, [%s, #+%u]\n", MAYBE_PAD, ins, nameFpRegD(dd), nameGpReg(rn), offset);
ASSERT(offset <= 0xff);
emitInst(static_cast<ARMWord>(cc) | FDTR | DT_UP | (isLoad ? DT_LOAD : 0), dd, rn, offset);
}
void fdtr_d(bool isLoad, int dd, int rn, ARMWord offset, Condition cc = AL)
{
char const * ins = isLoad ? "vldr.f64" : "vstr.f64";
js::JaegerSpew(js::JSpew_Insns,
IPFX "%-15s %s, [%s, #-%u]\n", MAYBE_PAD, "vldr.f64", nameFpRegD(dd), nameGpReg(rn), offset);
IPFX "%-15s %s, [%s, #-%u]\n", MAYBE_PAD, ins, nameFpRegD(dd), nameGpReg(rn), offset);
ASSERT(offset <= 0xff);
emitInst(static_cast<ARMWord>(cc) | FDTR | (isLoad ? DT_LOAD : 0), dd, rn, offset);
}
@ -785,6 +798,13 @@ namespace JSC {
emitInst(static_cast<ARMWord>(cc) | FTOSID, fd, 0, dm);
}
void ftosizd_r(int fd, int dm, Condition cc = AL)
{
// TODO: emitInst doesn't work for VFP instructions, though it
// seems to work for current usage.
emitInst(static_cast<ARMWord>(cc) | FTOSIZD, fd, 0, dm);
}
void fmstat(Condition cc = AL)
{
// TODO: emitInst doesn't work for VFP instructions, though it
@ -1220,9 +1240,16 @@ namespace JSC {
ASSERT((op2 & ~0xfff) == 0);
uint32_t imm8 = op2 & 0xff;
uint32_t rot = 32 - ((op2 >> 7) & 0x1e);
uint32_t rot = ((op2 >> 7) & 0x1e);
return imm8 << (rot & 0x1f);
// 'rot' is a right-rotate count.
uint32_t imm = (imm8 >> rot);
if (rot > 0) {
imm |= (imm8 << (32-rot));
}
return imm;
}
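
The corrected getOp2Bits treats the 4-bit rotate field as a right-rotate count in units of two bits, matching the ARM operand-2 immediate encoding. A standalone restatement with a worked value; the decoder below is a hypothetical helper, not part of the patch:

    #include <stdint.h>
    #include <assert.h>

    // Decode an ARM operand-2 immediate: an 8-bit value rotated right by
    // twice the 4-bit rotate field.
    static uint32_t decodeARMImmediate(uint32_t op2)
    {
        uint32_t imm8 = op2 & 0xff;
        uint32_t rot  = (op2 >> 7) & 0x1e;   // 2 * rotate field
        return rot ? ((imm8 >> rot) | (imm8 << (32 - rot))) : imm8;
    }

    // Worked value: rotate field 1 with imm8 0xff (op2 = 0x1ff) is 0xff
    // rotated right by 2, i.e. 0xc000003f.
    static void checkDecode(void)
    {
        assert(decodeARMImmediate(0x1ff) == 0xc000003fu);
    }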
// Format the operand 2 argument for debug spew. The operand can be


@ -1,4 +1,7 @@
/*
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=8 sw=4 et tw=79:
*
* ***** BEGIN LICENSE BLOCK *****
* Copyright (C) 2009 Apple Inc. All rights reserved.
* Copyright (C) 2010 University of Szeged
*
@ -22,7 +25,8 @@
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
*
* ***** END LICENSE BLOCK ***** */
#ifndef ARMAssembler_h
#define ARMAssembler_h


@ -1,4 +1,7 @@
/*
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=8 sw=4 et tw=79:
*
* ***** BEGIN LICENSE BLOCK *****
* Copyright (C) 2008 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
@ -21,7 +24,8 @@
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
*
* ***** END LICENSE BLOCK ***** */
#ifndef AbstractMacroAssembler_h
#define AbstractMacroAssembler_h
@ -278,8 +282,8 @@ public:
// DataLabel32:
//
// A DataLabelPtr is used to refer to a location in the code containing a pointer to be
// patched after the code has been generated.
// A DataLabel32 is used to refer to a location in the code containing a
// 32-bit constant to be patched after the code has been generated.
class DataLabel32 {
template<class TemplateAssemblerType>
friend class AbstractMacroAssembler;
@ -483,6 +487,11 @@ public:
return AssemblerType::getDifferenceBetweenLabels(from.m_label, to.m_label);
}
ptrdiff_t differenceBetween(DataLabel32 from, Label to)
{
return AssemblerType::getDifferenceBetweenLabels(from.m_label, to.m_label);
}
ptrdiff_t differenceBetween(DataLabelPtr from, Jump to)
{
return AssemblerType::getDifferenceBetweenLabels(from.m_label, to.m_jmp);


@ -1,4 +1,7 @@
/*
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=8 sw=4 et tw=79:
*
* ***** BEGIN LICENSE BLOCK *****
* Copyright (C) 2008 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
@ -21,7 +24,8 @@
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
*
* ***** END LICENSE BLOCK ***** */
#ifndef AssemblerBuffer_h
#define AssemblerBuffer_h


@ -1,4 +1,7 @@
/*
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=8 sw=4 et tw=79:
*
* ***** BEGIN LICENSE BLOCK *****
* Copyright (C) 2009 University of Szeged
* All rights reserved.
*
@ -22,7 +25,8 @@
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
*
* ***** END LICENSE BLOCK ***** */
#ifndef AssemblerBufferWithConstantPool_h
#define AssemblerBufferWithConstantPool_h


@ -1,4 +1,7 @@
/*
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=8 sw=4 et tw=79:
*
* ***** BEGIN LICENSE BLOCK *****
* Copyright (C) 2009 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
@ -21,7 +24,8 @@
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
*
* ***** END LICENSE BLOCK ***** */
#ifndef CodeLocation_h
#define CodeLocation_h


@ -1,4 +1,7 @@
/* vim: set ts=4 sw=4 tw=99 et:
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=8 sw=4 et tw=79:
*
* ***** BEGIN LICENSE BLOCK *****
* Copyright (C) 2009 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
@ -21,7 +24,8 @@
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
*
* ***** END LICENSE BLOCK ***** */
#ifndef LinkBuffer_h
#define LinkBuffer_h


@ -1,4 +1,7 @@
/*
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=8 sw=4 et tw=79:
*
* ***** BEGIN LICENSE BLOCK *****
* Copyright (C) 2008 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
@ -21,7 +24,8 @@
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
*
* ***** END LICENSE BLOCK ***** */
#ifndef MacroAssembler_h
#define MacroAssembler_h


@ -1,4 +1,7 @@
/*
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=8 sw=4 et tw=79:
*
* ***** BEGIN LICENSE BLOCK *****
* Copyright (C) 2009 University of Szeged
* All rights reserved.
*
@ -22,7 +25,8 @@
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
*
* ***** END LICENSE BLOCK ***** */
#include "assembler/wtf/Platform.h"


@ -1,4 +1,7 @@
/*
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=8 sw=4 et tw=79:
*
* ***** BEGIN LICENSE BLOCK *****
* Copyright (C) 2008 Apple Inc.
* Copyright (C) 2009, 2010 University of Szeged
* All rights reserved.
@ -23,7 +26,8 @@
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
*
* ***** END LICENSE BLOCK ***** */
#ifndef MacroAssemblerARM_h
#define MacroAssemblerARM_h
@ -279,6 +283,16 @@ public:
return dataLabel;
}
DataLabel32 load64WithAddressOffsetPatch(Address address, RegisterID hi, RegisterID lo)
{
DataLabel32 dataLabel(this);
m_assembler.ldr_un_imm(ARMRegisters::S0, 0);
m_assembler.add_r(ARMRegisters::S0, ARMRegisters::S0, address.base);
m_assembler.dtr_u(true, lo, ARMRegisters::S0, 0);
m_assembler.dtr_u(true, hi, ARMRegisters::S0, 4);
return dataLabel;
}
Label loadPtrWithPatchToLEA(Address address, RegisterID dest)
{
Label label(this);
@ -308,6 +322,27 @@ public:
return dataLabel;
}
DataLabel32 store64WithAddressOffsetPatch(RegisterID hi, RegisterID lo, Address address)
{
DataLabel32 dataLabel(this);
m_assembler.ldr_un_imm(ARMRegisters::S0, 0);
m_assembler.add_r(ARMRegisters::S0, ARMRegisters::S0, address.base);
m_assembler.dtr_u(false, lo, ARMRegisters::S0, 0);
m_assembler.dtr_u(false, hi, ARMRegisters::S0, 4);
return dataLabel;
}
DataLabel32 store64WithAddressOffsetPatch(Imm32 hi, RegisterID lo, Address address)
{
DataLabel32 dataLabel(this);
m_assembler.ldr_un_imm(ARMRegisters::S0, 0);
m_assembler.getImm(hi.m_value, ARMRegisters::S1);
m_assembler.add_r(ARMRegisters::S0, ARMRegisters::S0, address.base);
m_assembler.dtr_u(false, lo, ARMRegisters::S0, 0);
m_assembler.dtr_u(false, ARMRegisters::S1, ARMRegisters::S0, 4);
return dataLabel;
}
void store32(RegisterID src, ImplicitAddress address)
{
m_assembler.dataTransfer32(false, src, address.base, address.offset);
@ -433,6 +468,13 @@ public:
return Jump(m_assembler.jmp(ARMCondition(cond), useConstantPool));
}
// As branch32, but allow the value ('right') to be patched.
Jump branch32WithPatch(Condition cond, RegisterID left, Imm32 right, DataLabel32 &dataLabel)
{
dataLabel = moveWithPatch(right, ARMRegisters::S1);
return branch32(cond, left, ARMRegisters::S1, true);
}
Jump branch32(Condition cond, RegisterID left, Address right)
{
load32(right, ARMRegisters::S1);
@ -818,6 +860,13 @@ public:
return dataLabel;
}
DataLabel32 moveWithPatch(Imm32 initialValue, RegisterID dest)
{
DataLabel32 dataLabel(this);
m_assembler.ldr_un_imm(dest, initialValue.m_value);
return dataLabel;
}
Jump branchPtrWithPatch(Condition cond, RegisterID left, DataLabelPtr& dataLabel, ImmPtr initialRightValue = ImmPtr(0))
{
dataLabel = moveWithPatch(initialRightValue, ARMRegisters::S1);
@ -853,7 +902,7 @@ public:
bool supportsFloatingPointTruncate() const
{
return false;
return true;
}
bool supportsFloatingPointSqrt() const
@ -972,13 +1021,16 @@ public:
// Truncates 'src' to an integer, and places the result in 'dest'.
// If the result is not representable as a 32 bit value, branch.
// May also branch for some values that are representable in 32 bits
// (specifically, in this case, INT_MIN).
Jump branchTruncateDoubleToInt32(FPRegisterID src, RegisterID dest)
{
(void)(src);
(void)(dest);
ASSERT_NOT_REACHED();
return jump();
m_assembler.ftosizd_r(ARMRegisters::SD0, src);
// If FTOSIZD (VCVT.S32.F64) can't fit the result into a 32-bit
// integer, it saturates at INT_MAX or INT_MIN. Testing this is
// probably quicker than testing FPSCR for exception.
m_assembler.fmrs_r(dest, ARMRegisters::SD0);
m_assembler.cmn_r(dest, ARMAssembler::getOp2(-0x7fffffff));
m_assembler.cmp_r(dest, ARMAssembler::getOp2(0x80000000), ARMCondition(NonZero));
return Jump(m_assembler.jmp(ARMCondition(Zero)));
}
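    // A hedged, host-side sketch (not part of this patch; imagine it outside
    // this class) of the check the ARM sequence above is assumed to encode:
    // the saturating conversion clamps to INT_MIN/INT_MAX, and either clamped
    // value is treated as "may not fit".  truncateDoubleToInt32Sketch is a
    // hypothetical helper, not engine code.
    #include <climits>
    #include <cmath>
    #include <stdint.h>
    static bool truncateDoubleToInt32Sketch(double src, int32_t &dest)
    {
        double t = std::trunc(src);          // emulate VCVT.S32.F64: round toward zero
        if (t >= double(INT_MAX))
            dest = INT_MAX;                  // saturated high
        else if (t <= double(INT_MIN))
            dest = INT_MIN;                  // saturated low
        else
            dest = int32_t(t);
        // Fail (take the branch) on either saturation value, even though
        // INT_MIN itself is representable -- matching the comment above.
        return dest != INT_MAX && dest != INT_MIN;
    }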
// Converts 'src' to an integer, and places the result in 'dest'.
@ -1109,8 +1161,8 @@ protected:
m_assembler.add_r(ARMRegisters::S0, base, ARMAssembler::OP2_IMM | (offset >> 12) | (10 << 8));
m_assembler.dtr_u(true, ARMRegisters::S0, ARMRegisters::S0, offset & 0xfff);
} else {
ARMWord reg = m_assembler.getImm(offset, ARMRegisters::S0);
m_assembler.dtr_ur(true, ARMRegisters::S0, base, reg);
m_assembler.moveImm(offset, ARMRegisters::S0);
m_assembler.dtr_ur(true, ARMRegisters::S0, base, ARMRegisters::S0);
}
} else {
offset = -offset;
@ -1120,8 +1172,8 @@ protected:
m_assembler.sub_r(ARMRegisters::S0, base, ARMAssembler::OP2_IMM | (offset >> 12) | (10 << 8));
m_assembler.dtr_d(true, ARMRegisters::S0, ARMRegisters::S0, offset & 0xfff);
} else {
ARMWord reg = m_assembler.getImm(offset, ARMRegisters::S0);
m_assembler.dtr_dr(true, ARMRegisters::S0, base, reg);
m_assembler.moveImm(offset, ARMRegisters::S0);
m_assembler.dtr_dr(true, ARMRegisters::S0, base, ARMRegisters::S0);
}
}
m_assembler.blx(ARMRegisters::S0);


@ -1,4 +1,7 @@
/*
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=8 sw=4 et tw=79:
*
* ***** BEGIN LICENSE BLOCK *****
* Copyright (C) 2009 Apple Inc. All rights reserved.
* Copyright (C) 2010 University of Szeged
*
@ -22,7 +25,8 @@
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
*
* ***** END LICENSE BLOCK ***** */
#ifndef MacroAssemblerARMv7_h
#define MacroAssemblerARMv7_h


@ -1,4 +1,7 @@
/*
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=8 sw=4 et tw=79:
*
* ***** BEGIN LICENSE BLOCK *****
* Copyright (C) 2009 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
@ -21,7 +24,8 @@
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
*
* ***** END LICENSE BLOCK ***** */
#ifndef MacroAssemblerCodeRef_h
#define MacroAssemblerCodeRef_h
@ -155,6 +159,13 @@ public:
return !m_value;
}
ptrdiff_t operator -(const MacroAssemblerCodePtr &other) const
{
JS_ASSERT(m_value);
return reinterpret_cast<uint8 *>(m_value) -
reinterpret_cast<uint8 *>(other.m_value);
}
private:
void* m_value;
};
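// A hedged usage sketch: the operator- added above lets callers measure the
// distance between two entry points of generated code.  'start' and 'end'
// are hypothetical MacroAssemblerCodePtr values produced elsewhere, and the
// class header is assumed to be included.
#include <cstddef>
static ptrdiff_t generatedCodeBytes(const JSC::MacroAssemblerCodePtr &start,
                                    const JSC::MacroAssemblerCodePtr &end)
{
    return end - start;   // byte offset between the two code pointers
}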


@ -1,4 +1,7 @@
/*
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=8 sw=4 et tw=79:
*
* ***** BEGIN LICENSE BLOCK *****
* Copyright (C) 2008 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
@ -21,7 +24,8 @@
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
*
* ***** END LICENSE BLOCK ***** */
#ifndef MacroAssemblerX86_h
#define MacroAssemblerX86_h


@ -1,11 +1,51 @@
/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=4 sw=4 et tw=99:
*
* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is Mozilla SpiderMonkey JavaScript 1.9 code, released
* May 28, 2008.
*
* The Initial Developer of the Original Code is
* Mozilla Corporation
*
* Contributor(s):
* Alex Miller <amiller@mozilla.com>
*
* Alternatively, the contents of this file may be used under the terms of
* either of the GNU General Public License Version 2 or later (the "GPL"),
* or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
#include "assembler/wtf/Platform.h"
#if WTF_CPU_X86 && !WTF_PLATFORM_MAC
/* SSE checks only make sense on Intel platforms. */
#if WTF_CPU_X86 || WTF_CPU_X86_64
#include "MacroAssemblerX86Common.h"
using namespace JSC;
MacroAssemblerX86Common::SSECheckState MacroAssemblerX86Common::s_sseCheckState = NotCheckedSSE;
MacroAssemblerX86Common::SSE2CheckState MacroAssemblerX86Common::s_sse2CheckState = NotCheckedSSE2;
#endif /* WTF_CPU_X86 || WTF_CPU_X86_64 */
#endif


@ -1,4 +1,7 @@
/*
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=8 sw=4 et tw=79:
*
* ***** BEGIN LICENSE BLOCK *****
* Copyright (C) 2008 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
@ -21,7 +24,8 @@
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
*
* ***** END LICENSE BLOCK ***** */
#ifndef MacroAssemblerX86Common_h
#define MacroAssemblerX86Common_h
@ -719,11 +723,11 @@ public:
}
// Branch and record a label after the comparison.
Jump branch32WithPatch(Condition cond, RegisterID left, Imm32 right, Label &clabel)
Jump branch32WithPatch(Condition cond, RegisterID left, Imm32 right, DataLabel32 &dataLabel)
{
// Always use cmpl, since the value is to be patched.
m_assembler.cmpl_ir(right.m_value, left);
clabel = label();
dataLabel = DataLabel32(this);
return Jump(m_assembler.jCC(x86Condition(cond)));
}
@ -1074,6 +1078,25 @@ public:
m_assembler.movzbl_rr(dest, dest);
}
enum SSECheckState {
NotCheckedSSE = 0,
NoSSE = 1,
HasSSE2 = 2,
HasSSE4_1 = 3, // implies HasSSE2
HasSSE4_2 = 4 // implies HasSSE4_1
};
static SSECheckState getSSEState()
{
if (s_sseCheckState == NotCheckedSSE) {
MacroAssemblerX86Common::setSSECheckState();
}
// Only check once.
ASSERT(s_sseCheckState != NotCheckedSSE);
return s_sseCheckState;
}
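    // A hedged, standalone usage sketch (not part of this patch; imagine it
    // outside this class, with MacroAssemblerX86Common.h included): because
    // the enum values above are ordered, a caller can gate SSE4.1-only code
    // paths with a single comparison.  emitPackedInsertOrFallback is
    // hypothetical.
    static void emitPackedInsertOrFallback()
    {
        if (JSC::MacroAssemblerX86Common::getSSEState() >=
            JSC::MacroAssemblerX86Common::HasSSE4_1) {
            // SSE4.1 available: PINSRD and friends may be emitted.
        } else {
            // Older CPU: emit an SSE2-only fallback sequence.
        }
    }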
protected:
X86Assembler::Condition x86Condition(Condition cond)
{
@ -1081,10 +1104,58 @@ protected:
}
private:
// Only MacroAssemblerX86 should be using the following method; SSE2 is always available on
// x86_64, and clients & subclasses of MacroAssembler should be using 'supportsFloatingPoint()'.
friend class MacroAssemblerX86;
static SSECheckState s_sseCheckState;
static void setSSECheckState()
{
// Default the flags value to zero; if the compiler is
// not MSVC or GCC we will read this as SSE2 not present.
int flags_edx = 0;
int flags_ecx = 0;
#if WTF_COMPILER_MSVC
#if WTF_CPU_X86_64
extern void __cpuid(int a[4], int b);
int cpuinfo[4];
__cpuid(cpuinfo, 1);
flags_ecx = cpuinfo[2];
flags_edx = cpuinfo[3];
#else
_asm {
mov eax, 1 // cpuid function 1 gives us the standard feature set
cpuid;
mov flags_ecx, ecx;
mov flags_edx, edx;
}
#endif
#elif WTF_COMPILER_GCC
asm (
"movl $0x1, %%eax;"
"pushl %%ebx;"
"cpuid;"
"popl %%ebx;"
"movl %%ecx, %0;"
"movl %%edx, %1;"
: "=g" (flags_ecx), "=g" (flags_edx)
:
: "%eax", "%ecx", "%edx"
);
#endif
static const int SSE2FeatureBit = 1 << 26;
static const int SSE41FeatureBit = 1 << 19;
static const int SSE42FeatureBit = 1 << 20;
if (flags_ecx & SSE42FeatureBit)
s_sseCheckState = HasSSE4_2;
else if (flags_ecx & SSE41FeatureBit)
s_sseCheckState = HasSSE4_1;
else if (flags_edx & SSE2FeatureBit)
s_sseCheckState = HasSSE2;
else
s_sseCheckState = NoSSE;
}
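    // A hedged, standalone restatement of the same detection (imagine it
    // outside this class) using the GCC/Clang <cpuid.h> helper instead of
    // inline assembly; an assumption about an equivalent approach, not what
    // the patch compiles.
    #include <cpuid.h>
    enum SSELevelSketch { kNoSSE, kSSE2, kSSE4_1, kSSE4_2 };
    static SSELevelSketch detectSSELevelSketch()
    {
        unsigned eax, ebx, ecx = 0, edx = 0;
        if (!__get_cpuid(1, &eax, &ebx, &ecx, &edx))
            return kNoSSE;                         // CPUID leaf 1 unavailable
        if (ecx & (1u << 20)) return kSSE4_2;      // ECX bit 20: SSE4.2
        if (ecx & (1u << 19)) return kSSE4_1;      // ECX bit 19: SSE4.1
        if (edx & (1u << 26)) return kSSE2;        // EDX bit 26: SSE2
        return kNoSSE;
    }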
#if WTF_CPU_X86
#if WTF_PLATFORM_MAC
@ -1096,46 +1167,17 @@ private:
#else // PLATFORM(MAC)
enum SSE2CheckState {
NotCheckedSSE2,
HasSSE2,
NoSSE2
};
static bool isSSE2Present()
{
if (s_sse2CheckState == NotCheckedSSE2) {
// Default the flags value to zero; if the compiler is
// not MSVC or GCC we will read this as SSE2 not present.
int flags = 0;
#if WTF_COMPILER_MSVC
_asm {
mov eax, 1 // cpuid function 1 gives us the standard feature set
cpuid;
mov flags, edx;
}
#elif WTF_COMPILER_GCC
asm (
"movl $0x1, %%eax;"
"pushl %%ebx;"
"cpuid;"
"popl %%ebx;"
"movl %%edx, %0;"
: "=g" (flags)
:
: "%eax", "%ecx", "%edx"
);
#endif
static const int SSE2FeatureBit = 1 << 26;
s_sse2CheckState = (flags & SSE2FeatureBit) ? HasSSE2 : NoSSE2;
if (s_sseCheckState == NotCheckedSSE) {
setSSECheckState();
}
// Only check once.
ASSERT(s_sse2CheckState != NotCheckedSSE2);
ASSERT(s_sseCheckState != NotCheckedSSE);
return s_sse2CheckState == HasSSE2;
return s_sseCheckState >= HasSSE2;
}
static SSE2CheckState s_sse2CheckState;
#endif // PLATFORM(MAC)
#elif !defined(NDEBUG) // CPU(X86)
@ -1148,6 +1190,28 @@ private:
}
#endif
static bool isSSE41Present()
{
if (s_sseCheckState == NotCheckedSSE) {
setSSECheckState();
}
// Only check once.
ASSERT(s_sseCheckState != NotCheckedSSE);
return s_sseCheckState >= HasSSE4_1;
}
static bool isSSE42Present()
{
if (s_sseCheckState == NotCheckedSSE) {
setSSECheckState();
}
// Only check once.
ASSERT(s_sseCheckState != NotCheckedSSE);
return s_sseCheckState >= HasSSE4_2;
}
};
} // namespace JSC


@ -1,4 +1,7 @@
/*
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=8 sw=4 et tw=79:
*
* ***** BEGIN LICENSE BLOCK *****
* Copyright (C) 2008 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
@ -21,7 +24,8 @@
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
*
* ***** END LICENSE BLOCK ***** */
#ifndef MacroAssemblerX86_64_h
#define MacroAssemblerX86_64_h


@ -1,4 +1,7 @@
/*
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=8 sw=4 et tw=79:
*
* ***** BEGIN LICENSE BLOCK *****
* Copyright (C) 2009 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
@ -21,7 +24,8 @@
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
*
* ***** END LICENSE BLOCK ***** */
#ifndef RepatchBuffer_h
#define RepatchBuffer_h


@ -1,4 +1,7 @@
/*
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=8 sw=4 et tw=79:
*
* ***** BEGIN LICENSE BLOCK *****
* Copyright (C) 2008 Apple Inc. All rights reserved.
*
* Redistribution and use in source and binary forms, with or without
@ -21,7 +24,8 @@
* OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*/
*
* ***** END LICENSE BLOCK ***** */
#ifndef X86Assembler_h
#define X86Assembler_h
@ -181,6 +185,7 @@ private:
OP_OR_EvGv = 0x09,
OP_OR_GvEv = 0x0B,
OP_2BYTE_ESCAPE = 0x0F,
OP_3BYTE_ESCAPE = 0x3A,
OP_AND_EvGv = 0x21,
OP_AND_GvEv = 0x23,
OP_SUB_EvGv = 0x29,
@ -258,6 +263,10 @@ private:
OP2_PEXTRW_GdUdIb = 0xC5
} TwoByteOpcodeID;
typedef enum {
OP3_PINSRD_VsdWsd = 0x22
} ThreeByteOpcodeID;
TwoByteOpcodeID jccRel32(Condition cond)
{
return (TwoByteOpcodeID)(OP2_JCC_rel32 + cond);
@ -840,7 +849,9 @@ public:
#if WTF_CPU_X86_64
void xorq_rr(RegisterID src, RegisterID dst)
{
FIXME_INSN_PRINTING;
js::JaegerSpew(js::JSpew_Insns,
IPFX "xorq %s, %s\n", MAYBE_PAD,
nameIReg(4,src), nameIReg(4, dst));
m_formatter.oneByteOp64(OP_XOR_EvGv, src, dst);
}
@ -2006,6 +2017,16 @@ public:
m_formatter.twoByteOp(OP2_SQRTSD_VsdWsd, (RegisterID)dst, (RegisterID)src);
}
void pinsrd_rr(RegisterID src, XMMRegisterID dst)
{
js::JaegerSpew(js::JSpew_Insns,
IPFX "pinsrd $1, %s, %s\n", MAYBE_PAD,
nameIReg(src), nameFPReg(dst));
m_formatter.prefix(PRE_SSE_66);
m_formatter.threeByteOp(OP3_PINSRD_VsdWsd, (RegisterID)dst, (RegisterID)src);
m_formatter.immediate8(0x01); // the $1
}
// Misc instructions:
void int3()
@ -2361,6 +2382,16 @@ private:
}
#endif
void threeByteOp(ThreeByteOpcodeID opcode, int reg, RegisterID rm)
{
m_buffer.ensureSpace(maxInstructionSize);
emitRexIfNeeded(reg, 0, rm);
m_buffer.putByteUnchecked(OP_2BYTE_ESCAPE);
m_buffer.putByteUnchecked(OP_3BYTE_ESCAPE);
m_buffer.putByteUnchecked(opcode);
registerModRM(reg, rm);
}
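    // A hedged encoding note (not part of this patch): with the prefix and
    // escape handling above, pinsrd_rr(X86Registers::eax, X86Registers::xmm0)
    // is expected to emit the byte sequence 66 0F 3A 22 C0 01 -- the 0x66
    // prefix, the 0F 3A three-byte escape, opcode 0x22, ModRM 0xC0
    // (mod=11, reg=xmm0, rm=eax), then the 8-bit immediate 1.
    static const unsigned char kExpectedPinsrdEaxXmm0[] = {
        0x66, 0x0F, 0x3A, 0x22, 0xC0, 0x01
    };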
#if WTF_CPU_X86_64
// Quad-word-sized operands:
//


@ -100,6 +100,10 @@
#define WTF_COMPILER_WINSCW 1
#endif
/* COMPILER(SUNPRO) - Sun Studio for Solaris */
#if defined(__SUNPRO_C) || defined(__SUNPRO_CC)
#define WTF_COMPILER_SUNPRO 1
#endif
/* ==== CPU() - the target CPU architecture ==== */
@ -835,6 +839,26 @@ on MinGW. See https://bugs.webkit.org/show_bug.cgi?id=29268 */
#define ENABLE_JIT 1
#endif
#if WTF_PLATFORM_QT
#if WTF_CPU_X86_64 && WTF_PLATFORM_DARWIN
#define ENABLE_JIT 1
#elif WTF_CPU_X86 && WTF_PLATFORM_DARWIN
#define ENABLE_JIT 1
#define WTF_USE_JIT_STUB_ARGUMENT_VA_LIST 1
#elif WTF_CPU_X86 && WTF_PLATFORM_WIN_OS && WTF_COMPILER_MINGW && GCC_VERSION >= 40100
#define ENABLE_JIT 1
#define WTF_USE_JIT_STUB_ARGUMENT_VA_LIST 1
#elif WTF_CPU_X86 && WTF_PLATFORM_WIN_OS && WTF_COMPILER_MSVC
#define ENABLE_JIT 1
#define WTF_USE_JIT_STUB_ARGUMENT_REGISTER 1
#elif WTF_CPU_X86 && WTF_PLATFORM_LINUX && GCC_VERSION >= 40100
#define ENABLE_JIT 1
#define WTF_USE_JIT_STUB_ARGUMENT_VA_LIST 1
#elif WTF_CPU_ARM_TRADITIONAL && WTF_PLATFORM_LINUX
#define ENABLE_JIT 1
#endif
#endif /* PLATFORM(QT) */
#endif /* !defined(ENABLE_JIT) */
#if ENABLE_JIT
@ -878,6 +902,7 @@ on MinGW. See https://bugs.webkit.org/show_bug.cgi?id=29268 */
|| WTF_CPU_X86_64 \
|| WTF_CPU_ARM_THUMB2 \
|| WTF_CPU_ARM_TRADITIONAL \
|| WTF_CPU_ARM_TRADITIONAL \
|| WTF_CPU_X86)
#define ENABLE_YARR_JIT 1
#else


@ -336,6 +336,7 @@ VISIBILITY_FLAGS = @VISIBILITY_FLAGS@
WRAP_SYSTEM_INCLUDES = @WRAP_SYSTEM_INCLUDES@
ENABLE_TRACEJIT = @ENABLE_TRACEJIT@
ENABLE_METHODJIT = @ENABLE_METHODJIT@
NANOJIT_ARCH = @NANOJIT_ARCH@
HAVE_ARM_SIMD= @HAVE_ARM_SIMD@


@ -2058,6 +2058,9 @@ ia64*-hpux*)
HOST_CC='$(CC)'
HOST_CXX='$(CXX)'
HOST_LD='$(LD)'
if test "$AS_BIN"; then
AS="$(basename "$AS_BIN")"
fi
AR='lib -NOLOGO -OUT:"$@"'
AR_FLAGS=
RANLIB='echo not_ranlib'
@ -2068,6 +2071,7 @@ ia64*-hpux*)
UNZIP=unzip
DOXYGEN=:
GARBAGE='$(OBJDIR)/vc20.pdb $(OBJDIR)/vc40.pdb'
ASM_SUFFIX=asm
OBJ_SUFFIX=obj
LIB_SUFFIX=lib
DLL_PREFIX=
@ -2589,25 +2593,75 @@ case "$target" in
i?86-*)
ENABLE_TRACEJIT=1
NANOJIT_ARCH=i386
ENABLE_METHODJIT=1
ENABLE_MONOIC=1
ENABLE_POLYIC=1
AC_DEFINE(JS_CPU_X86)
AC_DEFINE(JS_NUNBOX32)
;;
x86_64*-*)
ENABLE_TRACEJIT=1
NANOJIT_ARCH=X64
ENABLE_METHODJIT=1
ENABLE_MONOIC=1
ENABLE_POLYIC=1
AC_DEFINE(JS_CPU_X64)
AC_DEFINE(JS_PUNBOX64)
;;
arm*-*)
ENABLE_TRACEJIT=1
NANOJIT_ARCH=ARM
AC_DEFINE(JS_CPU_ARM)
AC_DEFINE(JS_NUNBOX32)
;;
sparc*-*)
ENABLE_TRACEJIT=1
NANOJIT_ARCH=Sparc
AC_DEFINE(JS_CPU_SPARC)
;;
esac
MOZ_ARG_DISABLE_BOOL(methodjit,
[ --disable-methodjit Disable method JIT support],
ENABLE_METHODJIT= )
MOZ_ARG_DISABLE_BOOL(monoic,
[ --disable-monoic Disable use of MICs by JIT compiler],
ENABLE_MONOIC= )
MOZ_ARG_DISABLE_BOOL(polyic,
[ --disable-polyic Disable use of PICs by JIT compiler],
ENABLE_POLYIC= )
MOZ_ARG_DISABLE_BOOL(tracejit,
[ --disable-tracejit Disable tracing JIT support],
ENABLE_TRACEJIT=)
MOZ_ARG_ENABLE_BOOL(methodjit-spew,
[ --enable-methodjit-spew Enable method JIT spew support],
ENABLE_METHODJIT_SPEW=1,
ENABLE_METHODJIT_SPEW= )
AC_SUBST(ENABLE_METHODJIT)
if test "$ENABLE_METHODJIT"; then
AC_DEFINE(JS_METHODJIT)
else
ENABLE_TRACEJIT=
fi
if test "$ENABLE_MONOIC"; then
AC_DEFINE(JS_MONOIC)
fi
if test "$ENABLE_POLYIC"; then
AC_DEFINE(JS_POLYIC)
fi
if test "$ENABLE_METHODJIT_SPEW"; then
AC_DEFINE(JS_METHODJIT_SPEW)
fi
if test "$ENABLE_TRACEJIT"; then
AC_DEFINE(FEATURE_NANOJIT)


@ -76,14 +76,11 @@ private:
** JSAPI function prototypes
*******************************************************************************/
static JSBool ConstructAbstract(JSContext* cx, JSObject* obj, uintN argc,
jsval* argv, jsval* rval);
static JSBool ConstructAbstract(JSContext* cx, uintN argc, jsval* vp);
namespace CType {
static JSBool ConstructData(JSContext* cx, JSObject* obj, uintN argc,
jsval* argv, jsval* rval);
static JSBool ConstructBasic(JSContext* cx, JSObject* obj, uintN argc,
jsval* argv, jsval* rval);
static JSBool ConstructData(JSContext* cx, uintN argc, jsval* vp);
static JSBool ConstructBasic(JSContext* cx, JSObject* obj, uintN argc, jsval* vp);
static void Trace(JSTracer* trc, JSObject* obj);
static void Finalize(JSContext* cx, JSObject* obj);
@ -99,13 +96,12 @@ namespace CType {
static JSBool CreateArray(JSContext* cx, uintN argc, jsval* vp);
static JSBool ToString(JSContext* cx, uintN argc, jsval* vp);
static JSBool ToSource(JSContext* cx, uintN argc, jsval* vp);
static JSBool HasInstance(JSContext* cx, JSObject* obj, const jsval *v, JSBool* bp);
static JSBool HasInstance(JSContext* cx, JSObject* obj, const jsval* v, JSBool* bp);
}
namespace PointerType {
static JSBool Create(JSContext* cx, uintN argc, jsval* vp);
static JSBool ConstructData(JSContext* cx, JSObject* obj, uintN argc,
jsval* argv, jsval* rval);
static JSBool ConstructData(JSContext* cx, JSObject* obj, uintN argc, jsval* vp);
static JSBool TargetTypeGetter(JSContext* cx, JSObject* obj, jsid idval,
jsval* vp);
@ -118,8 +114,7 @@ namespace PointerType {
namespace ArrayType {
static JSBool Create(JSContext* cx, uintN argc, jsval* vp);
static JSBool ConstructData(JSContext* cx, JSObject* obj, uintN argc,
jsval* argv, jsval* rval);
static JSBool ConstructData(JSContext* cx, JSObject* obj, uintN argc, jsval* vp);
static JSBool ElementTypeGetter(JSContext* cx, JSObject* obj, jsid idval,
jsval* vp);
@ -132,8 +127,7 @@ namespace ArrayType {
namespace StructType {
static JSBool Create(JSContext* cx, uintN argc, jsval* vp);
static JSBool ConstructData(JSContext* cx, JSObject* obj, uintN argc,
jsval* argv, jsval* rval);
static JSBool ConstructData(JSContext* cx, JSObject* obj, uintN argc, jsval* vp);
static JSBool FieldsArrayGetter(JSContext* cx, JSObject* obj, jsid idval,
jsval* vp);
@ -150,8 +144,7 @@ namespace FunctionType {
static JSBool ConstructData(JSContext* cx, JSObject* typeObj,
JSObject* dataObj, JSObject* fnObj, JSObject* thisObj);
static JSBool Call(JSContext* cx, JSObject* obj, uintN argc, jsval* argv,
jsval* rval);
static JSBool Call(JSContext* cx, uintN argc, jsval* vp);
static JSBool ArgTypesGetter(JSContext* cx, JSObject* obj, jsid idval,
jsval* vp);
@ -200,8 +193,7 @@ namespace Int64Base {
}
namespace Int64 {
static JSBool Construct(JSContext* cx, JSObject* obj, uintN argc, jsval* argv,
jsval* rval);
static JSBool Construct(JSContext* cx, uintN argc, jsval* vp);
static JSBool ToString(JSContext* cx, uintN argc, jsval* vp);
static JSBool ToSource(JSContext* cx, uintN argc, jsval* vp);
@ -213,8 +205,7 @@ namespace Int64 {
}
namespace UInt64 {
static JSBool Construct(JSContext* cx, JSObject* obj, uintN argc, jsval* argv,
jsval* rval);
static JSBool Construct(JSContext* cx, uintN argc, jsval* vp);
static JSBool ToString(JSContext* cx, uintN argc, jsval* vp);
static JSBool ToSource(JSContext* cx, uintN argc, jsval* vp);
@ -284,13 +275,16 @@ static JSClass sCClosureClass = {
};
#define CTYPESFN_FLAGS \
(JSFUN_FAST_NATIVE | JSPROP_ENUMERATE | JSPROP_READONLY | JSPROP_PERMANENT)
(JSPROP_ENUMERATE | JSPROP_READONLY | JSPROP_PERMANENT)
#define CTYPESCTOR_FLAGS \
(CTYPESFN_FLAGS | JSFUN_CONSTRUCTOR)
#define CTYPESPROP_FLAGS \
(JSPROP_SHARED | JSPROP_ENUMERATE | JSPROP_READONLY | JSPROP_PERMANENT)
#define CDATAFN_FLAGS \
(JSFUN_FAST_NATIVE | JSPROP_READONLY | JSPROP_PERMANENT)
(JSPROP_READONLY | JSPROP_PERMANENT)
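// A hedged sketch of the calling convention these flag changes reflect:
// with JSFUN_FAST_NATIVE retired, every native in this file uses the
// (cx, argc, vp) signature and the JS_ARGV / JS_SET_RVAL accessors.
// ExampleFastNative is hypothetical and not part of ctypes.
static JSBool
ExampleFastNative(JSContext* cx, uintN argc, jsval* vp)
{
  jsval* argv = JS_ARGV(cx, vp);      // the actual arguments, argc of them
  (void) argv;
  JS_SET_RVAL(cx, vp, JSVAL_VOID);    // the return value must be set explicitly
  return JS_TRUE;
}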
static JSPropertySpec sCTypeProps[] = {
{ "name", 0, CTYPESPROP_FLAGS, CType::NameGetter, NULL },
@ -322,7 +316,7 @@ static JSFunctionSpec sCDataFunctions[] = {
};
static JSFunctionSpec sPointerFunction =
JS_FN("PointerType", PointerType::Create, 1, CTYPESFN_FLAGS);
JS_FN("PointerType", PointerType::Create, 1, CTYPESCTOR_FLAGS);
static JSPropertySpec sPointerProps[] = {
{ "targetType", 0, CTYPESPROP_FLAGS, PointerType::TargetTypeGetter, NULL },
@ -341,7 +335,7 @@ static JSPropertySpec sPointerInstanceProps[] = {
};
static JSFunctionSpec sArrayFunction =
JS_FN("ArrayType", ArrayType::Create, 1, CTYPESFN_FLAGS);
JS_FN("ArrayType", ArrayType::Create, 1, CTYPESCTOR_FLAGS);
static JSPropertySpec sArrayProps[] = {
{ "elementType", 0, CTYPESPROP_FLAGS, ArrayType::ElementTypeGetter, NULL },
@ -361,7 +355,7 @@ static JSPropertySpec sArrayInstanceProps[] = {
};
static JSFunctionSpec sStructFunction =
JS_FN("StructType", StructType::Create, 2, CTYPESFN_FLAGS);
JS_FN("StructType", StructType::Create, 2, CTYPESCTOR_FLAGS);
static JSPropertySpec sStructProps[] = {
{ "fields", 0, CTYPESPROP_FLAGS, StructType::FieldsArrayGetter, NULL },
@ -379,7 +373,7 @@ static JSFunctionSpec sStructInstanceFunctions[] = {
};
static JSFunctionSpec sFunctionFunction =
JS_FN("FunctionType", FunctionType::Create, 2, CTYPESFN_FLAGS);
JS_FN("FunctionType", FunctionType::Create, 2, CTYPESCTOR_FLAGS);
static JSPropertySpec sFunctionProps[] = {
{ "argTypes", 0, CTYPESPROP_FLAGS, FunctionType::ArgTypesGetter, NULL },
@ -536,7 +530,7 @@ static JSObject*
InitCTypeClass(JSContext* cx, JSObject* parent)
{
JSFunction* fun = JS_DefineFunction(cx, parent, "CType", ConstructAbstract, 0,
CTYPESFN_FLAGS);
CTYPESCTOR_FLAGS);
if (!fun)
return NULL;
@ -574,7 +568,7 @@ static JSObject*
InitCDataClass(JSContext* cx, JSObject* parent, JSObject* CTypeProto)
{
JSFunction* fun = JS_DefineFunction(cx, parent, "CData", ConstructAbstract, 0,
CTYPESFN_FLAGS);
CTYPESCTOR_FLAGS);
if (!fun)
return NULL;
@ -2483,10 +2477,8 @@ BuildDataSource(JSContext* cx,
JSBool
ConstructAbstract(JSContext* cx,
JSObject* obj,
uintN argc,
jsval* argv,
jsval* rval)
jsval* vp)
{
// Calling an abstract base class constructor is disallowed.
JS_ReportError(cx, "cannot construct from abstract type");
@ -2499,13 +2491,11 @@ ConstructAbstract(JSContext* cx,
JSBool
CType::ConstructData(JSContext* cx,
JSObject* obj,
uintN argc,
jsval* argv,
jsval* rval)
jsval* vp)
{
// get the callee object...
obj = JSVAL_TO_OBJECT(JS_ARGV_CALLEE(argv));
JSObject* obj = JSVAL_TO_OBJECT(JS_CALLEE(cx, vp));
if (!CType::IsCType(cx, obj)) {
JS_ReportError(cx, "not a CType");
return JS_FALSE;
@ -2523,13 +2513,13 @@ CType::ConstructData(JSContext* cx,
JS_ReportError(cx, "cannot construct from FunctionType; use FunctionType.ptr instead");
return JS_FALSE;
case TYPE_pointer:
return PointerType::ConstructData(cx, obj, argc, argv, rval);
return PointerType::ConstructData(cx, obj, argc, vp);
case TYPE_array:
return ArrayType::ConstructData(cx, obj, argc, argv, rval);
return ArrayType::ConstructData(cx, obj, argc, vp);
case TYPE_struct:
return StructType::ConstructData(cx, obj, argc, argv, rval);
return StructType::ConstructData(cx, obj, argc, vp);
default:
return ConstructBasic(cx, obj, argc, argv, rval);
return ConstructBasic(cx, obj, argc, vp);
}
}
@ -2537,8 +2527,7 @@ JSBool
CType::ConstructBasic(JSContext* cx,
JSObject* obj,
uintN argc,
jsval* argv,
jsval* rval)
jsval* vp)
{
if (argc > 1) {
JS_ReportError(cx, "CType constructor takes zero or one argument");
@ -2550,13 +2539,12 @@ CType::ConstructBasic(JSContext* cx,
if (!result)
return JS_FALSE;
*rval = OBJECT_TO_JSVAL(result);
if (argc == 1) {
if (!ExplicitConvert(cx, argv[0], obj, CData::GetData(cx, result)))
if (!ExplicitConvert(cx, JS_ARGV(cx, vp)[0], obj, CData::GetData(cx, result)))
return JS_FALSE;
}
JS_SET_RVAL(cx, vp, OBJECT_TO_JSVAL(result));
return JS_TRUE;
}
@ -3065,7 +3053,7 @@ CType::PtrGetter(JSContext* cx, JSObject* obj, jsid idval, jsval* vp)
}
JSBool
CType::CreateArray(JSContext* cx, uintN argc, jsval *vp)
CType::CreateArray(JSContext* cx, uintN argc, jsval* vp)
{
JSObject* baseType = JS_THIS_OBJECT(cx, vp);
JS_ASSERT(baseType);
@ -3098,7 +3086,7 @@ CType::CreateArray(JSContext* cx, uintN argc, jsval *vp)
}
JSBool
CType::ToString(JSContext* cx, uintN argc, jsval *vp)
CType::ToString(JSContext* cx, uintN argc, jsval* vp)
{
JSObject* obj = JS_THIS_OBJECT(cx, vp);
JS_ASSERT(obj);
@ -3121,7 +3109,7 @@ CType::ToString(JSContext* cx, uintN argc, jsval *vp)
}
JSBool
CType::ToSource(JSContext* cx, uintN argc, jsval *vp)
CType::ToSource(JSContext* cx, uintN argc, jsval* vp)
{
JSObject* obj = JS_THIS_OBJECT(cx, vp);
JS_ASSERT(obj);
@ -3142,7 +3130,7 @@ CType::ToSource(JSContext* cx, uintN argc, jsval *vp)
}
JSBool
CType::HasInstance(JSContext* cx, JSObject* obj, const jsval *v, JSBool* bp)
CType::HasInstance(JSContext* cx, JSObject* obj, const jsval* v, JSBool* bp)
{
JS_ASSERT(CType::IsCType(cx, obj));
@ -3233,8 +3221,7 @@ JSBool
PointerType::ConstructData(JSContext* cx,
JSObject* obj,
uintN argc,
jsval* argv,
jsval* rval)
jsval* vp)
{
if (!CType::IsCType(cx, obj) || CType::GetTypeCode(cx, obj) != TYPE_pointer) {
JS_ReportError(cx, "not a PointerType");
@ -3250,13 +3237,15 @@ PointerType::ConstructData(JSContext* cx,
if (!result)
return JS_FALSE;
*rval = OBJECT_TO_JSVAL(result);
// Set return value early, must not observe *vp after
JS_SET_RVAL(cx, vp, OBJECT_TO_JSVAL(result));
if (argc == 0) {
// Construct a null pointer.
return JS_TRUE;
}
jsval* argv = JS_ARGV(cx, vp);
if (argc >= 1) {
JSObject* baseObj = PointerType::GetBaseType(cx, obj);
if (CType::GetTypeCode(cx, baseObj) == TYPE_function &&
@ -3503,8 +3492,7 @@ JSBool
ArrayType::ConstructData(JSContext* cx,
JSObject* obj,
uintN argc,
jsval* argv,
jsval* rval)
jsval* vp)
{
if (!CType::IsCType(cx, obj) || CType::GetTypeCode(cx, obj) != TYPE_array) {
JS_ReportError(cx, "not an ArrayType");
@ -3531,6 +3519,7 @@ ArrayType::ConstructData(JSContext* cx,
JSObject* baseType = GetBaseType(cx, obj);
jsval* argv = JS_ARGV(cx, vp);
size_t length;
if (jsvalToSize(cx, argv[0], false, &length)) {
// Have a length, rather than an object to initialize from.
@ -3591,10 +3580,10 @@ ArrayType::ConstructData(JSContext* cx,
if (!result)
return JS_FALSE;
*rval = OBJECT_TO_JSVAL(result);
JS_SET_RVAL(cx, vp, OBJECT_TO_JSVAL(result));
if (convertObject) {
if (!ExplicitConvert(cx, argv[0], obj, CData::GetData(cx, result)))
if (!ExplicitConvert(cx, JS_ARGV(cx, vp)[0], obj, CData::GetData(cx, result)))
return JS_FALSE;
}
@ -3801,7 +3790,7 @@ ArrayType::Setter(JSContext* cx, JSObject* obj, jsid idval, jsval* vp)
}
JSBool
ArrayType::AddressOfElement(JSContext* cx, uintN argc, jsval *vp)
ArrayType::AddressOfElement(JSContext* cx, uintN argc, jsval* vp)
{
JSObject* obj = JS_THIS_OBJECT(cx, vp);
JS_ASSERT(obj);
@ -4217,8 +4206,7 @@ JSBool
StructType::ConstructData(JSContext* cx,
JSObject* obj,
uintN argc,
jsval* argv,
jsval* rval)
jsval* vp)
{
if (!CType::IsCType(cx, obj) || CType::GetTypeCode(cx, obj) != TYPE_struct) {
JS_ReportError(cx, "not a StructType");
@ -4234,7 +4222,7 @@ StructType::ConstructData(JSContext* cx,
if (!result)
return JS_FALSE;
*rval = OBJECT_TO_JSVAL(result);
JS_SET_RVAL(cx, vp, OBJECT_TO_JSVAL(result));
if (argc == 0)
return JS_TRUE;
@ -4242,6 +4230,7 @@ StructType::ConstructData(JSContext* cx,
char* buffer = static_cast<char*>(CData::GetData(cx, result));
const FieldInfoHash* fields = GetFieldInfo(cx, obj);
jsval* argv = JS_ARGV(cx, vp);
if (argc == 1) {
// There are two possible interpretations of the argument:
// 1) It may be an object '{ ... }' with properties representing the
@ -4429,7 +4418,7 @@ StructType::FieldSetter(JSContext* cx, JSObject* obj, jsid idval, jsval* vp)
}
JSBool
StructType::AddressOfField(JSContext* cx, uintN argc, jsval *vp)
StructType::AddressOfField(JSContext* cx, uintN argc, jsval* vp)
{
JSObject* obj = JS_THIS_OBJECT(cx, vp);
JS_ASSERT(obj);
@ -4925,13 +4914,11 @@ ConvertArgument(JSContext* cx,
JSBool
FunctionType::Call(JSContext* cx,
JSObject* obj,
uintN argc,
jsval* argv,
jsval* rval)
jsval* vp)
{
// get the callee object...
obj = JSVAL_TO_OBJECT(JS_ARGV_CALLEE(argv));
JSObject* obj = JSVAL_TO_OBJECT(JS_CALLEE(cx, vp));
if (!CData::IsCData(cx, obj)) {
JS_ReportError(cx, "not a CData");
return false;
@ -4977,6 +4964,7 @@ FunctionType::Call(JSContext* cx,
return false;
}
jsval* argv = JS_ARGV(cx, vp);
for (jsuint i = 0; i < argcFixed; ++i)
if (!ConvertArgument(cx, argv[i], fninfo->mArgTypes[i], &values[i], &strings))
return false;
@ -5026,12 +5014,11 @@ FunctionType::Call(JSContext* cx,
// suspend the request before we call into the function, since the call
// may block or otherwise take a long time to return.
jsrefcount rc = JS_SuspendRequest(cx);
ffi_call(&fninfo->mCIF, FFI_FN(fn), returnValue.mData,
reinterpret_cast<void**>(values.begin()));
JS_ResumeRequest(cx, rc);
{
JSAutoSuspendRequest suspend(cx);
ffi_call(&fninfo->mCIF, FFI_FN(fn), returnValue.mData,
reinterpret_cast<void**>(values.begin()));
}
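  // A hedged usage sketch: JSAutoSuspendRequest is the RAII form of the
  // JS_SuspendRequest / JS_ResumeRequest pair removed above; the request is
  // resumed automatically even on early return.  callBlockingFFI is a
  // hypothetical caller.
  static void
  callBlockingFFI(JSContext* cx)
  {
    JSAutoSuspendRequest suspend(cx);   // ~= jsrefcount rc = JS_SuspendRequest(cx)
    // ... long-running or blocking work that must not hold the request ...
  }                                     // ~= JS_ResumeRequest(cx, rc)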
// Small integer types get returned as a word-sized ffi_arg. Coerce it back
// into the correct size for ConvertToJS.
@ -5054,7 +5041,7 @@ FunctionType::Call(JSContext* cx,
// prepare a JS object from the result
return ConvertToJS(cx, fninfo->mReturnType, NULL, returnValue.mData,
false, true, rval);
false, true, vp);
}
FunctionInfo*
@ -5574,7 +5561,7 @@ CData::ValueSetter(JSContext* cx, JSObject* obj, jsid idval, jsval* vp)
}
JSBool
CData::Address(JSContext* cx, uintN argc, jsval *vp)
CData::Address(JSContext* cx, uintN argc, jsval* vp)
{
if (argc != 0) {
JS_ReportError(cx, "address takes zero arguments");
@ -5609,7 +5596,7 @@ CData::Address(JSContext* cx, uintN argc, jsval *vp)
}
JSBool
CData::Cast(JSContext* cx, uintN argc, jsval *vp)
CData::Cast(JSContext* cx, uintN argc, jsval* vp)
{
if (argc != 2) {
JS_ReportError(cx, "cast takes two arguments");
@ -5652,7 +5639,7 @@ CData::Cast(JSContext* cx, uintN argc, jsval *vp)
}
JSBool
CData::ReadString(JSContext* cx, uintN argc, jsval *vp)
CData::ReadString(JSContext* cx, uintN argc, jsval* vp)
{
if (argc != 0) {
JS_ReportError(cx, "readString takes zero arguments");
@ -5745,7 +5732,7 @@ CData::ReadString(JSContext* cx, uintN argc, jsval *vp)
}
JSBool
CData::ToSource(JSContext* cx, uintN argc, jsval *vp)
CData::ToSource(JSContext* cx, uintN argc, jsval* vp)
{
if (argc != 0) {
JS_ReportError(cx, "toSource takes zero arguments");
@ -5840,7 +5827,7 @@ JSBool
Int64Base::ToString(JSContext* cx,
JSObject* obj,
uintN argc,
jsval *vp,
jsval* vp,
bool isUnsigned)
{
if (argc > 1) {
@ -5878,7 +5865,7 @@ JSBool
Int64Base::ToSource(JSContext* cx,
JSObject* obj,
uintN argc,
jsval *vp,
jsval* vp,
bool isUnsigned)
{
if (argc != 0) {
@ -5907,10 +5894,8 @@ Int64Base::ToSource(JSContext* cx,
JSBool
Int64::Construct(JSContext* cx,
JSObject* obj,
uintN argc,
jsval* argv,
jsval* rval)
jsval* vp)
{
// Construct and return a new Int64 object.
if (argc != 1) {
@ -5918,13 +5903,14 @@ Int64::Construct(JSContext* cx,
return JS_FALSE;
}
jsval* argv = JS_ARGV(cx, vp);
JSInt64 i = 0;
if (!jsvalToBigInteger(cx, argv[0], true, &i))
return TypeError(cx, "int64", argv[0]);
// Get ctypes.Int64.prototype from the 'prototype' property of the ctor.
jsval slot;
ASSERT_OK(JS_GetProperty(cx, JSVAL_TO_OBJECT(JS_ARGV_CALLEE(argv)),
ASSERT_OK(JS_GetProperty(cx, JSVAL_TO_OBJECT(JS_CALLEE(cx, vp)),
"prototype", &slot));
JSObject* proto = JSVAL_TO_OBJECT(slot);
JS_ASSERT(JS_GET_CLASS(cx, proto) == &sInt64ProtoClass);
@ -5933,7 +5919,7 @@ Int64::Construct(JSContext* cx,
if (!result)
return JS_FALSE;
*rval = OBJECT_TO_JSVAL(result);
JS_SET_RVAL(cx, vp, OBJECT_TO_JSVAL(result));
return JS_TRUE;
}
@ -5944,7 +5930,7 @@ Int64::IsInt64(JSContext* cx, JSObject* obj)
}
JSBool
Int64::ToString(JSContext* cx, uintN argc, jsval *vp)
Int64::ToString(JSContext* cx, uintN argc, jsval* vp)
{
JSObject* obj = JS_THIS_OBJECT(cx, vp);
if (!Int64::IsInt64(cx, obj)) {
@ -5956,7 +5942,7 @@ Int64::ToString(JSContext* cx, uintN argc, jsval *vp)
}
JSBool
Int64::ToSource(JSContext* cx, uintN argc, jsval *vp)
Int64::ToSource(JSContext* cx, uintN argc, jsval* vp)
{
JSObject* obj = JS_THIS_OBJECT(cx, vp);
if (!Int64::IsInt64(cx, obj)) {
@ -6063,7 +6049,7 @@ Int64::Join(JSContext* cx, uintN argc, jsval* vp)
JSInt64 i = (JSInt64(hi) << 32) + JSInt64(lo);
// Get Int64.prototype from the function's reserved slot.
JSObject* callee = JSVAL_TO_OBJECT(JS_ARGV_CALLEE(argv));
JSObject* callee = JSVAL_TO_OBJECT(JS_CALLEE(cx, vp));
jsval slot;
ASSERT_OK(JS_GetReservedSlot(cx, callee, SLOT_FN_INT64PROTO, &slot));
@ -6080,10 +6066,8 @@ Int64::Join(JSContext* cx, uintN argc, jsval* vp)
JSBool
UInt64::Construct(JSContext* cx,
JSObject* obj,
uintN argc,
jsval* argv,
jsval* rval)
jsval* vp)
{
// Construct and return a new UInt64 object.
if (argc != 1) {
@ -6091,13 +6075,14 @@ UInt64::Construct(JSContext* cx,
return JS_FALSE;
}
jsval* argv = JS_ARGV(cx, vp);
JSUint64 u = 0;
if (!jsvalToBigInteger(cx, argv[0], true, &u))
return TypeError(cx, "uint64", argv[0]);
// Get ctypes.UInt64.prototype from the 'prototype' property of the ctor.
jsval slot;
ASSERT_OK(JS_GetProperty(cx, JSVAL_TO_OBJECT(JS_ARGV_CALLEE(argv)),
ASSERT_OK(JS_GetProperty(cx, JSVAL_TO_OBJECT(JS_CALLEE(cx, vp)),
"prototype", &slot));
JSObject* proto = JSVAL_TO_OBJECT(slot);
JS_ASSERT(JS_GET_CLASS(cx, proto) == &sUInt64ProtoClass);
@ -6106,7 +6091,7 @@ UInt64::Construct(JSContext* cx,
if (!result)
return JS_FALSE;
*rval = OBJECT_TO_JSVAL(result);
JS_SET_RVAL(cx, vp, OBJECT_TO_JSVAL(result));
return JS_TRUE;
}
@ -6117,7 +6102,7 @@ UInt64::IsUInt64(JSContext* cx, JSObject* obj)
}
JSBool
UInt64::ToString(JSContext* cx, uintN argc, jsval *vp)
UInt64::ToString(JSContext* cx, uintN argc, jsval* vp)
{
JSObject* obj = JS_THIS_OBJECT(cx, vp);
if (!UInt64::IsUInt64(cx, obj)) {
@ -6129,7 +6114,7 @@ UInt64::ToString(JSContext* cx, uintN argc, jsval *vp)
}
JSBool
UInt64::ToSource(JSContext* cx, uintN argc, jsval *vp)
UInt64::ToSource(JSContext* cx, uintN argc, jsval* vp)
{
JSObject* obj = JS_THIS_OBJECT(cx, vp);
if (!UInt64::IsUInt64(cx, obj)) {
@ -6232,7 +6217,7 @@ UInt64::Join(JSContext* cx, uintN argc, jsval* vp)
JSUint64 u = (JSUint64(hi) << 32) + JSUint64(lo);
// Get UInt64.prototype from the function's reserved slot.
JSObject* callee = JSVAL_TO_OBJECT(JS_ARGV_CALLEE(argv));
JSObject* callee = JSVAL_TO_OBJECT(JS_CALLEE(cx, vp));
jsval slot;
ASSERT_OK(JS_GetReservedSlot(cx, callee, SLOT_FN_INT64PROTO, &slot));


@ -71,7 +71,7 @@ static JSClass sLibraryClass = {
};
#define CTYPESFN_FLAGS \
(JSFUN_FAST_NATIVE | JSPROP_ENUMERATE | JSPROP_READONLY | JSPROP_PERMANENT)
(JSPROP_ENUMERATE | JSPROP_READONLY | JSPROP_PERMANENT)
static JSFunctionSpec sLibraryFunctions[] = {
JS_FN("close", Library::Close, 0, CTYPESFN_FLAGS),


@ -32,15 +32,8 @@
* javascript provider probes
*
* function-entry (filename, classname, funcname)
* function-info (filename, classname, funcname, lineno,
* runfilename, runlineno)
* function-args (filename, classname, funcname, argc, argv, argv0,
* argv1, argv2, argv3, argv4)
* function-rval (filename, classname, funcname, lineno, rval, rval0)
* function-return (filename, classname, funcname)
* object-create-start (filename, classname)
* object-create (filename, classname, *object, rlineno)
* object-create-done (filename, classname)
* object-create (classname, *object)
* object-finalize (NULL, classname, *object)
* execute-start (filename, lineno)
* execute-done (filename, lineno)
@ -48,16 +41,10 @@
provider javascript {
probe function__entry(char *, char *, char *);
probe function__info(char *, char *, char *, int, char *, int);
probe function__args(char *, char *, char *, int, void *, void *, void *,
void *, void *, void *);
probe function__rval(char *, char *, char *, int, void *, void *);
probe function__return(char *, char *, char *);
probe object__create__start(char *, char *);
probe object__create__done(char *, char *);
/* XXX must use unsigned longs here instead of uintptr_t for OS X
(Apple radar: 5194316 & 5565198) */
probe object__create(char *, char *, unsigned long, int);
probe object__create(char *, unsigned long);
probe object__finalize(char *, char *, unsigned long);
probe execute__start(char *, int);
probe execute__done(char *, int);
@ -68,6 +55,5 @@ provider javascript {
#pragma D attributes Private/Private/Unknown provider mozilla module
#pragma D attributes Private/Private/Unknown provider mozilla function
#pragma D attributes Unstable/Unstable/Common provider mozilla name
#pragma D attributes Unstable/Unstable/Common provider mozilla args
*/


@ -83,8 +83,9 @@
#undef JS_INTPTR_TYPE
#undef JS_BYTES_PER_WORD
/* Some mozilla code uses JS-friend APIs that depend on JS_TRACER being
correct. */
/* Some mozilla code uses JS-friend APIs that depend on JS_TRACER and
JS_METHODJIT being correct. */
#undef JS_TRACER
#undef JS_METHODJIT
#endif /* js_config_h___ */


@ -335,3 +335,5 @@ MSG_DEF(JSMSG_THROW_TYPE_ERROR, 252, 0, JSEXN_TYPEERR, "'caller', 'callee'
MSG_DEF(JSMSG_BAD_TOISOSTRING_PROP, 253, 0, JSEXN_TYPEERR, "toISOString property is not callable")
MSG_DEF(JSMSG_BAD_PARSE_NODE, 254, 0, JSEXN_INTERNALERR, "bad parse node")
MSG_DEF(JSMSG_NOT_EXPECTED_TYPE, 255, 3, JSEXN_TYPEERR, "{0}: expected {1}, got {2}")
MSG_DEF(JSMSG_CALLER_IS_STRICT, 256, 0, JSEXN_TYPEERR, "access to strict mode caller function is censored")
MSG_DEF(JSMSG_NEED_DEBUG_MODE, 257, 0, JSEXN_ERR, "function can be called only in debug mode")
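// A hedged illustration (not part of this patch): new message ids such as
// JSMSG_CALLER_IS_STRICT are raised through the usual error-number API.
// DenyStrictCallerSketch is hypothetical.
static JSBool
DenyStrictCallerSketch(JSContext *cx)
{
    JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_CALLER_IS_STRICT);
    return JS_FALSE;
}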


@ -16,8 +16,12 @@ static JSBool test_prop_get( JSContext *cx, JSObject *obj, jsid id, jsval *vp )
}
static JSBool
PTest(JSContext* cx, JSObject* obj, uintN argc, jsval *argv, jsval* rval)
PTest(JSContext* cx, uintN argc, jsval *vp)
{
JSObject *obj = JS_NewObjectForConstructor(cx, vp);
if (!obj)
return JS_FALSE;
JS_SET_RVAL(cx, vp, OBJECT_TO_JSVAL(obj));
return JS_TRUE;
}
@ -36,14 +40,14 @@ static JSClass ptestClass = {
JSCLASS_NO_OPTIONAL_MEMBERS
};
static JSBool test_fn(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval)
static JSBool test_fn(JSContext *cx, uintN argc, jsval *vp)
{
called_test_fn++;
return JS_TRUE;
}
static JSFunctionSpec ptestFunctions[] = {
JS_FS( "test_fn", test_fn, 0, 0, 0 ),
JS_FS( "test_fn", test_fn, 0, 0 ),
JS_FS_END
};


@ -31,17 +31,30 @@ BEGIN_TEST(testConservativeGC)
EVAL("var a = [];\n"
"for (var i = 0; i != 10000; ++i) {\n"
"a.push(i + 0.1, [1, 2], String(Math.sqrt(i)));\n"
"a.push(i + 0.1, [1, 2], String(Math.sqrt(i)), {a: i});\n"
"}", &tmp);
JS_GC(cx);
CHECK(!memcmp(&objCopy, JSVAL_TO_OBJECT(v2), sizeof(objCopy)));
CHECK(!memcmp(&strCopy, JSVAL_TO_STRING(v3), sizeof(strCopy)));
checkObjectFields(&objCopy, JSVAL_TO_OBJECT(v2));
CHECK(!memcmp(&strCopy, JSVAL_TO_STRING(v3), sizeof(strCopy)));
CHECK(!memcmp(&obj2Copy, obj2, sizeof(obj2Copy)));
CHECK(!memcmp(&str2Copy, str2, sizeof(str2Copy)));
checkObjectFields(&obj2Copy, obj2);
CHECK(!memcmp(&str2Copy, str2, sizeof(str2Copy)));
return true;
}
bool checkObjectFields(JSObject *savedCopy, JSObject *obj)
{
/*
* The GC can change the shape and shrink dslots so we update them before
* doing memcmp.
*/
savedCopy->objShape = obj->objShape;
savedCopy->dslots = obj->dslots;
CHECK(!memcmp(savedCopy, obj, sizeof(*obj)));
return true;
}
END_TEST(testConservativeGC)


@ -11,7 +11,7 @@ BEGIN_TEST(testContexts_IsRunning)
return true;
}
static JSBool chk(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval)
static JSBool chk(JSContext *cx, uintN argc, jsval *vp)
{
JSRuntime *rt = JS_GetRuntime(cx);
JSContext *acx = JS_NewContext(rt, 8192);


@ -6,7 +6,7 @@
#include "jsxdrapi.h"
static JSBool
native(JSContext *cx, JSObject *obj, uintN argc, jsval *argv, jsval *rval)
native(JSContext *cx, uintN argc, jsval *vp)
{
return JS_TRUE;
}


@ -37,7 +37,7 @@ NativeFrameCleaner()
{
char buffer[1 << 16];
memset(buffer, 0, sizeof buffer);
ptrSink = buffer;
ptrSink = buffer;
}
BEGIN_TEST(testIsAboutToBeFinalized_bug528645)
@ -83,7 +83,7 @@ cls_testIsAboutToBeFinalized_bug528645::createAndTestRooted()
* Make sure to include unit and numeric strings to the set.
*/
EVAL("var x = 1.1; "
"[''+x, 'a', '42', 'something'.substring(1), "
"[''+x, 'a', '123456789', 'something'.substring(1), "
"{}, [], new Function('return 10;'), <xml/>];",
root.addr());
@ -117,6 +117,20 @@ cls_testIsAboutToBeFinalized_bug528645::createAndTestRooted()
*/
for (jsuint i = 0; i != checkPointersLength; ++i)
CHECK(checkPointers[i]);
/*
* Overwrite the registers and stack with new GC things to avoid false
* positives with the finalization test.
*/
EVAL("[]", root.addr());
array = JSVAL_TO_OBJECT(root.value());
JS_ASSERT(JS_IsArrayObject(cx, array));
jsuint tmp;
CHECK(JS_GetArrayLength(cx, array, &tmp));
CHECK(ok);
return true;
}


@ -8,15 +8,17 @@ const size_t N = 1000;
static jsval argv[N];
static JSBool
constructHook(JSContext *cx, JSObject *thisobj, uintN argc, jsval *argv, jsval *rval)
constructHook(JSContext *cx, uintN argc, jsval *vp)
{
// Check that arguments were passed properly from JS_New.
JSObject *callee = JSVAL_TO_OBJECT(JS_ARGV_CALLEE(argv));
if (!thisobj) {
JS_ReportError(cx, "test failed, null 'this'");
JSObject *callee = JSVAL_TO_OBJECT(JS_CALLEE(cx, vp));
JSObject *obj = JS_NewObjectForConstructor(cx, vp);
if (!obj) {
JS_ReportError(cx, "test failed, could not construct object");
return false;
}
if (strcmp(JS_GET_CLASS(cx, thisobj)->name, "Object") != 0) {
if (strcmp(JS_GET_CLASS(cx, obj)->name, "Object") != 0) {
JS_ReportError(cx, "test failed, wrong class for 'this'");
return false;
}
@ -28,7 +30,7 @@ constructHook(JSContext *cx, JSObject *thisobj, uintN argc, jsval *argv, jsval *
JS_ReportError(cx, "test failed, wrong value in argv[2]");
return false;
}
if (!JS_IsConstructing(cx)) {
if (!JS_IsConstructing(cx, vp)) {
JS_ReportError(cx, "test failed, not constructing");
return false;
}
@ -37,7 +39,7 @@ constructHook(JSContext *cx, JSObject *thisobj, uintN argc, jsval *argv, jsval *
if (!JS_SetElement(cx, callee, 0, &argv[0]))
return false;
*rval = OBJECT_TO_JSVAL(callee); // return the callee, perversely
*vp = OBJECT_TO_JSVAL(obj);
argv[0] = argv[1] = argv[2] = JSVAL_VOID; // trash the argv, perversely
return true;
}
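// A hedged, minimal sketch (not part of this test) of the constructor
// protocol the hunk above migrates to: a (cx, argc, vp) native that builds
// its result object with JS_NewObjectForConstructor and reports it through
// vp.  MinimalCtor is hypothetical.
static JSBool
MinimalCtor(JSContext *cx, uintN argc, jsval *vp)
{
    if (!JS_IsConstructing(cx, vp)) {
        JS_ReportError(cx, "MinimalCtor must be called with 'new'");
        return JS_FALSE;
    }
    JSObject *obj = JS_NewObjectForConstructor(cx, vp);
    if (!obj)
        return JS_FALSE;
    JS_SET_RVAL(cx, vp, OBJECT_TO_JSVAL(obj));
    return JS_TRUE;
}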
@ -91,11 +93,8 @@ BEGIN_TEST(testNewObject_1)
jsvalRoot rt2(cx, OBJECT_TO_JSVAL(ctor));
obj = JS_New(cx, ctor, 3, argv);
CHECK(obj);
CHECK(obj == ctor); // constructHook returns ctor, perversely
CHECK(JS_GetElement(cx, ctor, 0, &v));
CHECK_SAME(v, JSVAL_ZERO);
CHECK_SAME(argv[0], JSVAL_ZERO); // original argv should not have been trashed
CHECK_SAME(argv[1], JSVAL_ONE);
return true;
}
END_TEST(testNewObject_1)

Просмотреть файл

@ -23,14 +23,14 @@ static JSClass myClass = {
};
static JSBool
createMyObject(JSContext* context, JSObject* obj, uintN argc, jsval *argv, jsval* rval)
createMyObject(JSContext* context, uintN argc, jsval *vp)
{
JS_BeginRequest(context);
//JS_GC(context); //<- if we make GC here, all is ok
JSObject* myObject = JS_NewObject(context, &myClass, NULL, NULL);
*rval = OBJECT_TO_JSVAL(myObject);
*vp = OBJECT_TO_JSVAL(myObject);
JS_EndRequest(context);
@ -40,7 +40,7 @@ createMyObject(JSContext* context, JSObject* obj, uintN argc, jsval *argv, jsval
static JSFunctionSpec s_functions[] =
{
{ "createMyObject", createMyObject, 0 },
{ 0,0,0,0,0 }
{ 0,0,0,0 }
};
BEGIN_TEST(testOps_bug559006)

Просмотреть файл

@ -45,6 +45,9 @@ BEGIN_TEST(testTrap_gc)
// Disable JIT for debugging
JS_SetOptions(cx, JS_GetOptions(cx) & ~JSOPTION_JIT);
// Enable debug mode
CHECK(JS_SetDebugMode(cx, JS_TRUE));
jsbytecode *line2 = JS_LineNumberToPC(cx, script, 1);
CHECK(line2);


@ -80,7 +80,6 @@
#include "jsscope.h"
#include "jsscript.h"
#include "jsstr.h"
#include "jstask.h"
#include "jstracer.h"
#include "jsdbgapi.h"
#include "prmjtime.h"
@ -115,8 +114,9 @@ using namespace js;
#endif
#ifdef JS_USE_JSVAL_JSID_STRUCT_TYPES
JS_PUBLIC_DATA(jsid) JS_DEFAULT_XML_NAMESPACE_ID = { (size_t)JSID_TYPE_DEFAULT_XML_NAMESPACE };
JS_PUBLIC_DATA(jsid) JSID_VOID = { (size_t)JSID_TYPE_VOID };
JS_PUBLIC_DATA(jsid) JS_DEFAULT_XML_NAMESPACE_ID = { size_t(JSID_TYPE_DEFAULT_XML_NAMESPACE) };
JS_PUBLIC_DATA(jsid) JSID_VOID = { size_t(JSID_TYPE_VOID) };
JS_PUBLIC_DATA(jsid) JSID_EMPTY = { size_t(JSID_TYPE_OBJECT) };
#endif
#ifdef JS_USE_JSVAL_JSID_STRUCT_TYPES
@ -576,6 +576,12 @@ JSRuntime::init(uint32 maxbytes)
if (!unjoinedFunctionCountMap.init())
return false;
}
propTreeStatFilename = getenv("JS_PROPTREE_STATFILE");
propTreeDumpFilename = getenv("JS_PROPTREE_DUMPFILE");
if (meterEmptyShapes()) {
if (!emptyShapes.init())
return false;
}
#endif
if (!(defaultCompartment = new JSCompartment(this)) ||
@ -600,15 +606,6 @@ JSRuntime::init(uint32 maxbytes)
wrapObjectCallback = js::TransparentObjectWrapper;
#ifdef JS_THREADSAFE
gcLock = JS_NEW_LOCK();
if (!gcLock)
return false;
gcDone = JS_NEW_CONDVAR(gcLock);
if (!gcDone)
return false;
requestDone = JS_NEW_CONDVAR(gcLock);
if (!requestDone)
return false;
/* this is asymmetric with JS_ShutDown: */
if (!js_SetupLocks(8, 16))
return false;
@ -785,31 +782,15 @@ JS_SetRuntimePrivate(JSRuntime *rt, void *data)
rt->data = data;
}
JS_PUBLIC_API(void)
JS_BeginRequest(JSContext *cx)
{
#ifdef JS_THREADSAFE
JS_ASSERT(CURRENT_THREAD_IS_ME(cx->thread));
JS_ASSERT(cx->requestDepth <= cx->outstandingRequests);
if (cx->requestDepth) {
JS_ASSERT(cx->thread->requestContext == cx);
cx->requestDepth++;
cx->outstandingRequests++;
} else if (JSContext *old = cx->thread->requestContext) {
JS_ASSERT(!cx->prevRequestContext);
JS_ASSERT(cx->prevRequestDepth == 0);
JS_ASSERT(old != cx);
JS_ASSERT(old->requestDepth != 0);
JS_ASSERT(old->requestDepth <= old->outstandingRequests);
/* Serialize access to JSContext::requestDepth from other threads. */
AutoLockGC lock(cx->runtime);
cx->prevRequestContext = old;
cx->prevRequestDepth = old->requestDepth;
cx->requestDepth = 1;
cx->outstandingRequests++;
old->requestDepth = 0;
cx->thread->requestContext = cx;
static void
StartRequest(JSContext *cx)
{
JSThread *t = cx->thread;
JS_ASSERT(CURRENT_THREAD_IS_ME(t));
if (t->requestDepth) {
t->requestDepth++;
} else {
JSRuntime *rt = cx->runtime;
AutoLockGC lock(rt);
@ -821,55 +802,32 @@ JS_BeginRequest(JSContext *cx)
}
/* Indicate that a request is running. */
cx->requestDepth = 1;
cx->outstandingRequests++;
cx->thread->requestContext = cx;
rt->requestCount++;
t->requestDepth = 1;
if (rt->requestCount == 1 && rt->activityCallback)
rt->activityCallback(rt->activityCallbackArg, true);
}
#endif
}
#ifdef JS_THREADSAFE
static void
StopRequest(JSContext *cx)
{
JSRuntime *rt;
JS_ASSERT(CURRENT_THREAD_IS_ME(cx->thread));
JS_ASSERT(cx->requestDepth > 0);
JS_ASSERT(cx->outstandingRequests >= cx->requestDepth);
JS_ASSERT(cx->thread->requestContext == cx);
if (cx->requestDepth >= 2) {
cx->requestDepth--;
cx->outstandingRequests--;
} else if (JSContext *old = cx->prevRequestContext) {
JS_ASSERT(cx != old);
JS_ASSERT(old->requestDepth == 0);
JS_ASSERT(old->outstandingRequests >= cx->prevRequestDepth);
/* Serialize access to JSContext::requestDepth from other threads. */
AutoLockGC lock(cx->runtime);
cx->outstandingRequests--;
cx->requestDepth = 0;
old->requestDepth = cx->prevRequestDepth;
cx->prevRequestContext = NULL;
cx->prevRequestDepth = 0;
cx->thread->requestContext = old;
JSThread *t = cx->thread;
JS_ASSERT(CURRENT_THREAD_IS_ME(t));
JS_ASSERT(t->requestDepth != 0);
if (t->requestDepth != 1) {
t->requestDepth--;
} else {
JS_ASSERT(cx->prevRequestDepth == 0);
LeaveTrace(cx); /* for GC safety */
t->data.conservativeGC.updateForRequestEnd(t->suspendCount);
/* Lock before clearing to interlock with ClaimScope, in jslock.c. */
rt = cx->runtime;
JSRuntime *rt = cx->runtime;
AutoLockGC lock(rt);
cx->requestDepth = 0;
cx->outstandingRequests--;
cx->thread->requestContext = NULL;
t->requestDepth = 0;
js_ShareWaitingTitles(cx);
@ -883,19 +841,23 @@ StopRequest(JSContext *cx)
}
}
}
#endif /* JS_THREADSAFE */
JS_PUBLIC_API(void)
JS_BeginRequest(JSContext *cx)
{
#ifdef JS_THREADSAFE
cx->outstandingRequests++;
StartRequest(cx);
#endif
}
JS_PUBLIC_API(void)
JS_EndRequest(JSContext *cx)
{
#ifdef JS_THREADSAFE
/*
* We do not allow to use JS_EndRequest to exit the request when there are
* native frames on the stack that insist that the request must be on. But
* we do allow to call the API if the request was suspended.
*/
JS_ASSERT_IF(cx->requestDepth == 1 && cx->outstandingRequests == 1,
cx->checkRequestDepth == 0);
JS_ASSERT(cx->outstandingRequests != 0);
cx->outstandingRequests--;
StopRequest(cx);
#endif
}
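// A hedged usage sketch: with request state now kept per thread
// (t->requestDepth above), balanced Begin/End pairs nest freely; the
// JSAutoRequest RAII wrapper is the usual way to keep them balanced.
// withNestedRequests is hypothetical.
static void
withNestedRequests(JSContext *cx)
{
    JSAutoRequest outer(cx);        // JS_BeginRequest(cx)
    {
        JSAutoRequest inner(cx);    // only bumps the per-thread depth
    }                               // JS_EndRequest(cx)
}                                   // JS_EndRequest(cx)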
@ -905,11 +867,7 @@ JS_PUBLIC_API(void)
JS_YieldRequest(JSContext *cx)
{
#ifdef JS_THREADSAFE
JS_ASSERT(cx->thread);
CHECK_REQUEST(cx);
cx = cx->thread->requestContext;
if (!cx)
return;
JS_ResumeRequest(cx, JS_SuspendRequest(cx));
#endif
}
@ -918,16 +876,16 @@ JS_PUBLIC_API(jsrefcount)
JS_SuspendRequest(JSContext *cx)
{
#ifdef JS_THREADSAFE
jsrefcount saveDepth = cx->requestDepth;
if (saveDepth == 0)
JSThread *t = cx->thread;
JS_ASSERT(CURRENT_THREAD_IS_ME(t));
jsrefcount saveDepth = t->requestDepth;
if (!saveDepth)
return 0;
JS_THREAD_DATA(cx)->conservativeGC.enable();
do {
cx->outstandingRequests++; /* compensate for StopRequest */
StopRequest(cx);
} while (cx->requestDepth);
t->suspendCount++;
t->requestDepth = 1;
StopRequest(cx);
return saveDepth;
#else
return 0;
@ -938,15 +896,16 @@ JS_PUBLIC_API(void)
JS_ResumeRequest(JSContext *cx, jsrefcount saveDepth)
{
#ifdef JS_THREADSAFE
JSThread *t = cx->thread;
JS_ASSERT(CURRENT_THREAD_IS_ME(t));
if (saveDepth == 0)
return;
JS_ASSERT(cx->outstandingRequests != 0);
do {
JS_BeginRequest(cx);
cx->outstandingRequests--; /* compensate for JS_BeginRequest */
} while (--saveDepth != 0);
JS_THREAD_DATA(cx)->conservativeGC.disable();
JS_ASSERT(saveDepth >= 1);
JS_ASSERT(!t->requestDepth);
JS_ASSERT(t->suspendCount);
StartRequest(cx);
t->requestDepth = saveDepth;
t->suspendCount--;
#endif
}
@ -1610,9 +1569,8 @@ static JSBool
AlreadyHasOwnProperty(JSContext *cx, JSObject *obj, JSAtom *atom)
{
JS_LOCK_OBJ(cx, obj);
JSScope *scope = obj->scope();
bool found = scope->hasProperty(ATOM_TO_JSID(atom));
JS_UNLOCK_SCOPE(cx, scope);
bool found = obj->nativeContains(ATOM_TO_JSID(atom));
JS_UNLOCK_OBJ(cx, obj);
return found;
}
@ -1878,7 +1836,7 @@ JS_free(JSContext *cx, void *p)
JS_PUBLIC_API(void)
JS_updateMallocCounter(JSContext *cx, size_t nbytes)
{
return cx->updateMallocCounter(nbytes);
return cx->runtime->updateMallocCounter(nbytes);
}
JS_PUBLIC_API(char *)
@ -2049,8 +2007,7 @@ JS_SetExtraGCRoots(JSRuntime *rt, JSTraceDataOp traceOp, void *data)
JS_PUBLIC_API(void)
JS_TraceRuntime(JSTracer *trc)
{
LeaveTrace(trc->context);
js_TraceRuntime(trc);
TraceRuntime(trc);
}
JS_PUBLIC_API(void)
@ -2137,8 +2094,7 @@ JS_PrintTraceThingInfo(char *buf, size_t bufsize, JSTracer *trc, void *thing, ui
JS_snprintf(buf, bufsize, "%p", fun);
} else {
if (fun->atom)
js_PutEscapedString(buf, bufsize,
ATOM_TO_STRING(fun->atom), 0);
js_PutEscapedString(buf, bufsize, ATOM_TO_STRING(fun->atom), 0);
}
} else if (clasp->flags & JSCLASS_HAS_PRIVATE) {
JS_snprintf(buf, bufsize, "%p", obj->getPrivate());
@ -2366,7 +2322,7 @@ JS_DumpHeap(JSContext *cx, FILE *fp, void* startThing, uint32 startKind,
dtrc.lastNodep = &node;
if (!startThing) {
JS_ASSERT(startKind == 0);
JS_TraceRuntime(&dtrc.base);
TraceRuntime(&dtrc.base);
} else {
JS_TraceChildren(&dtrc.base, startThing, startKind);
}
@ -2648,7 +2604,7 @@ JS_NewExternalString(JSContext *cx, jschar *chars, size_t length, intN type)
if (!str)
return NULL;
str->initFlat(chars, length);
cx->updateMallocCounter((length + 1) * sizeof(jschar));
cx->runtime->updateMallocCounter((length + 1) * sizeof(jschar));
return str;
}
@ -2963,73 +2919,62 @@ JS_NewObjectWithGivenProto(JSContext *cx, JSClass *jsclasp, JSObject *proto, JSO
return NewNonFunction<WithProto::Given>(cx, clasp, proto, parent);
}
JS_PUBLIC_API(JSObject *)
JS_NewObjectForConstructor(JSContext *cx, const jsval *vp)
{
CHECK_REQUEST(cx);
assertSameCompartment(cx, *vp);
return js_NewInstance(cx, JSVAL_TO_OBJECT(*vp));
}
JS_PUBLIC_API(JSBool)
JS_SealObject(JSContext *cx, JSObject *obj, JSBool deep)
{
CHECK_REQUEST(cx);
assertSameCompartment(cx, obj);
JSScope *scope;
JSIdArray *ida;
uint32 nslots, i;
/* Nothing to do if obj is already sealed. */
if (obj->sealed())
return true;
if (obj->isDenseArray() && !obj->makeDenseArraySlow(cx))
return JS_FALSE;
return false;
if (!obj->isNative()) {
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL,
JSMSG_CANT_SEAL_OBJECT,
obj->getClass()->name);
return JS_FALSE;
return false;
}
scope = obj->scope();
#if defined JS_THREADSAFE && defined DEBUG
#ifdef JS_THREADSAFE
/* Insist on scope being used exclusively by cx's thread. */
if (scope->title.ownercx != cx) {
JS_LOCK_OBJ(cx, obj);
JS_ASSERT(obj->scope() == scope);
JS_ASSERT(scope->title.ownercx == cx);
JS_UNLOCK_SCOPE(cx, scope);
}
JS_ASSERT(obj->title.ownercx == cx);
#endif
/* Nothing to do if obj's scope is already sealed. */
if (scope->sealed())
return JS_TRUE;
/* XXX Enumerate lazy properties now, as they can't be added later. */
ida = JS_Enumerate(cx, obj);
JSIdArray *ida = JS_Enumerate(cx, obj);
if (!ida)
return JS_FALSE;
return false;
JS_DestroyIdArray(cx, ida);
/* Ensure that obj has its own, mutable scope, and seal that scope. */
JS_LOCK_OBJ(cx, obj);
scope = js_GetMutableScope(cx, obj);
if (scope)
scope->seal(cx);
JS_UNLOCK_OBJ(cx, obj);
if (!scope)
return JS_FALSE;
/* If we are not sealing an entire object graph, we're done. */
/* If not sealing an entire object graph, we're done after sealing obj. */
obj->seal(cx);
if (!deep)
return JS_TRUE;
return true;
/* Walk slots in obj and if any value is a non-null object, seal it. */
nslots = scope->freeslot;
for (i = 0; i != nslots; ++i) {
for (uint32 i = 0, n = obj->freeslot; i != n; ++i) {
const Value &v = obj->getSlot(i);
if (i == JSSLOT_PRIVATE && (obj->getClass()->flags & JSCLASS_HAS_PRIVATE))
continue;
if (v.isPrimitive())
continue;
if (!JS_SealObject(cx, &v.toObject(), deep))
return JS_FALSE;
return false;
}
return JS_TRUE;
return true;
}
JS_PUBLIC_API(JSObject *)
@ -3080,18 +3025,18 @@ LookupResult(JSContext *cx, JSObject *obj, JSObject *obj2, jsid id,
}
if (obj2->isNative()) {
JSScopeProperty *sprop = (JSScopeProperty *) prop;
Shape *shape = (Shape *) prop;
if (sprop->isMethod()) {
AutoScopePropertyRooter root(cx, sprop);
if (shape->isMethod()) {
AutoShapeRooter root(cx, shape);
JS_UNLOCK_OBJ(cx, obj2);
vp->setObject(sprop->methodObject());
return obj2->scope()->methodReadBarrier(cx, sprop, vp);
vp->setObject(shape->methodObject());
return obj2->methodReadBarrier(cx, *shape, vp);
}
/* Peek at the native property's slot value, without doing a Get. */
if (SPROP_HAS_VALID_SLOT(sprop, obj2->scope()))
*vp = obj2->lockedGetSlot(sprop->slot);
if (obj2->containsSlot(shape->slot))
*vp = obj2->lockedGetSlot(shape->slot);
else
vp->setBoolean(true);
JS_UNLOCK_OBJ(cx, obj2);
@ -3216,9 +3161,8 @@ JS_AlreadyHasOwnPropertyById(JSContext *cx, JSObject *obj, jsid id, JSBool *foun
}
JS_LOCK_OBJ(cx, obj);
JSScope *scope = obj->scope();
*foundp = scope->hasProperty(id);
JS_UNLOCK_SCOPE(cx, scope);
*foundp = obj->nativeContains(id);
JS_UNLOCK_OBJ(cx, obj);
return JS_TRUE;
}
@ -3315,7 +3259,7 @@ JS_DefinePropertyWithTinyId(JSContext *cx, JSObject *obj, const char *name, int8
jsval value, JSPropertyOp getter, JSPropertyOp setter, uintN attrs)
{
return DefineProperty(cx, obj, name, Valueify(value), Valueify(getter),
Valueify(setter), attrs, JSScopeProperty::HAS_SHORTID, tinyid);
Valueify(setter), attrs, Shape::HAS_SHORTID, tinyid);
}
static JSBool
@ -3342,7 +3286,7 @@ JS_DefineUCPropertyWithTinyId(JSContext *cx, JSObject *obj, const jschar *name,
uintN attrs)
{
return DefineUCProperty(cx, obj, name, namelen, Valueify(value), Valueify(getter),
Valueify(setter), attrs, JSScopeProperty::HAS_SHORTID, tinyid);
Valueify(setter), attrs, Shape::HAS_SHORTID, tinyid);
}
JS_PUBLIC_API(JSBool)
@ -3401,7 +3345,7 @@ JS_DefineProperties(JSContext *cx, JSObject *obj, JSPropertySpec *ps)
for (ok = true; ps->name; ps++) {
ok = DefineProperty(cx, obj, ps->name, UndefinedValue(),
Valueify(ps->getter), Valueify(ps->setter),
ps->flags, JSScopeProperty::HAS_SHORTID, ps->tinyid);
ps->flags, Shape::HAS_SHORTID, ps->tinyid);
if (!ok)
break;
}
@ -3414,7 +3358,7 @@ JS_AliasProperty(JSContext *cx, JSObject *obj, const char *name, const char *ali
JSObject *obj2;
JSProperty *prop;
JSBool ok;
JSScopeProperty *sprop;
Shape *shape;
CHECK_REQUEST(cx);
assertSameCompartment(cx, obj);
@ -3438,11 +3382,11 @@ JS_AliasProperty(JSContext *cx, JSObject *obj, const char *name, const char *ali
if (!atom) {
ok = JS_FALSE;
} else {
sprop = (JSScopeProperty *)prop;
shape = (Shape *)prop;
ok = (js_AddNativeProperty(cx, obj, ATOM_TO_JSID(atom),
sprop->getter(), sprop->setter(), sprop->slot,
sprop->attributes(), sprop->getFlags() | JSScopeProperty::ALIAS,
sprop->shortid)
shape->getter(), shape->setter(), shape->slot,
shape->attributes(), shape->getFlags() | Shape::ALIAS,
shape->shortid)
!= NULL);
}
JS_UNLOCK_OBJ(cx, obj);
@ -3454,7 +3398,7 @@ JS_AliasElement(JSContext *cx, JSObject *obj, const char *name, jsint alias)
{
JSObject *obj2;
JSProperty *prop;
JSScopeProperty *sprop;
Shape *shape;
JSBool ok;
CHECK_REQUEST(cx);
@ -3477,11 +3421,11 @@ JS_AliasElement(JSContext *cx, JSObject *obj, const char *name, jsint alias)
numBuf, name, obj2->getClass()->name);
return JS_FALSE;
}
sprop = (JSScopeProperty *)prop;
shape = (Shape *)prop;
ok = (js_AddNativeProperty(cx, obj, INT_TO_JSID(alias),
sprop->getter(), sprop->setter(), sprop->slot,
sprop->attributes(), sprop->getFlags() | JSScopeProperty::ALIAS,
sprop->shortid)
shape->getter(), shape->setter(), shape->slot,
shape->attributes(), shape->getFlags() | Shape::ALIAS,
shape->shortid)
!= NULL);
JS_UNLOCK_OBJ(cx, obj);
return ok;
@ -3510,27 +3454,28 @@ GetPropertyDescriptorById(JSContext *cx, JSObject *obj, jsid id, uintN flags,
desc->obj = obj2;
if (obj2->isNative()) {
JSScopeProperty *sprop = (JSScopeProperty *) prop;
desc->attrs = sprop->attributes();
Shape *shape = (Shape *) prop;
desc->attrs = shape->attributes();
if (sprop->isMethod()) {
if (shape->isMethod()) {
desc->getter = desc->setter = PropertyStub;
desc->value.setObject(sprop->methodObject());
desc->value.setObject(shape->methodObject());
} else {
desc->getter = sprop->getter();
desc->setter = sprop->setter();
if (SPROP_HAS_VALID_SLOT(sprop, obj2->scope()))
desc->value = obj2->lockedGetSlot(sprop->slot);
desc->getter = shape->getter();
desc->setter = shape->setter();
if (obj2->containsSlot(shape->slot))
desc->value = obj2->lockedGetSlot(shape->slot);
else
desc->value.setUndefined();
}
JS_UNLOCK_OBJ(cx, obj2);
} else if (obj2->isProxy()) {
JSAutoResolveFlags rf(cx, flags);
return own
? JSProxy::getOwnPropertyDescriptor(cx, obj2, id, desc)
: JSProxy::getPropertyDescriptor(cx, obj2, id, desc);
} else {
if (obj2->isProxy()) {
JSAutoResolveFlags rf(cx, flags);
return own
? JSProxy::getOwnPropertyDescriptor(cx, obj2, id, desc)
: JSProxy::getPropertyDescriptor(cx, obj2, id, desc);
}
if (!obj2->getAttributes(cx, id, &desc->attrs))
return false;
desc->getter = NULL;
@ -3626,7 +3571,7 @@ SetPropertyAttributesById(JSContext *cx, JSObject *obj, jsid id, uintN attrs, JS
return true;
}
JSBool ok = obj->isNative()
? js_SetNativeAttributes(cx, obj, (JSScopeProperty *) prop, attrs)
? js_SetNativeAttributes(cx, obj, (Shape *) prop, attrs)
: obj->setAttributes(cx, id, &attrs);
if (ok)
*foundp = true;
@ -3819,7 +3764,7 @@ JS_Enumerate(JSContext *cx, JSObject *obj)
* XXX reverse iterator for properties, unreverse and meld with jsinterp.c's
* prop_iterator_class somehow...
* + preserve the obj->enumerate API while optimizing the native object case
* + native case here uses a JSScopeProperty *, but that iterates in reverse!
* + native case here uses a Shape *, but that iterates in reverse!
* + so we make non-native match, by reverse-iterating after JS_Enumerating
*/
const uint32 JSSLOT_ITER_INDEX = JSSLOT_PRIVATE + 1;
@ -3848,7 +3793,7 @@ prop_iter_trace(JSTracer *trc, JSObject *obj)
if (obj->fslots[JSSLOT_ITER_INDEX].toInt32() < 0) {
/* Native case: just mark the next property to visit. */
((JSScopeProperty *) pdata)->trace(trc);
((Shape *) pdata)->trace(trc);
} else {
/* Non-native case: mark each id in the JSIdArray private. */
JSIdArray *ida = (JSIdArray *) pdata;
@ -3881,8 +3826,7 @@ JS_PUBLIC_API(JSObject *)
JS_NewPropertyIterator(JSContext *cx, JSObject *obj)
{
JSObject *iterobj;
JSScope *scope;
void *pdata;
const void *pdata;
jsint index;
JSIdArray *ida;
@ -3893,9 +3837,8 @@ JS_NewPropertyIterator(JSContext *cx, JSObject *obj)
return NULL;
if (obj->isNative()) {
/* Native case: start with the last property in obj's own scope. */
scope = obj->scope();
pdata = scope->lastProperty();
/* Native case: start with the last property in obj. */
pdata = obj->lastProperty();
index = -1;
} else {
/*
@ -3913,7 +3856,7 @@ JS_NewPropertyIterator(JSContext *cx, JSObject *obj)
}
/* iterobj cannot escape to other threads here. */
iterobj->setPrivate(pdata);
iterobj->setPrivate(const_cast<void *>(pdata));
iterobj->fslots[JSSLOT_ITER_INDEX].setInt32(index);
return iterobj;
}
@ -3923,7 +3866,7 @@ JS_NextProperty(JSContext *cx, JSObject *iterobj, jsid *idp)
{
jsint i;
JSObject *obj;
JSScopeProperty *sprop;
const Shape *shape;
JSIdArray *ida;
CHECK_REQUEST(cx);
@ -3933,21 +3876,22 @@ JS_NextProperty(JSContext *cx, JSObject *iterobj, jsid *idp)
/* Native case: private data is a property tree node pointer. */
obj = iterobj->getParent();
JS_ASSERT(obj->isNative());
sprop = (JSScopeProperty *) iterobj->getPrivate();
shape = (Shape *) iterobj->getPrivate();
/*
* If the next property in the property tree ancestor line is
* not enumerable, or it's an alias, skip it and keep on trying
* to find an enumerable property that is still in scope.
* If the next property mapped by obj in the property tree ancestor
* line is not enumerable, or it's an alias, skip it and keep on trying
* to find an enumerable property that is still in obj.
*/
while (sprop && (!sprop->enumerable() || sprop->isAlias()))
sprop = sprop->parent;
while (shape->previous() && (!shape->enumerable() || shape->isAlias()))
shape = shape->previous();
if (!sprop) {
if (!shape->previous()) {
JS_ASSERT(JSID_IS_EMPTY(shape->id));
*idp = JSID_VOID;
} else {
iterobj->setPrivate(sprop->parent);
*idp = sprop->id;
iterobj->setPrivate(const_cast<Shape *>(shape->previous()));
*idp = shape->id;
}
} else {
/* Non-native case: use the ida enumerated when iterobj was created. */
@ -4122,9 +4066,8 @@ JS_CloneFunctionObject(JSContext *cx, JSObject *funobj, JSObject *parent)
}
JSFunction *fun = GET_FUNCTION_PRIVATE(cx, funobj);
JSObject *clone = CloneFunctionObject(cx, fun, parent);
if (!clone)
return NULL;
if (!FUN_FLAT_CLOSURE(fun))
return CloneFunctionObject(cx, fun, parent);
/*
* A flat closure carries its own environment, so why clone it? In case
@ -4138,42 +4081,27 @@ JS_CloneFunctionObject(JSContext *cx, JSObject *funobj, JSObject *parent)
* they were activations, respecting the skip field in each upvar's cookie
* but looking up the property by name instead of frame slot.
*/
if (FUN_FLAT_CLOSURE(fun)) {
JS_ASSERT(funobj->dslots);
if (!js_EnsureReservedSlots(cx, clone,
fun->countInterpretedReservedSlots())) {
return NULL;
}
JSObject *clone = js_AllocFlatClosure(cx, fun, parent);
if (!clone)
return NULL;
JSUpvarArray *uva = fun->u.i.script->upvars();
JS_ASSERT(uva->length <= clone->dslots[-1].toPrivateUint32());
JSUpvarArray *uva = fun->u.i.script->upvars();
uint32 i = uva->length;
JS_ASSERT(i != 0);
void *mark = JS_ARENA_MARK(&cx->tempPool);
jsuword *names = js_GetLocalNameArray(cx, fun, &cx->tempPool);
if (!names)
return NULL;
uint32 i = 0, n = uva->length;
for (; i < n; i++) {
JSObject *obj = parent;
int skip = uva->vector[i].level();
while (--skip > 0) {
if (!obj) {
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL,
JSMSG_BAD_CLONE_FUNOBJ_SCOPE);
goto break2;
}
obj = obj->getParent();
for (Shape::Range r(fun->lastUpvar()); i-- != 0; r.popFront()) {
JSObject *obj = parent;
int skip = uva->vector[i].level();
while (--skip > 0) {
if (!obj) {
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL,
JSMSG_BAD_CLONE_FUNOBJ_SCOPE);
return NULL;
}
JSAtom *atom = JS_LOCAL_NAME_TO_ATOM(names[i]);
if (!obj->getProperty(cx, ATOM_TO_JSID(atom), &clone->dslots[i]))
break;
obj = obj->getParent();
}
break2:
JS_ARENA_RELEASE(&cx->tempPool, mark);
if (i < n)
if (!obj->getProperty(cx, r.front().id, clone->getFlatClosureUpvars() + i))
return NULL;
}
@ -4219,14 +4147,14 @@ JS_ObjectIsFunction(JSContext *cx, JSObject *obj)
}
static JSBool
js_generic_fast_native_method_dispatcher(JSContext *cx, uintN argc, Value *vp)
js_generic_native_method_dispatcher(JSContext *cx, uintN argc, Value *vp)
{
JSFunctionSpec *fs;
JSObject *tmp;
FastNative native;
Native native;
fs = (JSFunctionSpec *) vp->toObject().getReservedSlot(0).toPrivate();
JS_ASSERT((~fs->flags & (JSFUN_FAST_NATIVE | JSFUN_GENERIC_NATIVE)) == 0);
JS_ASSERT((fs->flags & JSFUN_GENERIC_NATIVE) != 0);
if (argc < 1) {
js_ReportMissingArg(cx, *vp, 0);
@ -4264,62 +4192,13 @@ js_generic_fast_native_method_dispatcher(JSContext *cx, uintN argc, Value *vp)
native =
#ifdef JS_TRACER
(fs->flags & JSFUN_TRCINFO)
? (FastNative) JS_FUNC_TO_DATA_PTR(JSNativeTraceInfo *, fs->call)->native
? JS_FUNC_TO_DATA_PTR(JSNativeTraceInfo *, fs->call)->native
:
#endif
(FastNative) fs->call;
Valueify(fs->call);
return native(cx, argc, vp);
}
static JSBool
js_generic_native_method_dispatcher(JSContext *cx, JSObject *obj,
uintN argc, Value *argv, Value *rval)
{
JSFunctionSpec *fs;
JSObject *tmp;
fs = (JSFunctionSpec *) argv[-2].toObject().getReservedSlot(0).toPrivate();
JS_ASSERT((fs->flags & (JSFUN_FAST_NATIVE | JSFUN_GENERIC_NATIVE)) ==
JSFUN_GENERIC_NATIVE);
if (argc < 1) {
js_ReportMissingArg(cx, *(argv - 2), 0);
return JS_FALSE;
}
if (argv[0].isPrimitive()) {
/*
* Make sure that this is an object or null, as required by the generic
* functions.
*/
if (!js_ValueToObjectOrNull(cx, argv[0], &tmp))
return JS_FALSE;
argv[0].setObjectOrNull(tmp);
}
/*
* Copy all actual (argc) arguments down over our |this| parameter,
* argv[-1], which is almost always the class constructor object, e.g.
* Array. Then call the corresponding prototype native method with our
* first argument passed as |this|.
*/
memmove(argv - 1, argv, argc * sizeof(jsval));
/*
* Follow Function.prototype.apply and .call by using the global object as
* the 'this' param if no args.
*/
if (!ComputeThisFromArgv(cx, argv))
return JS_FALSE;
js_GetTopStackFrame(cx)->setThisValue(argv[-1]);
JS_ASSERT(cx->fp()->argv == argv);
/* Clear the last parameter in case too few arguments were passed. */
argv[--argc].setUndefined();
return fs->call(cx, &argv[-1].toObject(), argc, Jsvalify(argv), Jsvalify(rval));
}
JS_PUBLIC_API(JSBool)
JS_DefineFunctions(JSContext *cx, JSObject *obj, JSFunctionSpec *fs)
{
@ -4346,14 +4225,11 @@ JS_DefineFunctions(JSContext *cx, JSObject *obj, JSFunctionSpec *fs)
flags &= ~JSFUN_GENERIC_NATIVE;
fun = JS_DefineFunction(cx, ctor, fs->name,
(flags & JSFUN_FAST_NATIVE)
? (JSNative) js_generic_fast_native_method_dispatcher
: Jsvalify(js_generic_native_method_dispatcher),
Jsvalify(js_generic_native_method_dispatcher),
fs->nargs + 1,
flags & ~JSFUN_TRCINFO);
if (!fun)
return JS_FALSE;
fun->u.n.extra = (uint16)fs->extra;
/*
* As jsapi.h notes, fs must point to storage that lives as long
@ -4364,12 +4240,9 @@ JS_DefineFunctions(JSContext *cx, JSObject *obj, JSFunctionSpec *fs)
return JS_FALSE;
}
JS_ASSERT(!(flags & JSFUN_FAST_NATIVE) ||
(uint16)(fs->extra >> 16) <= fs->nargs);
fun = JS_DefineFunction(cx, obj, fs->name, fs->call, fs->nargs, flags);
if (!fun)
return JS_FALSE;
fun->u.n.extra = (uint16)fs->extra;
}
return JS_TRUE;
}
@ -4648,7 +4521,7 @@ JS_CompileUCFunctionForPrincipals(JSContext *cx, JSObject *obj,
fun = NULL;
goto out2;
}
if (!js_AddLocal(cx, fun, argAtom, JSLOCAL_ARG)) {
if (!fun->addLocal(cx, argAtom, JSLOCAL_ARG)) {
fun = NULL;
goto out2;
}
@ -4842,7 +4715,7 @@ JS_CallFunction(JSContext *cx, JSObject *obj, JSFunction *fun, uintN argc, jsval
CHECK_REQUEST(cx);
assertSameCompartment(cx, obj, fun, JSValueArray(argv, argc));
ok = InternalCall(cx, obj, ObjectValue(*fun), argc, Valueify(argv), Valueify(rval));
ok = ExternalInvoke(cx, obj, ObjectValue(*fun), argc, Valueify(argv), Valueify(rval));
LAST_FRAME_CHECKS(cx, ok);
return ok;
}
@ -4858,7 +4731,7 @@ JS_CallFunctionName(JSContext *cx, JSObject *obj, const char *name, uintN argc,
JSAtom *atom = js_Atomize(cx, name, strlen(name), 0);
JSBool ok = atom &&
js_GetMethod(cx, obj, ATOM_TO_JSID(atom), JSGET_NO_METHOD_BARRIER, tvr.addr()) &&
InternalCall(cx, obj, tvr.value(), argc, Valueify(argv), Valueify(rval));
ExternalInvoke(cx, obj, tvr.value(), argc, Valueify(argv), Valueify(rval));
LAST_FRAME_CHECKS(cx, ok);
return ok;
}
@ -4871,7 +4744,7 @@ JS_CallFunctionValue(JSContext *cx, JSObject *obj, jsval fval, uintN argc, jsval
CHECK_REQUEST(cx);
assertSameCompartment(cx, obj, fval, JSValueArray(argv, argc));
ok = InternalCall(cx, obj, Valueify(fval), argc, Valueify(argv), Valueify(rval));
ok = ExternalInvoke(cx, obj, Valueify(fval), argc, Valueify(argv), Valueify(rval));
LAST_FRAME_CHECKS(cx, ok);
return ok;
}
@ -4942,7 +4815,10 @@ JS_TriggerOperationCallback(JSContext *cx)
JS_PUBLIC_API(void)
JS_TriggerAllOperationCallbacks(JSRuntime *rt)
{
js_TriggerAllOperationCallbacks(rt, JS_FALSE);
#ifdef JS_THREADSAFE
AutoLockGC lock(rt);
#endif
TriggerAllOperationCallbacks(rt);
}
JS_PUBLIC_API(JSBool)
@ -4964,12 +4840,6 @@ JS_IsRunning(JSContext *cx)
return fp != NULL;
}
JS_PUBLIC_API(JSBool)
JS_IsConstructing(JSContext *cx)
{
return cx->isConstructing();
}
JS_PUBLIC_API(JSStackFrame *)
JS_SaveFrameChain(JSContext *cx)
{
@ -5650,7 +5520,7 @@ JS_PUBLIC_API(jsword)
JS_SetContextThread(JSContext *cx)
{
#ifdef JS_THREADSAFE
JS_ASSERT(cx->requestDepth == 0);
JS_ASSERT(!cx->outstandingRequests);
if (cx->thread) {
JS_ASSERT(CURRENT_THREAD_IS_ME(cx->thread));
return reinterpret_cast<jsword>(cx->thread->id);
@ -5672,15 +5542,15 @@ JS_ClearContextThread(JSContext *cx)
{
#ifdef JS_THREADSAFE
/*
* This must be called outside a request and, if cx is associated with a
* thread, this must be called only from that thread. If not, this is a
* harmless no-op.
* cx must have exited all requests it entered and, if cx is associated
* with a thread, this must be called only from that thread. If not, this
* is a harmless no-op.
*/
JS_ASSERT(cx->requestDepth == 0);
if (!cx->thread)
JS_ASSERT(cx->outstandingRequests == 0);
JSThread *t = cx->thread;
if (!t)
return 0;
JS_ASSERT(CURRENT_THREAD_IS_ME(cx->thread));
void *old = cx->thread->id;
JS_ASSERT(CURRENT_THREAD_IS_ME(t));
/*
* We must not race with a GC that accesses cx->thread for all threads,
@ -5690,7 +5560,13 @@ JS_ClearContextThread(JSContext *cx)
AutoLockGC lock(rt);
js_WaitForGC(rt);
js_ClearContextThread(cx);
return reinterpret_cast<jsword>(old);
JS_ASSERT_IF(JS_CLIST_IS_EMPTY(&t->contextList), !t->requestDepth);
/*
* We can access t->id as long as the GC lock is held and we cannot race
* with the GC that may delete t.
*/
return reinterpret_cast<jsword>(t->id);
#else
return 0;
#endif
@ -5722,7 +5598,7 @@ JS_SetGCZeal(JSContext *cx, uint8 zeal)
#if !defined(STATIC_EXPORTABLE_JS_API) && !defined(STATIC_JS_API) && defined(XP_WIN) && !defined (WINCE)
#include <windows.h>
#include "jswin.h"
/*
* Initialization routine for the JS DLL.


@ -308,21 +308,22 @@ JSVAL_TO_PRIVATE(jsval v)
#define JSID_TYPE_MASK 0x7
/*
* Do not use canonical 'id' for jsid parameters since this is a magic word in
* Avoid using canonical 'id' for jsid parameters since this is a magic word in
* Objective-C++ which, apparently, wants to be able to #include jsapi.h.
*/
#define id iden
static JS_ALWAYS_INLINE JSBool
JSID_IS_STRING(jsid iden)
JSID_IS_STRING(jsid id)
{
return (JSID_BITS(iden) & JSID_TYPE_MASK) == 0;
return (JSID_BITS(id) & JSID_TYPE_MASK) == 0;
}
static JS_ALWAYS_INLINE JSString *
JSID_TO_STRING(jsid iden)
JSID_TO_STRING(jsid id)
{
JS_ASSERT(JSID_IS_STRING(iden));
return (JSString *)(JSID_BITS(iden));
JS_ASSERT(JSID_IS_STRING(id));
return (JSString *)(JSID_BITS(id));
}
JS_PUBLIC_API(JSBool)
@ -332,24 +333,24 @@ JS_StringHasBeenInterned(JSString *str);
static JS_ALWAYS_INLINE jsid
INTERNED_STRING_TO_JSID(JSString *str)
{
jsid iden;
jsid id;
JS_ASSERT(JS_StringHasBeenInterned(str));
JS_ASSERT(((size_t)str & JSID_TYPE_MASK) == 0);
JSID_BITS(iden) = (size_t)str;
return iden;
JSID_BITS(id) = (size_t)str;
return id;
}
static JS_ALWAYS_INLINE JSBool
JSID_IS_INT(jsid iden)
JSID_IS_INT(jsid id)
{
return !!(JSID_BITS(iden) & JSID_TYPE_INT);
return !!(JSID_BITS(id) & JSID_TYPE_INT);
}
static JS_ALWAYS_INLINE int32
JSID_TO_INT(jsid iden)
JSID_TO_INT(jsid id)
{
JS_ASSERT(JSID_IS_INT(iden));
return ((int32)JSID_BITS(iden)) >> 1;
JS_ASSERT(JSID_IS_INT(id));
return ((int32)JSID_BITS(id)) >> 1;
}
#define JSID_INT_MIN (-(1 << 30))
@ -365,45 +366,46 @@ INT_FITS_IN_JSID(int32 i)
static JS_ALWAYS_INLINE jsid
INT_TO_JSID(int32 i)
{
jsid iden;
jsid id;
JS_ASSERT(INT_FITS_IN_JSID(i));
JSID_BITS(iden) = ((i << 1) | JSID_TYPE_INT);
return iden;
JSID_BITS(id) = ((i << 1) | JSID_TYPE_INT);
return id;
}
static JS_ALWAYS_INLINE JSBool
JSID_IS_OBJECT(jsid iden)
JSID_IS_OBJECT(jsid id)
{
return (JSID_BITS(iden) & JSID_TYPE_MASK) == JSID_TYPE_OBJECT;
return (JSID_BITS(id) & JSID_TYPE_MASK) == JSID_TYPE_OBJECT &&
(size_t)JSID_BITS(id) != JSID_TYPE_OBJECT;
}
static JS_ALWAYS_INLINE JSObject *
JSID_TO_OBJECT(jsid iden)
JSID_TO_OBJECT(jsid id)
{
JS_ASSERT(JSID_IS_OBJECT(iden));
return (JSObject *)(JSID_BITS(iden) & ~(size_t)JSID_TYPE_MASK);
JS_ASSERT(JSID_IS_OBJECT(id));
return (JSObject *)(JSID_BITS(id) & ~(size_t)JSID_TYPE_MASK);
}
static JS_ALWAYS_INLINE jsid
OBJECT_TO_JSID(JSObject *obj)
{
jsid iden;
jsid id;
JS_ASSERT(obj != NULL);
JS_ASSERT(((size_t)obj & JSID_TYPE_MASK) == 0);
JSID_BITS(iden) = ((size_t)obj | JSID_TYPE_OBJECT);
return iden;
JSID_BITS(id) = ((size_t)obj | JSID_TYPE_OBJECT);
return id;
}
static JS_ALWAYS_INLINE JSBool
JSID_IS_GCTHING(jsid iden)
JSID_IS_GCTHING(jsid id)
{
return JSID_IS_STRING(iden) || JSID_IS_OBJECT(iden);
return JSID_IS_STRING(id) || JSID_IS_OBJECT(id);
}
static JS_ALWAYS_INLINE void *
JSID_TO_GCTHING(jsid iden)
JSID_TO_GCTHING(jsid id)
{
return (void *)(JSID_BITS(iden) & ~(size_t)JSID_TYPE_MASK);
return (void *)(JSID_BITS(id) & ~(size_t)JSID_TYPE_MASK);
}
/*
@ -412,11 +414,11 @@ JSID_TO_GCTHING(jsid iden)
*/
static JS_ALWAYS_INLINE JSBool
JSID_IS_DEFAULT_XML_NAMESPACE(jsid iden)
JSID_IS_DEFAULT_XML_NAMESPACE(jsid id)
{
JS_ASSERT_IF(((size_t)JSID_BITS(iden) & JSID_TYPE_MASK) == JSID_TYPE_DEFAULT_XML_NAMESPACE,
JSID_BITS(iden) == JSID_TYPE_DEFAULT_XML_NAMESPACE);
return ((size_t)JSID_BITS(iden) == JSID_TYPE_DEFAULT_XML_NAMESPACE);
JS_ASSERT_IF(((size_t)JSID_BITS(id) & JSID_TYPE_MASK) == JSID_TYPE_DEFAULT_XML_NAMESPACE,
JSID_BITS(id) == JSID_TYPE_DEFAULT_XML_NAMESPACE);
return ((size_t)JSID_BITS(id) == JSID_TYPE_DEFAULT_XML_NAMESPACE);
}
#ifdef JS_USE_JSVAL_JSID_STRUCT_TYPES
@ -433,17 +435,27 @@ extern JS_PUBLIC_DATA(jsid) JS_DEFAULT_XML_NAMESPACE_ID;
*/
static JS_ALWAYS_INLINE JSBool
JSID_IS_VOID(jsid iden)
JSID_IS_VOID(jsid id)
{
JS_ASSERT_IF(((size_t)JSID_BITS(iden) & JSID_TYPE_MASK) == JSID_TYPE_VOID,
JSID_BITS(iden) == JSID_TYPE_VOID);
return ((size_t)JSID_BITS(iden) == JSID_TYPE_VOID);
JS_ASSERT_IF(((size_t)JSID_BITS(id) & JSID_TYPE_MASK) == JSID_TYPE_VOID,
JSID_BITS(id) == JSID_TYPE_VOID);
return ((size_t)JSID_BITS(id) == JSID_TYPE_VOID);
}
static JS_ALWAYS_INLINE JSBool
JSID_IS_EMPTY(jsid id)
{
return ((size_t)JSID_BITS(id) == JSID_TYPE_OBJECT);
}
#undef id
#ifdef JS_USE_JSVAL_JSID_STRUCT_TYPES
extern JS_PUBLIC_DATA(jsid) JSID_VOID;
extern JS_PUBLIC_DATA(jsid) JSID_EMPTY;
#else
#define JSID_VOID ((jsid)JSID_TYPE_VOID)
# define JSID_VOID ((jsid)JSID_TYPE_VOID)
# define JSID_EMPTY ((jsid)JSID_TYPE_OBJECT)
#endif
/************************************************************************/
@ -472,6 +484,8 @@ extern JS_PUBLIC_DATA(jsid) JSID_VOID;
if getters/setters use a shortid */
/* Function flags, set in JSFunctionSpec and passed to JS_NewFunction etc. */
#define JSFUN_CONSTRUCTOR 0x02 /* native that can be called as a ctor
without creating a this object */
#define JSFUN_LAMBDA 0x08 /* expressed, not declared, function */
#define JSFUN_HEAVYWEIGHT 0x80 /* activation requires a Call object */
@ -485,9 +499,7 @@ extern JS_PUBLIC_DATA(jsid) JSID_VOID;
#define JSFUN_THISP_BOOLEAN 0x0400 /* |this| may be a primitive boolean */
#define JSFUN_THISP_PRIMITIVE 0x0700 /* |this| may be any primitive value */
#define JSFUN_FAST_NATIVE 0x0800 /* JSFastNative needs no JSStackFrame */
#define JSFUN_FLAGS_MASK 0x0ff8 /* overlay JSFUN_* attributes --
#define JSFUN_FLAGS_MASK 0x07fa /* overlay JSFUN_* attributes --
bits 12-15 are used internally to
flag interpreted functions */
@ -907,6 +919,8 @@ JS_StringToVersion(const char *string);
leaving that up to the
embedding. */
#define JSOPTION_METHODJIT JS_BIT(14) /* Whole-method JIT. */
extern JS_PUBLIC_API(uint32)
JS_GetOptions(JSContext *cx);
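An embedding would opt a context into the new whole-method JIT via the options API; a minimal sketch, assuming the usual JS_SetOptions counterpart to JS_GetOptions (not shown in this hunk):

    uint32 opts = JS_GetOptions(cx);
    JS_SetOptions(cx, opts | JSOPTION_METHODJIT);   /* enable the method JIT for cx */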
@ -954,10 +968,18 @@ class JS_PUBLIC_API(JSAutoCrossCompartmentCall)
bool enter(JSContext *cx, JSObject *target);
bool entered() const { return call != NULL; }
~JSAutoCrossCompartmentCall() {
if (call)
JS_LeaveCrossCompartmentCall(call);
}
void swap(JSAutoCrossCompartmentCall &other) {
JSCrossCompartmentCall *tmp = call;
call = other.call;
other.call = tmp;
}
};
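A rough usage sketch for the JSAutoCrossCompartmentCall RAII helper above (targetObj is a hypothetical object from another compartment, not taken from this changeset):

    {
        JSAutoCrossCompartmentCall ac;
        if (!ac.enter(cx, targetObj))   /* enter targetObj's compartment */
            return JS_FALSE;
        /* ... operate on targetObj; the destructor leaves the call when ac goes out of scope ... */
    }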
class JS_FRIEND_API(JSAutoEnterCompartment)
@ -1064,9 +1086,11 @@ JS_InitCTypesClass(JSContext *cx, JSObject *global);
* WARNING: These are not (yet) mandatory macros, but new code outside of the
* engine should use them. In the Mozilla 2.0 milestone their definitions may
* change incompatibly.
*
* N.B. constructors must not use JS_THIS, as no 'this' object has been created.
*/
#define JS_CALLEE(cx,vp) ((vp)[0])
#define JS_ARGV_CALLEE(argv) ((argv)[-2])
#define JS_THIS(cx,vp) JS_ComputeThis(cx, vp)
#define JS_THIS_OBJECT(cx,vp) (JSVAL_TO_OBJECT(JS_THIS(cx,vp)))
#define JS_ARGV(cx,vp) ((vp) + 2)
@ -1076,6 +1100,15 @@ JS_InitCTypesClass(JSContext *cx, JSObject *global);
extern JS_PUBLIC_API(jsval)
JS_ComputeThis(JSContext *cx, jsval *vp);
#ifdef __cplusplus
#undef JS_THIS
static inline jsval
JS_THIS(JSContext *cx, jsval *vp)
{
return JSVAL_IS_PRIMITIVE(vp[1]) ? JS_ComputeThis(cx, vp) : vp[1];
}
#endif
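As a hedged illustration of the vp-based calling convention these macros serve (my_native is illustrative, and JS_SET_RVAL is assumed from the same macro family); per the note above, a constructor must never touch JS_THIS:

    static JSBool
    my_native(JSContext *cx, uintN argc, jsval *vp)
    {
        JSObject *thisObj = JS_THIS_OBJECT(cx, vp);   /* computes 'this' on demand */
        if (!thisObj)
            return JS_FALSE;
        jsval *argv = JS_ARGV(cx, vp);                /* actual arguments start at vp + 2 */
        (void) argc; (void) argv;
        JS_SET_RVAL(cx, vp, JSVAL_VOID);              /* store the return value in *vp */
        return JS_TRUE;
    }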
extern JS_PUBLIC_API(void *)
JS_malloc(JSContext *cx, size_t nbytes);
@ -1630,7 +1663,6 @@ struct JSClass {
#define JSCLASS_NEW_ENUMERATE (1<<1) /* has JSNewEnumerateOp hook */
#define JSCLASS_NEW_RESOLVE (1<<2) /* has JSNewResolveOp hook */
#define JSCLASS_PRIVATE_IS_NSISUPPORTS (1<<3) /* private is (nsISupports *) */
/* (1<<4) was JSCLASS_SHARE_ALL_PROPERTIES, now obsolete. See bug 527805. */
#define JSCLASS_NEW_RESOLVE_GETS_START (1<<5) /* JSNewResolveOp gets starting
object in prototype chain
passed in via *objp in/out
@ -1763,38 +1795,23 @@ struct JSFunctionSpec {
JSNative call;
uint16 nargs;
uint16 flags;
/*
* extra & 0xFFFF: Number of extra argument slots for local GC roots.
* If fast native, must be zero.
* extra >> 16: Reserved for future use (must be 0).
*/
uint32 extra;
};
/*
* Terminating sentinel initializer to put at the end of a JSFunctionSpec array
* that's passed to JS_DefineFunctions or JS_InitClass.
*/
#define JS_FS_END JS_FS(NULL,NULL,0,0,0)
#define JS_FS_END JS_FS(NULL,NULL,0,0)
/*
* Initializer macro for a JSFunctionSpec array element. This is the original
* kind of native function specifier initializer. Use JS_FN ("fast native", see
* JSFastNative in jspubtd.h) for all functions that do not need a stack frame
* when activated.
* Initializer macros for a JSFunctionSpec array element. JS_FN (whose name
* pays homage to the old JSNative/JSFastNative split) simply adds the flag
* JSFUN_STUB_GSOPS.
*/
#define JS_FS(name,call,nargs,flags,extra) \
{name, call, nargs, flags, extra}
/*
* "Fast native" initializer macro for a JSFunctionSpec array element. Use this
* in preference to JS_FS if the native in question does not need its own stack
* frame when activated.
*/
#define JS_FN(name,fastcall,nargs,flags) \
JS_FS(name, (JSNative)(fastcall), nargs, \
(flags) | JSFUN_FAST_NATIVE | JSFUN_STUB_GSOPS, 0)
#define JS_FS(name,call,nargs,flags) \
{name, call, nargs, flags}
#define JS_FN(name,call,nargs,flags) \
{name, call, nargs, (flags) | JSFUN_STUB_GSOPS}
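A small sketch of a JSFunctionSpec table under the new four-argument initializers (the extra field is gone); my_native and obj are placeholders, not from this changeset:

    static JSFunctionSpec my_methods[] = {
        JS_FN("frob", my_native, 1, 0),     /* adds JSFUN_STUB_GSOPS */
        JS_FS("poke", my_native, 0, 0),     /* raw initializer, flags as given */
        JS_FS_END
    };

    static JSBool
    install_methods(JSContext *cx, JSObject *obj)
    {
        return JS_DefineFunctions(cx, obj, my_methods);
    }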
extern JS_PUBLIC_API(JSObject *)
JS_InitClass(JSContext *cx, JSObject *obj, JSObject *parent_proto,
@ -2551,9 +2568,6 @@ JS_TriggerAllOperationCallbacks(JSRuntime *rt);
extern JS_PUBLIC_API(JSBool)
JS_IsRunning(JSContext *cx);
extern JS_PUBLIC_API(JSBool)
JS_IsConstructing(JSContext *cx);
/*
* Saving and restoring frame chains.
*
@ -3029,6 +3043,87 @@ JS_GetFunctionCallback(JSContext *cx);
/************************************************************************/
/*
* JS_IsConstructing must be called from within a native given the
* native's original cx and vp arguments. If JS_IsConstructing is true,
* JS_THIS must not be used; the constructor should construct and return a
* new object. Otherwise, the native is called as an ordinary function and
* JS_THIS may be used.
*/
static JS_ALWAYS_INLINE JSBool
JS_IsConstructing(JSContext *cx, const jsval *vp)
{
jsval_layout l;
#ifdef DEBUG
JSObject *callee = JSVAL_TO_OBJECT(JS_CALLEE(cx, vp));
if (JS_ObjectIsFunction(cx, callee)) {
JSFunction *fun = JS_ValueToFunction(cx, JS_CALLEE(cx, vp));
JS_ASSERT((JS_GetFunctionFlags(fun) & JSFUN_CONSTRUCTOR) != 0);
} else {
JS_ASSERT(JS_GET_CLASS(cx, callee)->construct != NULL);
}
#endif
l.asBits = JSVAL_BITS(vp[1]);
return JSVAL_IS_MAGIC_IMPL(l);
}
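A hedged sketch of the pattern the comment above describes (Thing_ctor and js_ThingClass are illustrative; the native is assumed to be defined with JSFUN_CONSTRUCTOR so the DEBUG assertion holds):

    extern JSClass js_ThingClass;   /* hypothetical embedding-defined class */

    static JSBool
    Thing_ctor(JSContext *cx, uintN argc, jsval *vp)
    {
        (void) argc;
        if (JS_IsConstructing(cx, vp)) {
            /* 'new Thing(...)': construct and return the instance; JS_THIS is off limits. */
            JSObject *obj = JS_NewObject(cx, &js_ThingClass, NULL, NULL);
            if (!obj)
                return JS_FALSE;
            JS_SET_RVAL(cx, vp, OBJECT_TO_JSVAL(obj));
        } else {
            /* Plain call: behaves like an ordinary function; JS_THIS may be used. */
            JS_SET_RVAL(cx, vp, JSVAL_VOID);
        }
        return JS_TRUE;
    }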
/*
* In the case of a constructor called from JS_ConstructObject and
* JS_InitClass where the class has the JSCLASS_CONSTRUCT_PROTOTYPE flag set,
* the JS engine passes the constructor a non-standard 'this' object. In such
* cases, the following query provides the additional information of whether a
* special 'this' was supplied. E.g.:
*
* JSBool foo_native(JSContext *cx, uintN argc, jsval *vp) {
* JSObject *maybeThis;
* if (JS_IsConstructing_PossiblyWithGivenThisObject(cx, vp, &maybeThis)) {
* // native called as a constructor
* if (maybeThis)
* // native called as a constructor with maybeThis as 'this'
* } else {
* // native called as function, maybeThis is still uninitialized
* }
* }
*
* Note that embeddings do not need to use this query unless they use the
* aforementioned API/flags.
*/
static JS_ALWAYS_INLINE JSBool
JS_IsConstructing_PossiblyWithGivenThisObject(JSContext *cx, const jsval *vp,
JSObject **maybeThis)
{
jsval_layout l;
JSBool isCtor;
#ifdef DEBUG
JSObject *callee = JSVAL_TO_OBJECT(JS_CALLEE(cx, vp));
if (JS_ObjectIsFunction(cx, callee)) {
JSFunction *fun = JS_ValueToFunction(cx, JS_CALLEE(cx, vp));
JS_ASSERT((JS_GetFunctionFlags(fun) & JSFUN_CONSTRUCTOR) != 0);
} else {
JS_ASSERT(JS_GET_CLASS(cx, callee)->construct != NULL);
}
#endif
l.asBits = JSVAL_BITS(vp[1]);
isCtor = JSVAL_IS_MAGIC_IMPL(l);
if (isCtor)
*maybeThis = MAGIC_JSVAL_TO_OBJECT_OR_NULL_IMPL(l);
return isCtor;
}
/*
* If a constructor does not have any static knowledge about the type of
* object to create, it can request that the JS engine create a default new
* 'this' object, as is done for non-constructor natives when called with new.
*/
extern JS_PUBLIC_API(JSObject *)
JS_NewObjectForConstructor(JSContext *cx, const jsval *vp);
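A minimal sketch of such a constructor (Generic_ctor is illustrative and assumed to be registered with JSFUN_CONSTRUCTOR, so it is only reached when constructing):

    static JSBool
    Generic_ctor(JSContext *cx, uintN argc, jsval *vp)
    {
        JSObject *obj = JS_NewObjectForConstructor(cx, vp);   /* default 'this', proto from the callee */
        if (!obj)
            return JS_FALSE;
        /* ... initialize obj from argc and JS_ARGV(cx, vp) ... */
        JS_SET_RVAL(cx, vp, OBJECT_TO_JSVAL(obj));
        return JS_TRUE;
    }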
/************************************************************************/
#ifdef DEBUG
#define JS_GC_ZEAL 1
#endif


@ -517,6 +517,23 @@ SetArrayElement(JSContext *cx, JSObject *obj, jsdouble index, const Value &v)
return obj->setProperty(cx, idr.id(), &tmp);
}
#ifdef JS_TRACER
JSBool JS_FASTCALL
js_EnsureDenseArrayCapacity(JSContext *cx, JSObject *obj, jsint i)
{
jsuint u = jsuint(i);
jsuint capacity = obj->getDenseArrayCapacity();
if (u < capacity)
return true;
if (INDEX_TOO_SPARSE(obj, u))
return false;
return obj->ensureDenseArrayElements(cx, u + 1);
}
JS_DEFINE_CALLINFO_3(extern, BOOL, js_EnsureDenseArrayCapacity, CONTEXT, OBJECT, INT32, 0,
nanojit::ACCSET_STORE_ANY)
#endif
static JSBool
DeleteArrayElement(JSContext *cx, JSObject *obj, jsdouble index)
{
@ -755,7 +772,7 @@ array_getProperty(JSContext *cx, JSObject *obj, jsid id, Value *vp)
obj->getDenseArrayElement(i).isMagic(JS_ARRAY_HOLE)) {
JSObject *obj2;
JSProperty *prop;
JSScopeProperty *sprop;
const Shape *shape;
JSObject *proto = obj->getProto();
if (!proto) {
@ -769,8 +786,8 @@ array_getProperty(JSContext *cx, JSObject *obj, jsid id, Value *vp)
return JS_FALSE;
if (prop && obj2->isNative()) {
sprop = (JSScopeProperty *) prop;
if (!js_NativeGet(cx, obj, obj2, sprop, JSGET_METHOD_BARRIER, vp))
shape = (const Shape *) prop;
if (!js_NativeGet(cx, obj, obj2, shape, JSGET_METHOD_BARRIER, vp))
return JS_FALSE;
JS_UNLOCK_OBJ(cx, obj2);
}
@ -811,7 +828,8 @@ array_setProperty(JSContext *cx, JSObject *obj, jsid id, Value *vp)
if (!obj->isDenseArray())
return js_SetProperty(cx, obj, id, vp);
if (!js_IdIsIndex(id, &i) || INDEX_TOO_SPARSE(obj, i)) {
if (!js_IdIsIndex(id, &i) || js_PrototypeHasIndexedProperties(cx, obj) ||
INDEX_TOO_SPARSE(obj, i)) {
if (!obj->makeDenseArraySlow(cx))
return JS_FALSE;
return js_SetProperty(cx, obj, id, vp);
@ -842,7 +860,7 @@ js_PrototypeHasIndexedProperties(JSContext *cx, JSObject *obj)
*/
if (!obj->isNative())
return JS_TRUE;
if (obj->scope()->hadIndexedProperties())
if (obj->isIndexed())
return JS_TRUE;
}
return JS_FALSE;
@ -850,66 +868,18 @@ js_PrototypeHasIndexedProperties(JSContext *cx, JSObject *obj)
#ifdef JS_TRACER
static JS_ALWAYS_INLINE JSBool FASTCALL
dense_grow(JSContext* cx, JSObject* obj, jsint i, const Value &v)
JSBool FASTCALL
js_Array_dense_setelem_hole(JSContext* cx, JSObject* obj, jsint i)
{
JS_ASSERT(obj->isDenseArray());
if (js_PrototypeHasIndexedProperties(cx, obj))
return false;
/*
* Let the interpreter worry about negative array indexes.
*/
JS_ASSERT((MAX_DSLOTS_LENGTH > MAX_DSLOTS_LENGTH32) == (sizeof(intptr_t) != sizeof(uint32)));
if (MAX_DSLOTS_LENGTH > MAX_DSLOTS_LENGTH32) {
/*
* Have to check for negative values bleeding through on 64-bit machines only,
* since we can't allocate large enough arrays for this on 32-bit machines.
*/
if (i < 0)
return JS_FALSE;
}
/*
* If needed, grow the array as long it remains dense, otherwise fall off trace.
*/
jsuint u = jsuint(i);
jsuint capacity = obj->getDenseArrayCapacity();
if ((u >= capacity) && (INDEX_TOO_SPARSE(obj, u) || !obj->ensureDenseArrayElements(cx, u + 1)))
return JS_FALSE;
if (obj->getDenseArrayElement(u).isMagic()) {
if (js_PrototypeHasIndexedProperties(cx, obj))
return JS_FALSE;
if (u >= obj->getArrayLength())
obj->setArrayLength(u + 1);
}
obj->setDenseArrayElement(u, v);
return JS_TRUE;
if (u >= obj->getArrayLength())
obj->setArrayLength(u + 1);
return true;
}
JSBool FASTCALL
js_Array_dense_setelem(JSContext* cx, JSObject* obj, jsint i, ValueArgType v)
{
return dense_grow(cx, obj, i, ValueArgToConstRef(v));
}
JS_DEFINE_CALLINFO_4(extern, BOOL, js_Array_dense_setelem, CONTEXT, OBJECT, INT32, VALUE,
0, nanojit::ACCSET_STORE_ANY)
JSBool FASTCALL
js_Array_dense_setelem_int(JSContext* cx, JSObject* obj, jsint i, int32 j)
{
return dense_grow(cx, obj, i, Int32Value(j));
}
JS_DEFINE_CALLINFO_4(extern, BOOL, js_Array_dense_setelem_int, CONTEXT, OBJECT, INT32, INT32,
0, nanojit::ACCSET_STORE_ANY)
JSBool FASTCALL
js_Array_dense_setelem_double(JSContext* cx, JSObject* obj, jsint i, jsdouble d)
{
return dense_grow(cx, obj, i, NumberValue(d));
}
JS_DEFINE_CALLINFO_4(extern, BOOL, js_Array_dense_setelem_double, CONTEXT, OBJECT, INT32, DOUBLE,
JS_DEFINE_CALLINFO_3(extern, BOOL, js_Array_dense_setelem_hole, CONTEXT, OBJECT, INT32,
0, nanojit::ACCSET_STORE_ANY)
#endif
@ -1006,9 +976,8 @@ array_trace(JSTracer *trc, JSObject *obj)
Class js_ArrayClass = {
"Array",
Class::NON_NATIVE |
JSCLASS_HAS_RESERVED_SLOTS(JSObject::DENSE_ARRAY_FIXED_RESERVED_SLOTS) |
JSCLASS_HAS_CACHED_PROTO(JSProto_Array) |
JSCLASS_FAST_CONSTRUCTOR,
JSCLASS_HAS_RESERVED_SLOTS(JSObject::DENSE_ARRAY_CLASS_RESERVED_SLOTS) |
JSCLASS_HAS_CACHED_PROTO(JSProto_Array),
PropertyStub, /* addProperty */
PropertyStub, /* delProperty */
PropertyStub, /* getProperty */
@ -1044,8 +1013,7 @@ Class js_ArrayClass = {
Class js_SlowArrayClass = {
"Array",
JSCLASS_HAS_PRIVATE |
JSCLASS_HAS_CACHED_PROTO(JSProto_Array) |
JSCLASS_FAST_CONSTRUCTOR,
JSCLASS_HAS_CACHED_PROTO(JSProto_Array),
slowarray_addProperty,
PropertyStub, /* delProperty */
PropertyStub, /* getProperty */
@ -1063,30 +1031,25 @@ JSObject::makeDenseArraySlow(JSContext *cx)
{
JS_ASSERT(isDenseArray());
/*
* Save old map now, before calling InitScopeForObject. We'll have to undo
* on error. This is gross, but a better way is not obvious.
*/
JSObjectMap *oldMap = map;
/*
* Create a native scope. All slow arrays other than Array.prototype get
* the same initial shape.
*/
uint32 emptyShape;
JSObject *obj = this;
JSObject *arrayProto = obj->getProto();
if (arrayProto->getClass() == &js_ObjectClass) {
/* obj is Array.prototype. */
emptyShape = js_GenerateShape(cx, false);
} else {
/* arrayProto is Array.prototype. */
JS_ASSERT(arrayProto->getClass() == &js_SlowArrayClass);
emptyShape = arrayProto->scope()->emptyScope->shape;
}
JSScope *scope = JSScope::create(cx, &js_SlowArrayClass, obj, emptyShape);
if (!scope)
return JS_FALSE;
JSObject *arrayProto = getProto();
if (!InitScopeForObject(cx, this, &js_SlowArrayClass, arrayProto))
return false;
uint32 capacity;
if (obj->dslots) {
capacity = obj->getDenseArrayCapacity();
obj->dslots[-1].setPrivateUint32(JS_INITIAL_NSLOTS + capacity);
if (dslots) {
capacity = getDenseArrayCapacity();
dslots[-1].setPrivateUint32(JS_INITIAL_NSLOTS + capacity);
} else {
/*
* Array.prototype is constructed as a dense array, but is immediately slowified before
@ -1095,28 +1058,40 @@ JSObject::makeDenseArraySlow(JSContext *cx)
capacity = 0;
}
scope->freeslot = obj->numSlots();
uint32 nslots = numSlots();
if (nslots >= JS_NSLOTS_LIMIT) {
setMap(oldMap);
JS_ReportOutOfMemory(cx);
return false;
}
freeslot = nslots;
/* Begin with the length property to share more of the property tree. */
if (!scope->addProperty(cx, ATOM_TO_JSID(cx->runtime->atomState.lengthAtom),
array_length_getter, array_length_setter,
JSSLOT_ARRAY_LENGTH, JSPROP_PERMANENT | JSPROP_SHARED, 0, 0)) {
goto out_bad;
if (!addProperty(cx, ATOM_TO_JSID(cx->runtime->atomState.lengthAtom),
array_length_getter, array_length_setter,
JSSLOT_ARRAY_LENGTH, JSPROP_PERMANENT | JSPROP_SHARED, 0, 0)) {
setMap(oldMap);
return false;
}
/* Create new properties pointing to existing elements. */
for (uint32 i = 0; i < capacity; i++) {
jsid id;
if (!ValueToId(cx, Int32Value(i), &id))
goto out_bad;
if (!ValueToId(cx, Int32Value(i), &id)) {
setMap(oldMap);
return false;
}
if (obj->getDenseArrayElement(i).isMagic(JS_ARRAY_HOLE)) {
obj->setDenseArrayElement(i, UndefinedValue());
if (getDenseArrayElement(i).isMagic(JS_ARRAY_HOLE)) {
setDenseArrayElement(i, UndefinedValue());
continue;
}
if (!scope->addDataProperty(cx, id, JS_INITIAL_NSLOTS + i, JSPROP_ENUMERATE))
goto out_bad;
if (!addDataProperty(cx, id, JS_INITIAL_NSLOTS + i, JSPROP_ENUMERATE)) {
setMap(oldMap);
return false;
}
}
/*
@ -1126,14 +1101,16 @@ JSObject::makeDenseArraySlow(JSContext *cx)
* can store an arbitrary value.
*/
JS_ASSERT(js_SlowArrayClass.flags & JSCLASS_HAS_PRIVATE);
obj->voidDenseOnlyArraySlots();
obj->clasp = &js_SlowArrayClass;
obj->map = scope;
return JS_TRUE;
voidDenseOnlyArraySlots();
out_bad:
scope->destroy(cx);
return JS_FALSE;
/*
* Finally, update class. If |this| is Array.prototype, then js_InitClass
* will create an emptyShape whose class is &js_SlowArrayClass, to ensure
* that delegating instances can share shapes in the tree rooted at the
* proto's empty shape.
*/
clasp = &js_SlowArrayClass;
return true;
}
/* Transfer ownership of buffer to returned string. */
@ -2900,11 +2877,11 @@ array_every(JSContext *cx, uintN argc, Value *vp)
#endif
static JSBool
array_isArray(JSContext *cx, uintN argc, jsval *vp)
array_isArray(JSContext *cx, uintN argc, Value *vp)
{
*vp = BOOLEAN_TO_JSVAL(argc > 0 &&
!JSVAL_IS_PRIMITIVE(vp[2]) &&
JSVAL_TO_OBJECT(vp[2])->wrappedObject(cx)->isArray());
vp->setBoolean(argc > 0 &&
vp[2].isObject() &&
vp[2].toObject().wrappedObject(cx)->isArray());
return JS_TRUE;
}
@ -2998,9 +2975,9 @@ js_NewEmptyArray(JSContext* cx, JSObject* proto, int32 len)
if (!obj)
return NULL;
/* Initialize all fields of JSObject. */
obj->map = const_cast<JSObjectMap *>(&JSObjectMap::sharedNonNative);
obj->init(&js_ArrayClass, proto, proto->getParent(), NullValue());
/* Initialize all fields, calling init before setting obj->map. */
obj->init(&js_ArrayClass, proto, proto->getParent(), NullValue(), cx);
obj->setSharedNonNativeMap();
obj->setArrayLength(len);
obj->setDenseArrayCapacity(0);
return obj;
@ -3028,12 +3005,18 @@ JS_DEFINE_CALLINFO_3(extern, OBJECT, js_NewPreallocatedArray, CONTEXT, OBJECT, I
JSObject *
js_InitArrayClass(JSContext *cx, JSObject *obj)
{
JSObject *proto = js_InitClass(cx, obj, NULL, &js_ArrayClass, (Native) js_Array, 1,
JSObject *proto = js_InitClass(cx, obj, NULL, &js_ArrayClass, js_Array, 1,
NULL, array_methods, NULL, array_static_methods);
if (!proto)
return NULL;
proto->setArrayLength(0);
/*
* Assert that js_InitClass used the correct (slow array, not dense array)
* class for proto's emptyShape class.
*/
JS_ASSERT(proto->emptyShape->getClass() == proto->getClass());
proto->setArrayLength(0);
return proto;
}


@ -110,7 +110,7 @@ JSObject::isArray() const
/*
* Dense arrays are not native -- aobj->isNative() for a dense array aobj
* results in false, meaning aobj->map does not point to a JSScope.
* results in false, meaning aobj->map does not point to a js::Shape.
*
* But Array methods are called via aobj.sort(), e.g., and the interpreter and
* the trace recorder must consult the property cache in order to perform well.
@ -249,8 +249,8 @@ js_Array(JSContext *cx, uintN argc, js::Value *vp);
* parameter. The caller promises to fill in the first |capacity| values
* starting from that pointer immediately after this function returns and
* without triggering GC (so this method is allowed to leave those
* uninitialized) and to set them to non-JSVAL_HOLE values, so that the
* resulting array has length and count both equal to |capacity|.
* uninitialized) and to set them to values other than the JS_ARRAY_HOLE magic, so
* that the resulting array has length and count both equal to |capacity|.
*
* FIXME: for some strange reason, when this file is included from
* dom/ipc/TabParent.cpp in MSVC, jsuint resolves to a slightly different
@ -280,4 +280,7 @@ js_CloneDensePrimitiveArray(JSContext *cx, JSObject *obj, JSObject **clone);
JS_FRIEND_API(JSBool)
js_IsDensePrimitiveArray(JSObject *obj);
extern JSBool JS_FASTCALL
js_EnsureDenseArrayCapacity(JSContext *cx, JSObject *obj, jsint i);
#endif /* jsarray_h___ */


@ -68,6 +68,7 @@ ASTDEF(AST_GRAPH_IDX_EXPR, "GraphIndexExpression")
ASTDEF(AST_COMP_EXPR, "ComprehensionExpression")
ASTDEF(AST_GENERATOR_EXPR, "GeneratorExpression")
ASTDEF(AST_YIELD_EXPR, "YieldExpression")
ASTDEF(AST_LET_EXPR, "LetExpression")
ASTDEF(AST_EMPTY_STMT, "EmptyStatement")
ASTDEF(AST_BLOCK_STMT, "BlockStatement")
@ -86,6 +87,7 @@ ASTDEF(AST_RETURN_STMT, "ReturnStatement")
ASTDEF(AST_TRY_STMT, "TryStatement")
ASTDEF(AST_THROW_STMT, "ThrowStatement")
ASTDEF(AST_DEBUGGER_STMT, "DebuggerStatement")
ASTDEF(AST_LET_STMT, "LetStatement")
ASTDEF(AST_CASE, "SwitchCase")
ASTDEF(AST_CATCH, "CatchClause")
@ -100,6 +102,7 @@ ASTDEF(AST_XMLESCAPE, "XMLEscape")
ASTDEF(AST_XMLFILTER, "XMLFilterExpression")
ASTDEF(AST_XMLDEFAULT, "XMLDefaultDeclaration")
ASTDEF(AST_XMLQUAL, "XMLQualifiedIdentifier")
ASTDEF(AST_XMLFUNCQUAL, "XMLFunctionQualifiedIdentifier")
ASTDEF(AST_XMLELEM, "XMLElement")
ASTDEF(AST_XMLTEXT, "XMLText")
ASTDEF(AST_XMLLIST, "XMLList")


@ -176,6 +176,7 @@ const char *const js_common_atom_names[] = {
js_configurable_str, /* configurableAtom */
js_writable_str, /* writableAtom */
js_value_str, /* valueAtom */
js_test_str, /* testAtom */
"use strict", /* useStrictAtom */
#if JS_HAS_XML_SUPPORT
@ -265,6 +266,7 @@ const char js_enumerable_str[] = "enumerable";
const char js_configurable_str[] = "configurable";
const char js_writable_str[] = "writable";
const char js_value_str[] = "value";
const char js_test_str[] = "test";
#if JS_HAS_XML_SUPPORT
const char js_etago_str[] = "</";


@ -368,6 +368,7 @@ struct JSAtomState
JSAtom *configurableAtom;
JSAtom *writableAtom;
JSAtom *valueAtom;
JSAtom *testAtom;
JSAtom *useStrictAtom;
#if JS_HAS_XML_SUPPORT
@ -524,6 +525,7 @@ extern const char js_enumerable_str[];
extern const char js_configurable_str[];
extern const char js_writable_str[];
extern const char js_value_str[];
extern const char js_test_str[];
/*
* Initialize atom state. Return true on success, false on failure to allocate


@ -60,7 +60,8 @@ using namespace js;
Class js_BooleanClass = {
"Boolean",
JSCLASS_HAS_RESERVED_SLOTS(1) | JSCLASS_HAS_CACHED_PROTO(JSProto_Boolean),
JSCLASS_HAS_RESERVED_SLOTS(1) |
JSCLASS_HAS_CACHED_PROTO(JSProto_Boolean),
PropertyStub, /* addProperty */
PropertyStub, /* delProperty */
PropertyStub, /* getProperty */
@ -126,18 +127,20 @@ static JSFunctionSpec boolean_methods[] = {
};
static JSBool
Boolean(JSContext *cx, JSObject *obj, uintN argc, Value *argv, Value *rval)
Boolean(JSContext *cx, uintN argc, Value *vp)
{
Value bval;
Value *argv = vp + 2;
bool b = argc != 0 ? js_ValueToBoolean(argv[0]) : false;
if (argc != 0)
bval.setBoolean(!!js_ValueToBoolean(argv[0]));
else
bval.setBoolean(false);
if (!JS_IsConstructing(cx))
*rval = bval;
else
obj->setPrimitiveThis(bval);
if (IsConstructing(vp)) {
JSObject *obj = NewBuiltinClassInstance(cx, &js_BooleanClass);
if (!obj)
return false;
obj->setPrimitiveThis(BooleanValue(b));
vp->setObject(*obj);
} else {
vp->setBoolean(b);
}
return true;
}
@ -170,14 +173,14 @@ js_BooleanToCharBuffer(JSContext *cx, JSBool b, JSCharBuffer &cb)
JSBool
js_ValueToBoolean(const Value &v)
{
if (v.isNullOrUndefined())
return JS_FALSE;
if (v.isObject())
return JS_TRUE;
if (v.isString())
return v.toString()->length() != 0;
if (v.isInt32())
return v.toInt32() != 0;
if (v.isString())
return v.toString()->length() != 0;
if (v.isObject())
return JS_TRUE;
if (v.isNullOrUndefined())
return JS_FALSE;
if (v.isDouble()) {
jsdouble d;


@ -183,73 +183,55 @@ JS_DEFINE_CALLINFO_2(extern, INT32, js_StringToInt32, CONTEXT, STRING, 1, ACCSET
/* Nb: it's always safe to set isDefinitelyAtom to false if you're unsure or don't know. */
static inline JSBool
AddPropertyHelper(JSContext* cx, JSObject* obj, JSScopeProperty* sprop, bool isDefinitelyAtom)
AddPropertyHelper(JSContext* cx, JSObject* obj, Shape* shape, bool isDefinitelyAtom)
{
JS_LOCK_OBJ(cx, obj);
JS_ASSERT(shape->previous() == obj->lastProperty());
uint32 slot = sprop->slot;
JSScope* scope = obj->scope();
if (slot != scope->freeslot)
return false;
JS_ASSERT(sprop->parent == scope->lastProperty());
if (scope->isSharedEmpty()) {
scope = js_GetMutableScope(cx, obj);
if (!scope)
return false;
} else {
JS_ASSERT(!scope->hasProperty(sprop));
}
if (!scope->table) {
if (slot < obj->numSlots()) {
JS_ASSERT(obj->getSlot(scope->freeslot).isUndefined());
++scope->freeslot;
} else {
if (!js_AllocSlot(cx, obj, &slot))
goto exit_trace;
if (slot != sprop->slot) {
js_FreeSlot(cx, obj, slot);
goto exit_trace;
}
}
scope->extend(cx, sprop, isDefinitelyAtom);
} else {
JSScopeProperty *sprop2 =
scope->addProperty(cx, sprop->id, sprop->getter(), sprop->setter(),
SPROP_INVALID_SLOT, sprop->attributes(), sprop->getFlags(),
sprop->shortid);
if (sprop2 != sprop)
if (obj->nativeEmpty()) {
if (!obj->ensureClassReservedSlotsForEmptyObject(cx))
goto exit_trace;
}
uint32 slot;
slot = shape->slot;
JS_ASSERT(slot == obj->freeslot);
if (slot < obj->numSlots()) {
JS_ASSERT(obj->getSlot(slot).isUndefined());
++obj->freeslot;
JS_ASSERT(obj->freeslot != 0);
} else {
if (!obj->allocSlot(cx, &slot))
goto exit_trace;
JS_ASSERT(slot == shape->slot);
}
obj->extend(cx, shape, isDefinitelyAtom);
if (js_IsPropertyCacheDisabled(cx))
goto exit_trace;
JS_UNLOCK_SCOPE(cx, scope);
JS_UNLOCK_OBJ(cx, obj);
return true;
exit_trace:
JS_UNLOCK_SCOPE(cx, scope);
JS_UNLOCK_OBJ(cx, obj);
return false;
}
JSBool FASTCALL
js_AddProperty(JSContext* cx, JSObject* obj, JSScopeProperty* sprop)
js_AddProperty(JSContext* cx, JSObject* obj, Shape* shape)
{
return AddPropertyHelper(cx, obj, sprop, /* isDefinitelyAtom = */false);
return AddPropertyHelper(cx, obj, shape, /* isDefinitelyAtom = */false);
}
JS_DEFINE_CALLINFO_3(extern, BOOL, js_AddProperty, CONTEXT, OBJECT, SCOPEPROP, 0, ACCSET_STORE_ANY)
JS_DEFINE_CALLINFO_3(extern, BOOL, js_AddProperty, CONTEXT, OBJECT, SHAPE, 0, ACCSET_STORE_ANY)
JSBool FASTCALL
js_AddAtomProperty(JSContext* cx, JSObject* obj, JSScopeProperty* sprop)
js_AddAtomProperty(JSContext* cx, JSObject* obj, Shape* shape)
{
return AddPropertyHelper(cx, obj, sprop, /* isDefinitelyAtom = */true);
return AddPropertyHelper(cx, obj, shape, /* isDefinitelyAtom = */true);
}
JS_DEFINE_CALLINFO_3(extern, BOOL, js_AddAtomProperty, CONTEXT, OBJECT, SCOPEPROP,
0, ACCSET_STORE_ANY)
JS_DEFINE_CALLINFO_3(extern, BOOL, js_AddAtomProperty, CONTEXT, OBJECT, SHAPE, 0, ACCSET_STORE_ANY)
static JSBool
HasProperty(JSContext* cx, JSObject* obj, jsid id)
@ -326,41 +308,9 @@ js_NewNullClosure(JSContext* cx, JSObject* funobj, JSObject* proto, JSObject* pa
if (!closure)
return NULL;
closure->initSharingEmptyScope(&js_FunctionClass, proto, parent, PrivateValue(fun));
closure->initSharingEmptyShape(&js_FunctionClass, proto, parent, fun, cx);
return closure;
}
JS_DEFINE_CALLINFO_4(extern, OBJECT, js_NewNullClosure, CONTEXT, OBJECT, OBJECT, OBJECT,
0, ACCSET_STORE_ANY)
JS_REQUIRES_STACK JSBool FASTCALL
js_PopInterpFrame(JSContext* cx, TracerState* state)
{
JS_ASSERT(cx->hasfp() && cx->fp()->down);
JSStackFrame* const fp = cx->fp();
/*
* Mirror frame popping code from inline_return in js_Interpret. There are
* some things we just don't want to handle. In those cases, the trace will
* MISMATCH_EXIT.
*/
if (fp->hasHookData())
return JS_FALSE;
if (cx->version != fp->getCallerVersion())
return JS_FALSE;
if (fp->flags & JSFRAME_CONSTRUCTING)
return JS_FALSE;
if (fp->hasIMacroPC())
return JS_FALSE;
if (fp->hasBlockChain())
return JS_FALSE;
fp->putActivationObjects(cx);
/* Pop the frame and its memory. */
cx->stack().popInlineFrame(cx, fp, fp->down);
/* Update the inline call count. */
*state->inlineCallCountp = *state->inlineCallCountp - 1;
return JS_TRUE;
}
JS_DEFINE_CALLINFO_2(extern, BOOL, js_PopInterpFrame, CONTEXT, TRACERSTATE, 0, ACCSET_STORE_ANY)


@ -43,6 +43,7 @@
#ifdef JS_TRACER
#include "nanojit/nanojit.h"
#include "jsvalue.h"
#ifdef THIS
#undef THIS
@ -61,7 +62,7 @@ enum {
#define JSTN_ERRTYPE(jstn) ((jstn)->flags & JSTN_ERRTYPE_MASK)
/*
* Type describing a type specialization of a JSFastNative.
* Type describing a type specialization of a js::Native.
*
* |prefix| and |argtypes| declare what arguments should be passed to the
* native function. |prefix| can contain the following characters:
@ -103,7 +104,7 @@ struct JSSpecializedNative {
* terminated by the lack of having the JSTN_MORE flag set.
*/
struct JSNativeTraceInfo {
JSFastNative native;
js::Native native;
JSSpecializedNative *specializations;
};
@ -222,7 +223,7 @@ struct ClosureVarInfo;
#define _JS_CTYPE_CONSTRUCTOR_RETRY _JS_CTYPE(JSObject *, _JS_PTR, --, --, FAIL_NULL | \
JSTN_CONSTRUCTOR)
#define _JS_CTYPE_REGEXP _JS_CTYPE(JSObject *, _JS_PTR, "","r", INFALLIBLE)
#define _JS_CTYPE_SCOPEPROP _JS_CTYPE(JSScopeProperty *, _JS_PTR, --, --, INFALLIBLE)
#define _JS_CTYPE_SHAPE _JS_CTYPE(js::Shape *, _JS_PTR, --, --, INFALLIBLE)
#define _JS_CTYPE_TRACERSTATE _JS_CTYPE(TracerState *, _JS_PTR, --, --, INFALLIBLE)
#define _JS_CTYPE_FRAGMENT _JS_CTYPE(nanojit::Fragment *, _JS_PTR, --, --, INFALLIBLE)
#define _JS_CTYPE_CLASS _JS_CTYPE(js::Class *, _JS_PTR, --, --, INFALLIBLE)
@ -506,7 +507,7 @@ struct ClosureVarInfo;
JSSpecializedNative name##_sns[] = { \
{ _JS_TN_INIT_HELPER_n tn0 } \
}; \
JSNativeTraceInfo name##_trcinfo = { (JSFastNative)name, name##_sns };
JSNativeTraceInfo name##_trcinfo = { JS_VALUEIFY_NATIVE(name), name##_sns };
#define JS_DEFINE_TRCINFO_2(name, tn0, tn1) \
_JS_DEFINE_CALLINFO_n tn0 \
@ -515,7 +516,7 @@ struct ClosureVarInfo;
{ _JS_TN_INIT_HELPER_n tn0 | JSTN_MORE }, \
{ _JS_TN_INIT_HELPER_n tn1 } \
}; \
JSNativeTraceInfo name##_trcinfo = { (JSFastNative)name, name##_sns };
JSNativeTraceInfo name##_trcinfo = { JS_VALUEIFY_NATIVE(name), name##_sns };
#define JS_DEFINE_TRCINFO_3(name, tn0, tn1, tn2) \
_JS_DEFINE_CALLINFO_n tn0 \
@ -526,7 +527,7 @@ struct ClosureVarInfo;
{ _JS_TN_INIT_HELPER_n tn1 | JSTN_MORE }, \
{ _JS_TN_INIT_HELPER_n tn2 } \
}; \
JSNativeTraceInfo name##_trcinfo = { (JSFastNative)name, name##_sns };
JSNativeTraceInfo name##_trcinfo = { JS_VALUEIFY_NATIVE(name), name##_sns };
#define JS_DEFINE_TRCINFO_4(name, tn0, tn1, tn2, tn3) \
_JS_DEFINE_CALLINFO_n tn0 \
@ -539,7 +540,7 @@ struct ClosureVarInfo;
{ _JS_TN_INIT_HELPER_n tn2 | JSTN_MORE }, \
{ _JS_TN_INIT_HELPER_n tn3 } \
}; \
JSNativeTraceInfo name##_trcinfo = { (JSFastNative)name, name##_sns };
JSNativeTraceInfo name##_trcinfo = { JS_VALUEIFY_NATIVE(name), name##_sns };
#define _JS_DEFINE_CALLINFO_n(n, args) JS_DEFINE_CALLINFO_##n args
@ -572,12 +573,11 @@ js_dmod(jsdouble a, jsdouble b);
#endif /* !JS_TRACER */
/* Defined in jsarray.cpp. */
JS_DECLARE_CALLINFO(js_Array_dense_setelem)
JS_DECLARE_CALLINFO(js_Array_dense_setelem_int)
JS_DECLARE_CALLINFO(js_Array_dense_setelem_double)
JS_DECLARE_CALLINFO(js_Array_dense_setelem_hole)
JS_DECLARE_CALLINFO(js_NewEmptyArray)
JS_DECLARE_CALLINFO(js_NewPreallocatedArray)
JS_DECLARE_CALLINFO(js_ArrayCompPush_tn)
JS_DECLARE_CALLINFO(js_EnsureDenseArrayCapacity)
/* Defined in jsbuiltins.cpp. */
JS_DECLARE_CALLINFO(js_UnboxDouble)
@ -595,7 +595,6 @@ JS_DECLARE_CALLINFO(js_HasNamedPropertyInt32)
JS_DECLARE_CALLINFO(js_TypeOfObject)
JS_DECLARE_CALLINFO(js_BooleanIntToString)
JS_DECLARE_CALLINFO(js_NewNullClosure)
JS_DECLARE_CALLINFO(js_PopInterpFrame)
/* Defined in jsfun.cpp. */
JS_DECLARE_CALLINFO(js_AllocFlatClosure)
@ -612,7 +611,7 @@ JS_DECLARE_CALLINFO(js_NumberToString)
/* Defined in jsobj.cpp. */
JS_DECLARE_CALLINFO(js_Object_tn)
JS_DECLARE_CALLINFO(js_NewInstance)
JS_DECLARE_CALLINFO(js_NewInstanceFromTrace)
JS_DECLARE_CALLINFO(js_NonEmptyObject)
/* Defined in jsregexp.cpp. */


@ -78,7 +78,7 @@
#include "jscntxtinlines.h"
#ifdef XP_WIN
# include <windows.h>
# include "jswin.h"
#elif defined(XP_OS2)
# define INCL_DOSMEMMGR
# include <os2.h>
@ -205,6 +205,11 @@ StackSpace::mark(JSTracer *trc)
/*
* The correctness/completeness of marking depends on the continuity
* invariants described by the StackSegment and StackSpace definitions.
*
* NB:
* Stack slots might be torn or uninitialized in the presence of method
* JIT'd code. Arguments are an exception and are always fully synced
* (so they can be read by functions).
*/
Value *end = firstUnused();
for (StackSegment *seg = currentSegment; seg; seg = seg->getPreviousInMemory()) {
@ -215,13 +220,13 @@ StackSpace::mark(JSTracer *trc)
/* Mark slots/args trailing off of the last stack frame. */
JSStackFrame *fp = seg->getCurrentFrame();
MarkValueRange(trc, fp->slots(), end, "stack");
MarkStackRangeConservatively(trc, fp->slots(), end);
/* Mark stack frames and slots/args between stack frames. */
JSStackFrame *initialFrame = seg->getInitialFrame();
for (JSStackFrame *f = fp; f != initialFrame; f = f->down) {
js_TraceStackFrame(trc, f);
MarkValueRange(trc, f->down->slots(), f->argEnd(), "stack");
MarkStackRangeConservatively(trc, f->down->slots(), f->argEnd());
}
/* Mark initialFrame stack frame and leading args. */
@ -336,37 +341,6 @@ FrameGuard::~FrameGuard()
cx->stack().popFrame(cx);
}
JS_REQUIRES_STACK void
StackSpace::getSynthesizedSlowNativeFrame(JSContext *cx, StackSegment *&seg, JSStackFrame *&fp)
{
Value *start = firstUnused();
JS_ASSERT(size_t(end - start) >= VALUES_PER_STACK_SEGMENT + VALUES_PER_STACK_FRAME);
seg = new(start) StackSegment;
fp = reinterpret_cast<JSStackFrame *>(seg + 1);
}
JS_REQUIRES_STACK void
StackSpace::pushSynthesizedSlowNativeFrame(JSContext *cx, StackSegment *seg, JSFrameRegs &regs)
{
JS_ASSERT(!regs.fp->hasScript() && FUN_SLOW_NATIVE(regs.fp->getFunction()));
regs.fp->down = cx->maybefp();
seg->setPreviousInMemory(currentSegment);
currentSegment = seg;
cx->pushSegmentAndFrame(seg, regs);
seg->setInitialVarObj(NULL);
}
JS_REQUIRES_STACK void
StackSpace::popSynthesizedSlowNativeFrame(JSContext *cx)
{
JS_ASSERT(isCurrentAndActive(cx));
JS_ASSERT(cx->hasActiveSegment());
JS_ASSERT(currentSegment->getInitialFrame() == cx->fp());
JS_ASSERT(!cx->fp()->hasScript() && FUN_SLOW_NATIVE(cx->fp()->getFunction()));
cx->popSegmentAndFrame();
currentSegment = currentSegment->getPreviousInMemory();
}
JS_REQUIRES_STACK bool
StackSpace::pushDummyFrame(JSContext *cx, FrameGuard &fg, JSFrameRegs &regs, JSObject *scopeChain)
{
@ -436,6 +410,25 @@ FrameRegsIter::incSlow(JSStackFrame *up, JSStackFrame *down)
}
}
AllFramesIter::AllFramesIter(JSContext *cx)
: curcs(cx->stack().getCurrentSegment()),
curfp(curcs ? curcs->getCurrentFrame() : NULL)
{
}
AllFramesIter&
AllFramesIter::operator++()
{
JS_ASSERT(!done());
if (curfp == curcs->getInitialFrame()) {
curcs = curcs->getPreviousInMemory();
curfp = curcs ? curcs->getCurrentFrame() : NULL;
} else {
curfp = curfp->down;
}
return *this;
}
bool
JSThreadData::init()
{
@ -448,6 +441,9 @@ JSThreadData::init()
return false;
#ifdef JS_TRACER
InitJIT(&traceMonitor);
#endif
#ifdef JS_METHODJIT
jmData.Initialize();
#endif
dtoaState = js_NewDtoaState();
if (!dtoaState) {
@ -466,7 +462,6 @@ JSThreadData::finish()
JS_ASSERT(gcFreeLists.isEmpty());
for (size_t i = 0; i != JS_ARRAY_LENGTH(scriptsToGC); ++i)
JS_ASSERT(!scriptsToGC[i]);
JS_ASSERT(!conservativeGC.isEnabled());
#endif
if (dtoaState)
@ -476,6 +471,9 @@ JSThreadData::finish()
propertyCache.~PropertyCache();
#if defined JS_TRACER
FinishJIT(&traceMonitor);
#endif
#if defined JS_METHODJIT
jmData.Finish();
#endif
stackSpace.finish();
}
@ -513,6 +511,7 @@ JSThreadData::purge(JSContext *cx)
/* Purge cached native iterators. */
memset(cachedNativeIterators, 0, sizeof(cachedNativeIterators));
lastNativeIterator = NULL;
dtoaCache.s = NULL;
}
@ -541,6 +540,13 @@ DestroyThread(JSThread *thread)
/* The thread must have zero contexts. */
JS_ASSERT(JS_CLIST_IS_EMPTY(&thread->contextList));
JS_ASSERT(!thread->titleToShare);
/*
* The conservative GC scanner should be disabled when the thread leaves
* the last request.
*/
JS_ASSERT(!thread->data.conservativeGC.hasStackToScan());
thread->data.finish();
js_free(thread);
}
@ -670,7 +676,6 @@ js_PurgeThreads(JSContext *cx)
e.removeFront();
} else {
thread->data.purge(cx);
thread->gcThreadMallocBytes = JS_GC_THREAD_MALLOC_LIMIT;
}
}
#else
@ -826,13 +831,13 @@ js_NewContext(JSRuntime *rt, size_t stackChunkSize)
if (ok) {
/*
* Ensure that the empty scopes initialized by
* JSScope::initRuntimeState get the desired special shapes.
* Shape::initRuntimeState get the desired special shapes.
* (The rt->state dance above guarantees that this abuse of
* rt->shapeGen is thread-safe.)
*/
uint32 shapeGen = rt->shapeGen;
rt->shapeGen = 0;
ok = JSScope::initRuntimeState(cx);
ok = Shape::initRuntimeState(cx);
if (rt->shapeGen < shapeGen)
rt->shapeGen = shapeGen;
}
@ -1000,7 +1005,13 @@ js_DestroyContext(JSContext *cx, JSDestroyContextMode mode)
if (!cx->thread)
JS_SetContextThread(cx);
JS_ASSERT_IF(rt->gcRunning, cx->outstandingRequests == 0);
/*
* For API compatibility we support destroying contexts with non-zero
* cx->outstandingRequests but we assume that all JS_BeginRequest calls
* on this cx contribute to cx->thread->requestDepth and that there are
* no JS_SuspendRequest calls that set aside the counter.
*/
JS_ASSERT(cx->outstandingRequests <= cx->thread->requestDepth);
#endif
if (mode != JSDCM_NEW_FAILED) {
@ -1025,7 +1036,7 @@ js_DestroyContext(JSContext *cx, JSDestroyContextMode mode)
* Typically we are called outside a request, so ensure that the GC is not
* running before removing the context from rt->contextList, see bug 477021.
*/
if (cx->requestDepth == 0)
if (cx->thread->requestDepth == 0)
js_WaitForGC(rt);
#endif
JS_REMOVE_LINK(&cx->link);
@ -1034,7 +1045,7 @@ js_DestroyContext(JSContext *cx, JSDestroyContextMode mode)
rt->state = JSRTS_LANDING;
if (last || mode == JSDCM_FORCE_GC || mode == JSDCM_MAYBE_GC
#ifdef JS_THREADSAFE
|| cx->requestDepth != 0
|| cx->outstandingRequests != 0
#endif
) {
JS_ASSERT(!rt->gcRunning);
@ -1044,20 +1055,20 @@ js_DestroyContext(JSContext *cx, JSDestroyContextMode mode)
if (last) {
#ifdef JS_THREADSAFE
/*
* If cx is not in a request already, begin one now so that we wait
* for any racing GC started on a not-last context to finish, before
* we plow ahead and unpin atoms. Note that even though we begin a
* request here if necessary, we end all requests on cx below before
* forcing a final GC. This lets any not-last context destruction
* racing in another thread try to force or maybe run the GC, but by
* that point, rt->state will not be JSRTS_UP, and that GC attempt
* will return early.
* If this thread is not in a request already, begin one now so
* that we wait for any racing GC started on a not-last context to
* finish, before we plow ahead and unpin atoms. Note that even
* though we begin a request here if necessary, we end all
* thread's requests before forcing a final GC. This lets any
* not-last context destruction racing in another thread try to
* force or maybe run the GC, but by that point, rt->state will
* not be JSRTS_UP, and that GC attempt will return early.
*/
if (cx->requestDepth == 0)
if (cx->thread->requestDepth == 0)
JS_BeginRequest(cx);
#endif
JSScope::finishRuntimeState(cx);
Shape::finishRuntimeState(cx);
js_FinishRuntimeNumberState(cx);
/* Unpin all common atoms before final GC. */
@ -1079,7 +1090,7 @@ js_DestroyContext(JSContext *cx, JSDestroyContextMode mode)
* request to end. We'll let it run below, just before we do the truly
* final GC and then free atom state.
*/
while (cx->requestDepth != 0)
while (cx->outstandingRequests != 0)
JS_EndRequest(cx);
#endif
@ -1102,7 +1113,11 @@ js_DestroyContext(JSContext *cx, JSDestroyContextMode mode)
}
}
#ifdef JS_THREADSAFE
#ifdef DEBUG
JSThread *t = cx->thread;
#endif
js_ClearContextThread(cx);
JS_ASSERT_IF(JS_CLIST_IS_EMPTY(&t->contextList), !t->requestDepth);
#endif
#ifdef JS_METER_DST_OFFSET_CACHING
cx->dstOffsetCache.dumpStats();
@ -1178,7 +1193,7 @@ js_NextActiveContext(JSRuntime *rt, JSContext *cx)
JSContext *iter = cx;
#ifdef JS_THREADSAFE
while ((cx = js_ContextIterator(rt, JS_FALSE, &iter)) != NULL) {
if (cx->requestDepth)
if (cx->outstandingRequests && cx->thread->requestDepth)
break;
}
return cx;
@ -1825,14 +1840,15 @@ JSBool
js_InvokeOperationCallback(JSContext *cx)
{
JS_ASSERT_REQUEST_DEPTH(cx);
JS_ASSERT(JS_THREAD_DATA(cx)->operationCallbackFlag);
JS_ASSERT(JS_THREAD_DATA(cx)->interruptFlags & JSThreadData::INTERRUPT_OPERATION_CALLBACK);
/*
* Reset the callback flag first, then yield. If another thread is racing
* us here we will accumulate another callback request which will be
* serviced at the next opportunity.
*/
JS_THREAD_DATA(cx)->operationCallbackFlag = 0;
JS_ATOMIC_CLEAR_MASK(&JS_THREAD_DATA(cx)->interruptFlags,
JSThreadData::INTERRUPT_OPERATION_CALLBACK);
/*
* Unless we are going to run the GC, we automatically yield the current
@ -1875,27 +1891,35 @@ js_InvokeOperationCallback(JSContext *cx)
return !cb || cb(cx);
}
void
js_TriggerAllOperationCallbacks(JSRuntime *rt, JSBool gcLocked)
JSBool
js_HandleExecutionInterrupt(JSContext *cx)
{
JSBool result = JS_TRUE;
if (JS_THREAD_DATA(cx)->interruptFlags & JSThreadData::INTERRUPT_OPERATION_CALLBACK)
result = js_InvokeOperationCallback(cx) && result;
return result;
}
namespace js {
void
TriggerAllOperationCallbacks(JSRuntime *rt)
{
#ifdef JS_THREADSAFE
Conditionally<AutoLockGC> lockIf(!gcLocked, rt);
#endif
for (ThreadDataIter i(rt); !i.empty(); i.popFront())
i.threadData()->triggerOperationCallback();
}
} /* namespace js */
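/*
 * Illustrative sketch (hypothetical helper): a watchdog thread can interrupt
 * all running scripts by asking every thread to service its operation
 * callback. Per the declaration in jscntxt.h, the GC lock must be held
 * around the call.
 */
static void
ExampleInterruptAllScripts(JSRuntime *rt)
{
    AutoLockGC lock(rt);               /* TriggerAllOperationCallbacks expects the GC lock */
    TriggerAllOperationCallbacks(rt);  /* sets INTERRUPT_OPERATION_CALLBACK on each JSThreadData */
}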
JSStackFrame *
js_GetScriptedCaller(JSContext *cx, JSStackFrame *fp)
{
if (!fp)
fp = js_GetTopStackFrame(cx);
while (fp) {
if (fp->hasScript())
return fp;
while (fp && fp->isDummyFrame())
fp = fp->down;
}
return NULL;
JS_ASSERT_IF(fp, fp->hasScript());
return fp;
}
jsbytecode*
@ -2097,61 +2121,40 @@ JSContext::containingSegment(const JSStackFrame *target)
return NULL;
}
void
JSContext::checkMallocGCPressure(void *p)
JS_FRIEND_API(void)
JSRuntime::onTooMuchMalloc()
{
if (!p) {
js_ReportOutOfMemory(this);
return;
}
#ifdef JS_THREADSAFE
JS_ASSERT(thread);
JS_ASSERT(thread->gcThreadMallocBytes <= 0);
ptrdiff_t n = JS_GC_THREAD_MALLOC_LIMIT - thread->gcThreadMallocBytes;
thread->gcThreadMallocBytes = JS_GC_THREAD_MALLOC_LIMIT;
AutoLockGC lock(runtime);
runtime->gcMallocBytes -= n;
AutoLockGC lock(this);
/*
* Trigger the GC on memory pressure but only if we are inside a request
* and not inside a GC.
* We can be called outside a request and can race against a GC that
* mutates the JSThread set during the sweeping phase.
*/
if (runtime->isGCMallocLimitReached() && requestDepth != 0)
js_WaitForGC(this);
#endif
{
if (!runtime->gcRunning) {
JS_ASSERT(runtime->isGCMallocLimitReached());
runtime->gcMallocBytes = -1;
/*
* Empty the GC free lists to trigger a last-ditch GC when any GC
* thing is allocated later on this thread. This makes it unnecessary
* to check for memory pressure on the fast path of the GC
* allocator. We cannot touch the free lists on other threads as
* their manipulation is not thread-safe.
*/
JS_THREAD_DATA(this)->gcFreeLists.purge();
js_TriggerGC(this, true);
}
}
TriggerGC(this);
}
bool
JSContext::isConstructing()
JS_FRIEND_API(void *)
JSRuntime::onOutOfMemory(void *p, size_t nbytes, JSContext *cx)
{
#ifdef JS_TRACER
if (JS_ON_TRACE(this)) {
JS_ASSERT(bailExit);
return *bailExit->pc == JSOP_NEW;
}
#ifdef JS_THREADSAFE
gcHelperThread.waitBackgroundSweepEnd(this);
if (!p)
p = ::js_malloc(nbytes);
else if (p == reinterpret_cast<void *>(1))
p = ::js_calloc(nbytes);
else
p = ::js_realloc(p, nbytes);
if (p)
return p;
#endif
JSStackFrame *fp = js_GetTopStackFrame(this);
return fp && (fp->flags & JSFRAME_CONSTRUCTING);
if (cx)
js_ReportOutOfMemory(cx);
return NULL;
}
/*
* Release pool's arenas if the stackPool has existed for longer than the
* limit specified by gcEmptyArenaPoolLifespan.
@ -2171,9 +2174,10 @@ void
JSContext::purge()
{
FreeOldArenas(runtime, &regExpPool);
/* FIXME: bug 586161 */
compartment->purge(this);
}
namespace js {
void

View file

@ -59,6 +59,7 @@
#include "jsatom.h"
#include "jsdhash.h"
#include "jsdtoa.h"
#include "jsfun.h"
#include "jsgc.h"
#include "jsgcchunk.h"
#include "jshashtable.h"
@ -69,7 +70,6 @@
#include "jsregexp.h"
#include "jsutil.h"
#include "jsarray.h"
#include "jstask.h"
#include "jsvector.h"
#include "prmjtime.h"
@ -127,6 +127,10 @@ namespace JSC {
namespace js {
#ifdef JS_METHODJIT
struct VMFrame;
#endif
/* Tracer constants. */
static const size_t MONITOR_N_GLOBAL_STATES = 4;
static const size_t FRAGMENT_TABLE_SIZE = 512;
@ -156,6 +160,10 @@ struct FragPI;
typedef nanojit::HashMap<uint32, FragPI, nanojit::DefaultHash<uint32> > FragStatsMap;
#endif
namespace mjit {
class CallStackIterator;
}
/*
* Allocation policy that calls JSContext memory functions and reports errors
* to the context. Since the JSContext given on construction is stored for
@ -178,7 +186,7 @@ class ContextAllocPolicy
};
/* Holds the execution state during trace execution. */
struct TracerState
{
JSContext* cx; // current VM context handle
double* stackBase; // native stack base
@ -221,6 +229,33 @@ struct TracerState
~TracerState();
};
#ifdef JS_METHODJIT
namespace mjit {
struct Trampolines
{
void (* forceReturn)();
JSC::ExecutablePool *forceReturnPool;
#if (defined(JS_NO_FASTCALL) && defined(JS_CPU_X86)) || defined(_WIN64)
void (* forceReturnFast)();
JSC::ExecutablePool *forceReturnFastPool;
#endif
};
struct ThreadData
{
JSC::ExecutableAllocator *execPool;
// Trampolines for JIT code.
Trampolines trampolines;
VMFrame *activeFrame;
bool Initialize();
void Finish();
};
}
#endif /* JS_METHODJIT */
/*
* Storage for the execution state and store during trace execution. Generated
* code depends on the fact that the globals begin |MAX_NATIVE_STACK_SLOTS|
@ -659,9 +694,8 @@ class StackSpace
inline Value *firstUnused() const;
inline bool isCurrentAndActive(JSContext *cx) const;
#ifdef DEBUG
friend class AllFramesIter;
StackSegment *getCurrentSegment() const { return currentSegment; }
#endif
/*
* Allocate nvals on the top of the stack, report error on failure.
@ -763,6 +797,10 @@ class StackSpace
inline JSStackFrame *getInlineFrame(JSContext *cx, Value *sp,
uintN nmissing, uintN nfixed) const;
JS_REQUIRES_STACK
inline JSStackFrame *getInlineFrameUnchecked(JSContext *cx, Value *sp,
uintN nmissing) const;
JS_REQUIRES_STACK
inline void pushInlineFrame(JSContext *cx, JSStackFrame *fp, jsbytecode *pc,
JSStackFrame *newfp);
@ -770,24 +808,23 @@ class StackSpace
JS_REQUIRES_STACK
inline void popInlineFrame(JSContext *cx, JSStackFrame *up, JSStackFrame *down);
/*
* For the special case of the slow native stack frame pushed and popped by
* tracing deep bail logic.
*/
JS_REQUIRES_STACK
void getSynthesizedSlowNativeFrame(JSContext *cx, StackSegment *&seg, JSStackFrame *&fp);
JS_REQUIRES_STACK
void pushSynthesizedSlowNativeFrame(JSContext *cx, StackSegment *seg, JSFrameRegs &regs);
JS_REQUIRES_STACK
void popSynthesizedSlowNativeFrame(JSContext *cx);
/*
* For pushing a bookkeeping frame.
*/
JS_REQUIRES_STACK
bool pushDummyFrame(JSContext *cx, FrameGuard &fg, JSFrameRegs &regs, JSObject *scopeChain);
/*
* Ensure space based on an over-recursion limit.
*/
inline bool ensureSpace(JSContext *maybecx, Value *start, Value *from,
Value *& limit, uint32 nslots) const;
/*
* Create a stack limit for quickly detecting over-recursion and whether
* a commit bump is needed.
*/
inline Value *makeStackLimit(Value *start) const;
};
JS_STATIC_ASSERT(StackSpace::CAPACITY_VALS % StackSpace::COMMIT_VALS == 0);
@ -824,6 +861,24 @@ class FrameRegsIter
jsbytecode *pc() const { return curpc; }
};
/*
* Utility class for iteration over all active stack frames.
*/
class AllFramesIter
{
public:
AllFramesIter(JSContext *cx);
bool done() const { return curfp == NULL; }
AllFramesIter& operator++();
JSStackFrame *fp() const { return curfp; }
private:
StackSegment *curcs;
JSStackFrame *curfp;
};
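/*
 * Illustrative sketch (hypothetical helper): walking every live frame with
 * the iterator above, as js_SetDebugMode does when deciding whether a
 * script is still active and must not be recompiled.
 */
inline bool
ExampleHasScriptedFrame(JSContext *cx)
{
    for (AllFramesIter i(cx); !i.done(); ++i) {
        if (i.fp()->maybeScript())
            return true;
    }
    return false;
}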
/* Holds the number of recording attempts for an address. */
typedef HashMap<jsbytecode*,
size_t,
@ -1010,7 +1065,7 @@ struct JSThreadData {
* If this flag is set, we were asked to call back the operation callback
* as soon as possible.
*/
volatile int32 operationCallbackFlag;
volatile jsword interruptFlags;
JSGCFreeLists gcFreeLists;
@ -1036,6 +1091,13 @@ struct JSThreadData {
#ifdef JS_TRACER
/* Trace-tree JIT recorder/interpreter state. */
js::TraceMonitor traceMonitor;
/* Counts the number of iterations run by a trace. */
unsigned iterationCounter;
#endif
#ifdef JS_METHODJIT
js::mjit::ThreadData jmData;
#endif
/* Lock-free hashed lists of scripts created by eval to garbage-collect. */
@ -1048,7 +1110,7 @@ struct JSThreadData {
/* State used by dtoa.c. */
DtoaState *dtoaState;
/*
* State used to cache some double-to-string conversions. A stupid
* optimization aimed directly at v8-splay.js, which stupidly converts
* many doubles multiple times in a row.
@ -1062,6 +1124,9 @@ struct JSThreadData {
/* Cached native iterators. */
JSObject *cachedNativeIterators[NATIVE_ITER_CACHE_SIZE];
/* Native iterator most recently started. */
JSObject *lastNativeIterator;
/* Base address of the native stack for the current thread. */
jsuword *nativeStackBase;
@ -1075,6 +1140,8 @@ struct JSThreadData {
void mark(JSTracer *trc);
void purge(JSContext *cx);
static const jsword INTERRUPT_OPERATION_CALLBACK = 0x1;
void triggerOperationCallback() {
/*
* Use JS_ATOMIC_SET in the hope that it will make sure the write will
@ -1082,7 +1149,7 @@ struct JSThreadData {
* Note that we only care about visibility here, not read/write
* ordering.
*/
JS_ATOMIC_SET(&operationCallbackFlag, 1);
JS_ATOMIC_SET_MASK(&interruptFlags, INTERRUPT_OPERATION_CALLBACK);
}
};
@ -1107,12 +1174,6 @@ struct JSThread {
/* Indicates that the thread is waiting in ClaimTitle from jslock.cpp. */
JSTitle *titleToShare;
/*
* Thread-local version of JSRuntime.gcMallocBytes to avoid taking
* locks on each JS_malloc.
*/
ptrdiff_t gcThreadMallocBytes;
/*
* This thread is inside js_GC, either waiting until it can start GC, or
* waiting for GC to finish on another thread. This thread holds no locks;
@ -1122,22 +1183,23 @@ struct JSThread {
*/
bool gcWaiting;
/*
* The context running the requests.
*/
JSContext *requestContext;
/* The request depth for this thread. */
unsigned requestDepth;
/* Number of JS_SuspendRequest calls without JS_ResumeRequest. */
unsigned suspendCount;
# ifdef DEBUG
unsigned checkRequestDepth;
# endif
/* Weak ref, for low-cost sealed title locking */
JSTitle *lockedSealedTitle;
/* Factored out of JSThread for !JS_THREADSAFE embedding in JSRuntime. */
JSThreadData data;
};
/*
* We update JSRuntime::gcMallocBytes only when JSThread::gcThreadMallocBytes
* exhausts the following limit.
*/
const size_t JS_GC_THREAD_MALLOC_LIMIT = 1 << 19;
#define JS_THREAD_DATA(cx) (&(cx)->thread->data)
extern JSThread *
@ -1175,7 +1237,7 @@ typedef enum JSRuntimeState {
typedef struct JSPropertyTreeEntry {
JSDHashEntryHdr hdr;
JSScopeProperty *child;
js::Shape *child;
} JSPropertyTreeEntry;
@ -1184,7 +1246,7 @@ namespace js {
struct GCPtrHasher
{
typedef void *Lookup;
static HashNumber hash(void *key) {
return HashNumber(uintptr_t(key) >> JS_GCTHING_ZEROBITS);
}
@ -1210,7 +1272,7 @@ typedef js::HashMap<void *,
/* If HashNumber grows, need to change WrapperHasher. */
JS_STATIC_ASSERT(sizeof(HashNumber) == 4);
struct WrapperHasher
{
typedef Value Lookup;
@ -1238,6 +1300,10 @@ struct JSCompartment {
void *data;
bool marked;
js::WrapperMap crossCompartmentWrappers;
bool debugMode;
/* List all scripts in this compartment. */
JSCList scripts;
JSCompartment(JSRuntime *cx);
~JSCompartment();
@ -1254,6 +1320,12 @@ struct JSCompartment {
bool wrapException(JSContext *cx);
void sweep(JSContext *cx);
#ifdef JS_METHODJIT
bool addScript(JSContext *cx, JSScript *script);
void removeScript(JSScript *script);
#endif
void purge(JSContext *cx);
};
typedef void
@ -1319,6 +1391,7 @@ struct JSRuntime {
size_t gcLastBytes;
size_t gcMaxBytes;
size_t gcMaxMallocBytes;
size_t gcNewArenaTriggerBytes;
uint32 gcEmptyArenaPoolLifespan;
uint32 gcNumber;
js::GCMarker *gcMarkingTracer;
@ -1333,20 +1406,10 @@ struct JSRuntime {
* are not guaranteed, so stores issued by one thread may be lost due to
* unsynchronized read-modify-write cycles on other threads.
*/
JSPackedBool gcPoke;
JSPackedBool gcRunning;
JSPackedBool gcRegenShapes;
/*
* During gc, if rt->gcRegenShapes &&
* (scope->flags & JSScope::SHAPE_REGEN) == rt->gcRegenShapesScopeFlag,
* then the scope's shape has already been regenerated during this GC.
* To avoid having to sweep JSScopes, the bit's meaning toggles with each
* shape-regenerating GC.
*
* FIXME Once scopes are GC'd (bug 505004), this will be obsolete.
*/
uint8 gcRegenShapesScopeFlag;
bool gcPoke;
bool gcMarkAndSweep;
bool gcRunning;
bool gcRegenShapes;
#ifdef JS_GC_ZEAL
jsrefcount gcZeal;
@ -1354,18 +1417,16 @@ struct JSRuntime {
JSGCCallback gcCallback;
private:
/*
* Malloc counter to measure memory pressure for GC scheduling. It runs
* from gcMaxMallocBytes down to zero.
*/
ptrdiff_t gcMallocBytes;
#ifdef JS_THREADSAFE
JSBackgroundThread gcHelperThread;
#endif
volatile ptrdiff_t gcMallocBytes;
public:
js::GCChunkAllocator *gcChunkAllocator;
void setCustomGCChunkAllocator(js::GCChunkAllocator *allocator) {
JS_ASSERT(allocator);
JS_ASSERT(state == JSRTS_DOWN);
@ -1417,6 +1478,8 @@ struct JSRuntime {
uint32 requestCount;
JSThread *gcThread;
js::GCHelperThread gcHelperThread;
/* Lock and owning thread pointer for JS_LOCK_RUNTIME. */
PRLock *rtLock;
#ifdef DEBUG
@ -1476,9 +1539,9 @@ struct JSRuntime {
#define JS_PROPERTY_TREE(cx) ((cx)->runtime->propertyTree)
/*
* The propertyRemovals counter is incremented for every JSScope::clear,
* and for each JSScope::remove method call that frees a slot in an object.
* See js_NativeGet and js_NativeSet in jsobj.cpp.
* The propertyRemovals counter is incremented for every JSObject::clear,
* and for each JSObject::remove method call that frees a slot in the given
* object. See js_NativeGet and js_NativeSet in jsobj.cpp.
*/
int32 propertyRemovals;
@ -1530,14 +1593,14 @@ struct JSRuntime {
/*
* Runtime-shared empty scopes for well-known built-in objects that lack
* class prototypes (the usual locus of an emptyScope). Mnemonic: ABCDEW
* class prototypes (the usual locus of an emptyShape). Mnemonic: ABCDEW
*/
JSEmptyScope *emptyArgumentsScope;
JSEmptyScope *emptyBlockScope;
JSEmptyScope *emptyCallScope;
JSEmptyScope *emptyDeclEnvScope;
JSEmptyScope *emptyEnumeratorScope;
JSEmptyScope *emptyWithScope;
js::EmptyShape *emptyArgumentsShape;
js::EmptyShape *emptyBlockShape;
js::EmptyShape *emptyCallShape;
js::EmptyShape *emptyDeclEnvShape;
js::EmptyShape *emptyEnumeratorShape;
js::EmptyShape *emptyWithShape;
/*
* Various metering fields are defined at the end of JSRuntime. In this
@ -1569,16 +1632,34 @@ struct JSRuntime {
jsrefcount claimedTitles;
jsrefcount deadContexts;
jsrefcount deadlocksAvoided;
jsrefcount liveScopes;
jsrefcount liveShapes;
jsrefcount sharedTitles;
jsrefcount totalScopes;
jsrefcount liveScopeProps;
jsrefcount liveScopePropsPreSweep;
jsrefcount totalScopeProps;
jsrefcount totalShapes;
jsrefcount liveObjectProps;
jsrefcount liveObjectPropsPreSweep;
jsrefcount totalObjectProps;
jsrefcount livePropTreeNodes;
jsrefcount duplicatePropTreeNodes;
jsrefcount totalPropTreeNodes;
jsrefcount propTreeKidsChunks;
jsrefcount liveDictModeNodes;
/*
* NB: emptyShapes is init'ed iff at least one of these envars is set:
*
* JS_PROPTREE_STATFILE statistics on the property tree forest
* JS_PROPTREE_DUMPFILE all paths in the property tree forest
*/
const char *propTreeStatFilename;
const char *propTreeDumpFilename;
bool meterEmptyShapes() const { return propTreeStatFilename || propTreeDumpFilename; }
typedef js::HashSet<js::EmptyShape *,
js::DefaultHasher<js::EmptyShape *>,
js::SystemAllocPolicy> EmptyShapeSet;
EmptyShapeSet emptyShapes;
/* String instrumentation. */
jsrefcount liveStrings;
@ -1649,11 +1730,36 @@ struct JSRuntime {
void setGCTriggerFactor(uint32 factor);
void setGCLastBytes(size_t lastBytes);
void* malloc(size_t bytes) { return ::js_malloc(bytes); }
/*
* Call the system malloc while checking for GC memory pressure and
* reporting OOM error when cx is not null.
*/
void* malloc(size_t bytes, JSContext *cx = NULL) {
updateMallocCounter(bytes);
void *p = ::js_malloc(bytes);
return JS_LIKELY(!!p) ? p : onOutOfMemory(NULL, bytes, cx);
}
void* calloc(size_t bytes) { return ::js_calloc(bytes); }
/*
* Call the system calloc while checking for GC memory pressure and
* reporting OOM error when cx is not null.
*/
void* calloc(size_t bytes, JSContext *cx = NULL) {
updateMallocCounter(bytes);
void *p = ::js_calloc(bytes);
return JS_LIKELY(!!p) ? p : onOutOfMemory(reinterpret_cast<void *>(1), bytes, cx);
}
void* realloc(void* p, size_t bytes) { return ::js_realloc(p, bytes); }
void* realloc(void* p, size_t bytes, JSContext *cx = NULL) {
/*
* For compatibility we do not account for realloc that increases
* previously allocated memory.
*/
if (!p)
updateMallocCounter(bytes);
void *p2 = ::js_realloc(p, bytes);
return JS_LIKELY(!!p2) ? p2 : onOutOfMemory(p, bytes, cx);
}
void free(void* p) { ::js_free(p); }
@ -1669,12 +1775,45 @@ struct JSRuntime {
gcMaxMallocBytes = (ptrdiff_t(value) >= 0) ? value : size_t(-1) >> 1;
resetGCMallocBytes();
}
/*
* Call this after allocating memory held by GC things, to update memory
* pressure counters or report an OOM error if necessary. If oomError and
* cx are not null, the function also reports the OOM error.
*
* The function must be called outside the GC lock, and in case of an OOM
* error the caller must ensure that no deadlock is possible during OOM
* reporting. (Usage is sketched after this struct.)
*/
void updateMallocCounter(size_t nbytes) {
/* We tolerate any thread races when updating gcMallocBytes. */
ptrdiff_t newCount = gcMallocBytes - ptrdiff_t(nbytes);
gcMallocBytes = newCount;
if (JS_UNLIKELY(newCount <= 0))
onTooMuchMalloc();
}
private:
/*
* The function must be called outside the GC lock.
*/
JS_FRIEND_API(void) onTooMuchMalloc();
/*
* This should be called after system malloc/realloc returns NULL to try
* to recover some memory or to report an error. Failures in malloc and
* calloc are signaled by p == NULL and p == reinterpret_cast<void *>(1).
* Other values of p mean a realloc failure.
*
* The function must be called outside the GC lock.
*/
JS_FRIEND_API(void *) onOutOfMemory(void *p, size_t nbytes, JSContext *cx);
};
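/*
 * Illustrative sketch (hypothetical helper): typical use of the allocation
 * hooks above. rt->malloc(nbytes, cx) charges the malloc counter and, on
 * failure, reports OOM on cx; memory obtained by other means can be charged
 * explicitly with updateMallocCounter. Both calls must be made outside the
 * GC lock.
 */
inline void *
ExampleAllocGCOwnedMemory(JSRuntime *rt, JSContext *cx, size_t nbytes)
{
    void *p = rt->malloc(nbytes, cx);  /* NULL means the OOM error was already reported */
    if (!p)
        return NULL;
    /*
     * Memory handed to us by an external allocator would instead be
     * accounted for with rt->updateMallocCounter(nbytes).
     */
    return p;
}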
/* Common macros to access thread-local caches in JSThread or JSRuntime. */
#define JS_GSN_CACHE(cx) (JS_THREAD_DATA(cx)->gsnCache)
#define JS_PROPERTY_CACHE(cx) (JS_THREAD_DATA(cx)->propertyCache)
#define JS_TRACE_MONITOR(cx) (JS_THREAD_DATA(cx)->traceMonitor)
#define JS_METHODJIT_DATA(cx) (JS_THREAD_DATA(cx)->jmData)
#define JS_SCRIPTS_TO_GC(cx) (JS_THREAD_DATA(cx)->scriptsToGC)
#ifdef DEBUG
@ -1732,12 +1871,8 @@ typedef struct JSResolvingEntry {
extern const JSDebugHooks js_NullDebugHooks; /* defined in jsdbgapi.cpp */
namespace js {
class AutoGCRooter;
}
namespace js {
class RegExp;
class RegExpStatics
{
@ -1844,7 +1979,7 @@ class RegExpStatics
void getRightContext(JSSubString *out) const;
};
}
} /* namespace js */
struct JSContext
{
@ -1877,7 +2012,7 @@ struct JSContext
JSPackedBool generatingError;
/* Exception state -- the exception member is a GC root by definition. */
JSPackedBool throwing; /* is there a pending exception? */
JSBool throwing; /* is there a pending exception? */
js::Value exception; /* most-recently-thrown exception */
/* Limit pointer for checking native stack consumption during recursion. */
@ -1913,16 +2048,15 @@ struct JSContext
return !!regs;
}
private:
public:
friend class js::StackSpace;
friend bool js::Interpret(JSContext *);
friend bool js::Interpret(JSContext *, JSStackFrame *, uintN, uintN);
/* 'regs' must only be changed by calling this function. */
void setCurrentRegs(JSFrameRegs *regs) {
this->regs = regs;
}
public:
/* Temporary arena pool used while compiling and decompiling. */
JSArenaPool tempPool;
@ -2028,20 +2162,12 @@ struct JSContext
}
return fp;
}
#ifdef JS_THREADSAFE
JSThread *thread;
jsrefcount requestDepth;
/* Same as requestDepth but ignoring JS_SuspendRequest/JS_ResumeRequest */
jsrefcount outstandingRequests;
JSContext *prevRequestContext;
jsrefcount prevRequestDepth;
# ifdef DEBUG
unsigned checkRequestDepth;
# endif
JSTitle *lockedSealedTitle; /* weak ref, for low-cost sealed
title locking */
unsigned outstandingRequests;/* number of JS_BeginRequest calls
without the corresponding
JS_EndRequest. */
JSCList threadLinks; /* JSThread contextList linkage */
#define CX_FROM_THREAD_LINKS(tl) \
@ -2143,87 +2269,34 @@ struct JSContext
#ifdef JS_THREADSAFE
/*
* The sweep task for this context.
* When non-null JSContext::free delegates the job to the background
* thread.
*/
js::BackgroundSweepTask *gcSweepTask;
js::GCHelperThread *gcBackgroundFree;
#endif
ptrdiff_t &getMallocCounter() {
#ifdef JS_THREADSAFE
return thread->gcThreadMallocBytes;
#else
return runtime->gcMallocBytes;
#endif
}
/*
* Call this after allocating memory held by GC things, to update memory
* pressure counters or report the OOM error if necessary.
*/
inline void updateMallocCounter(void *p, size_t nbytes) {
JS_ASSERT(ptrdiff_t(nbytes) >= 0);
ptrdiff_t &counter = getMallocCounter();
counter -= ptrdiff_t(nbytes);
if (!p || counter <= 0)
checkMallocGCPressure(p);
}
/*
* Call this after successfully allocating memory held by GC things, to
* update memory pressure counters.
*/
inline void updateMallocCounter(size_t nbytes) {
JS_ASSERT(ptrdiff_t(nbytes) >= 0);
ptrdiff_t &counter = getMallocCounter();
counter -= ptrdiff_t(nbytes);
if (counter <= 0) {
/*
* Use 1 as an arbitrary non-null pointer indicating successful
* allocation.
*/
checkMallocGCPressure(reinterpret_cast<void *>(jsuword(1)));
}
}
inline void* malloc(size_t bytes) {
JS_ASSERT(bytes != 0);
void *p = runtime->malloc(bytes);
updateMallocCounter(p, bytes);
return p;
return runtime->malloc(bytes, this);
}
inline void* mallocNoReport(size_t bytes) {
JS_ASSERT(bytes != 0);
void *p = runtime->malloc(bytes);
if (!p)
return NULL;
updateMallocCounter(bytes);
return p;
return runtime->malloc(bytes, NULL);
}
inline void* calloc(size_t bytes) {
JS_ASSERT(bytes != 0);
void *p = runtime->calloc(bytes);
updateMallocCounter(p, bytes);
return p;
return runtime->calloc(bytes, this);
}
inline void* realloc(void* p, size_t bytes) {
void *orig = p;
p = runtime->realloc(p, bytes);
/*
* For compatibility we do not account for realloc that increases
* previously allocated memory.
*/
updateMallocCounter(p, orig ? 0 : bytes);
return p;
return runtime->realloc(p, bytes, this);
}
inline void free(void* p) {
#ifdef JS_THREADSAFE
if (gcSweepTask) {
gcSweepTask->freeLater(p);
if (gcBackgroundFree) {
gcBackgroundFree->freeLater(p);
return;
}
#endif
@ -2268,8 +2341,6 @@ struct JSContext
this->free(p);
}
bool isConstructing();
void purge();
js::StackSpace &stack() const {
@ -2334,22 +2405,24 @@ namespace js {
class AutoCheckRequestDepth {
JSContext *cx;
public:
AutoCheckRequestDepth(JSContext *cx) : cx(cx) { cx->checkRequestDepth++; }
AutoCheckRequestDepth(JSContext *cx) : cx(cx) { cx->thread->checkRequestDepth++; }
~AutoCheckRequestDepth() {
JS_ASSERT(cx->checkRequestDepth != 0);
cx->checkRequestDepth--;
JS_ASSERT(cx->thread->checkRequestDepth != 0);
cx->thread->checkRequestDepth--;
}
};
}
# define CHECK_REQUEST(cx) \
JS_ASSERT((cx)->requestDepth || (cx)->thread == (cx)->runtime->gcThread);\
# define CHECK_REQUEST(cx) \
JS_ASSERT((cx)->thread); \
JS_ASSERT((cx)->thread->requestDepth || (cx)->thread == (cx)->runtime->gcThread); \
AutoCheckRequestDepth _autoCheckRequestDepth(cx);
#else
# define CHECK_REQUEST(cx) ((void)0)
# define CHECK_REQUEST(cx) ((void) 0)
# define CHECK_REQUEST_THREAD(cx) ((void) 0)
#endif
static inline uintN
@ -2375,17 +2448,13 @@ class AutoGCRooter {
: down(cx->autoGCRooters), tag(tag), context(cx)
{
JS_ASSERT(this != cx->autoGCRooters);
#ifdef JS_THREADSAFE
JS_ASSERT(cx->requestDepth != 0);
#endif
CHECK_REQUEST(cx);
cx->autoGCRooters = this;
}
~AutoGCRooter() {
JS_ASSERT(this == context->autoGCRooters);
#ifdef JS_THREADSAFE
JS_ASSERT(context->requestDepth != 0);
#endif
CHECK_REQUEST(context);
context->autoGCRooters = down;
}
@ -2395,8 +2464,8 @@ class AutoGCRooter {
#ifdef __GNUC__
# pragma GCC visibility push(default)
#endif
friend void ::js_TraceContext(JSTracer *trc, JSContext *acx);
friend void ::js_TraceRuntime(JSTracer *trc);
friend void MarkContext(JSTracer *trc, JSContext *acx);
friend void MarkRuntime(JSTracer *trc);
#ifdef __GNUC__
# pragma GCC visibility pop
#endif
@ -2417,7 +2486,7 @@ class AutoGCRooter {
enum {
JSVAL = -1, /* js::AutoValueRooter */
SPROP = -2, /* js::AutoScopePropertyRooter */
SHAPE = -2, /* js::AutoShapeRooter */
PARSER = -3, /* js::Parser */
SCRIPT = -4, /* js::AutoScriptRooter */
ENUMERATOR = -5, /* js::AutoEnumStateRooter */
@ -2501,7 +2570,7 @@ class AutoValueRooter : private AutoGCRooter
}
friend void AutoGCRooter::trace(JSTracer *trc);
friend void ::js_TraceRuntime(JSTracer *trc);
friend void MarkRuntime(JSTracer *trc);
private:
Value val;
@ -2530,7 +2599,7 @@ class AutoObjectRooter : private AutoGCRooter {
}
friend void AutoGCRooter::trace(JSTracer *trc);
friend void ::js_TraceRuntime(JSTracer *trc);
friend void MarkRuntime(JSTracer *trc);
private:
JSObject *obj;
@ -2601,20 +2670,20 @@ class AutoArrayRooter : private AutoGCRooter {
JS_DECL_USE_GUARD_OBJECT_NOTIFIER
};
class AutoScopePropertyRooter : private AutoGCRooter {
class AutoShapeRooter : private AutoGCRooter {
public:
AutoScopePropertyRooter(JSContext *cx, JSScopeProperty *sprop
JS_GUARD_OBJECT_NOTIFIER_PARAM)
: AutoGCRooter(cx, SPROP), sprop(sprop)
AutoShapeRooter(JSContext *cx, const js::Shape *shape
JS_GUARD_OBJECT_NOTIFIER_PARAM)
: AutoGCRooter(cx, SHAPE), shape(shape)
{
JS_GUARD_OBJECT_NOTIFIER_INIT;
}
friend void AutoGCRooter::trace(JSTracer *trc);
friend void ::js_TraceRuntime(JSTracer *trc);
friend void MarkRuntime(JSTracer *trc);
private:
JSScopeProperty * const sprop;
const js::Shape * const shape;
JS_DECL_USE_GUARD_OBJECT_NOTIFIER
};
@ -2657,7 +2726,7 @@ class AutoIdRooter : private AutoGCRooter
}
friend void AutoGCRooter::trace(JSTracer *trc);
friend void ::js_TraceRuntime(JSTracer *trc);
friend void MarkRuntime(JSTracer *trc);
private:
jsid id_;
@ -2756,7 +2825,7 @@ class AutoXMLRooter : private AutoGCRooter {
}
friend void AutoGCRooter::trace(JSTracer *trc);
friend void ::js_TraceRuntime(JSTracer *trc);
friend void MarkRuntime(JSTracer *trc);
private:
JSXML * const xml;
@ -2810,6 +2879,37 @@ class AutoReleasePtr {
~AutoReleasePtr() { cx->free(ptr); }
};
class AutoLocalNameArray {
public:
explicit AutoLocalNameArray(JSContext *cx, JSFunction *fun
JS_GUARD_OBJECT_NOTIFIER_PARAM)
: context(cx),
mark(JS_ARENA_MARK(&cx->tempPool)),
names(fun->getLocalNameArray(cx, &cx->tempPool)),
count(fun->countLocalNames())
{
JS_GUARD_OBJECT_NOTIFIER_INIT;
}
~AutoLocalNameArray() {
JS_ARENA_RELEASE(&context->tempPool, mark);
}
operator bool() const { return !!names; }
uint32 length() const { return count; }
const jsuword &operator [](unsigned i) const { return names[i]; }
private:
JSContext *context;
void *mark;
jsuword *names;
uint32 count;
JS_DECL_USE_GUARD_OBJECT_NOTIFIER
};
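/*
 * Illustrative sketch (hypothetical helper, for a function known to have
 * local names): the class above marks cx->tempPool on construction and
 * releases the mark on destruction, so callers need no explicit cleanup.
 */
inline uint32
ExampleCountLocalNames(JSContext *cx, JSFunction *fun)
{
    AutoLocalNameArray names(cx, fun);
    if (!names)
        return 0;              /* allocating the name array failed */
    return names.length();     /* the tempPool mark is released when |names| dies */
}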
} /* namespace js */
class JSAutoResolveFlags
@ -3112,7 +3212,8 @@ extern JSErrorFormatString js_ErrorFormatString[JSErr_Limit];
#endif
#ifdef JS_THREADSAFE
# define JS_ASSERT_REQUEST_DEPTH(cx) JS_ASSERT((cx)->requestDepth >= 1)
# define JS_ASSERT_REQUEST_DEPTH(cx) (JS_ASSERT((cx)->thread), \
JS_ASSERT((cx)->thread->requestDepth >= 1))
#else
# define JS_ASSERT_REQUEST_DEPTH(cx) ((void) 0)
#endif
@ -3124,7 +3225,7 @@ extern JSErrorFormatString js_ErrorFormatString[JSErr_Limit];
*/
#define JS_CHECK_OPERATION_LIMIT(cx) \
(JS_ASSERT_REQUEST_DEPTH(cx), \
(!JS_THREAD_DATA(cx)->operationCallbackFlag || js_InvokeOperationCallback(cx)))
(!(JS_THREAD_DATA(cx)->interruptFlags & JSThreadData::INTERRUPT_OPERATION_CALLBACK) || js_InvokeOperationCallback(cx)))
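/*
 * Illustrative sketch (hypothetical helper): long-running loops poll the
 * interrupt flag through this macro so that operation callbacks triggered
 * from other threads are serviced promptly.
 */
static JSBool
ExampleBusySlice(JSContext *cx, uintN iterations)
{
    for (uintN i = 0; i < iterations; i++) {
        if (!JS_CHECK_OPERATION_LIMIT(cx))
            return JS_FALSE;   /* the operation callback asked us to abort */
        /* ... perform one unit of work ... */
    }
    return JS_TRUE;
}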
/*
* Invoke the operation callback and return false if the current execution
@ -3133,13 +3234,16 @@ extern JSErrorFormatString js_ErrorFormatString[JSErr_Limit];
extern JSBool
js_InvokeOperationCallback(JSContext *cx);
#ifndef JS_THREADSAFE
# define js_TriggerAllOperationCallbacks(rt, gcLocked) \
js_TriggerAllOperationCallbacks (rt)
#endif
extern JSBool
js_HandleExecutionInterrupt(JSContext *cx);
namespace js {
/* Must be called with GC lock taken. */
void
js_TriggerAllOperationCallbacks(JSRuntime *rt, JSBool gcLocked);
TriggerAllOperationCallbacks(JSRuntime *rt);
} /* namespace js */
extern JSStackFrame *
js_GetScriptedCaller(JSContext *cx, JSStackFrame *fp);
@ -3318,7 +3422,7 @@ class AutoValueVector : private AutoGCRooter
const Value &back() const { return vector.back(); }
friend void AutoGCRooter::trace(JSTracer *trc);
private:
Vector<Value, 8> vector;
JS_DECL_USE_GUARD_OBJECT_NOTIFIER
@ -3378,7 +3482,7 @@ class AutoIdVector : private AutoGCRooter
const jsid &back() const { return vector.back(); }
friend void AutoGCRooter::trace(JSTracer *trc);
private:
Vector<jsid, 8> vector;
JS_DECL_USE_GUARD_OBJECT_NOTIFIER

View file

@ -1,4 +1,5 @@
/* -*- Mode: C; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=4 sw=4 et tw=78:
*
* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
@ -107,6 +108,55 @@ StackSpace::isCurrentAndActive(JSContext *cx) const
currentSegment == cx->getCurrentSegment();
}
/*
* SunSpider and v8bench have roughly an average of 9 slots per script.
* Our heuristic for a quick over-recursion check uses a generous slot
* count based on this estimate. We take this frame size and multiply it
* by the old recursion limit from the interpreter.
*
* In the worst case, if an average-size script (<=9 slots) over-recurses,
* it'll effectively be the same as having increased the old inline call
* count to <= 5,000.
*/
static const uint32 MAX_STACK_USAGE = (VALUES_PER_STACK_FRAME + 18) * JS_MAX_INLINE_CALL_COUNT;
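/*
 * Worked example of the heuristic: an average script needs roughly
 * VALUES_PER_STACK_FRAME + 9 values per frame, so the number of such frames
 * that fit under MAX_STACK_USAGE is about
 *
 *   JS_MAX_INLINE_CALL_COUNT * (VALUES_PER_STACK_FRAME + 18)
 *                            / (VALUES_PER_STACK_FRAME + 9),
 *
 * a modest constant factor over the old interpreter limit, which is where
 * the "<= 5,000" figure above comes from.
 */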
JS_ALWAYS_INLINE Value *
StackSpace::makeStackLimit(Value *start) const
{
Value *limit = JS_MIN(start + MAX_STACK_USAGE, end);
#ifdef XP_WIN
limit = JS_MIN(limit, commitEnd);
#endif
return limit;
}
JS_ALWAYS_INLINE bool
StackSpace::ensureSpace(JSContext *maybecx, Value *start, Value *from,
Value *& limit, uint32 nslots) const
{
JS_ASSERT(from == firstUnused());
#ifdef XP_WIN
/*
* If commitEnd < limit, we're guaranteed that we reached the end of the
* commit depth, because stackLimit is MIN(commitEnd, limit). If we did
* reach this soft limit, check if we can bump the commit end without
* over-recursing.
*/
ptrdiff_t nvals = VALUES_PER_STACK_FRAME + nslots;
if (commitEnd <= limit && from + nvals < (start + MAX_STACK_USAGE)) {
if (!ensureSpace(maybecx, from, nvals))
return false;
/* Compute a new limit. */
limit = makeStackLimit(start);
return true;
}
#endif
js_ReportOverRecursed(maybecx);
return false;
}
JS_ALWAYS_INLINE bool
StackSpace::ensureSpace(JSContext *maybecx, Value *from, ptrdiff_t nvals) const
{
@ -274,19 +324,26 @@ InvokeFrameGuard::~InvokeFrameGuard()
}
JS_REQUIRES_STACK JS_ALWAYS_INLINE JSStackFrame *
StackSpace::getInlineFrame(JSContext *cx, Value *sp,
uintN nmissing, uintN nfixed) const
StackSpace::getInlineFrameUnchecked(JSContext *cx, Value *sp,
uintN nmissing) const
{
JS_ASSERT(isCurrentAndActive(cx));
JS_ASSERT(cx->hasActiveSegment());
JS_ASSERT(cx->regs->sp == sp);
JSStackFrame *fp = reinterpret_cast<JSStackFrame *>(sp + nmissing);
return fp;
}
JS_REQUIRES_STACK JS_ALWAYS_INLINE JSStackFrame *
StackSpace::getInlineFrame(JSContext *cx, Value *sp,
uintN nmissing, uintN nfixed) const
{
ptrdiff_t nvals = nmissing + VALUES_PER_STACK_FRAME + nfixed;
if (!ensureSpace(cx, sp, nvals))
return NULL;
JSStackFrame *fp = reinterpret_cast<JSStackFrame *>(sp + nmissing);
return fp;
return getInlineFrameUnchecked(cx, sp, nmissing);
}
JS_REQUIRES_STACK JS_ALWAYS_INLINE void
@ -547,28 +604,50 @@ assertSameCompartment(JSContext *cx, T1 t1, T2 t2, T3 t3, T4 t4, T5 t5)
#undef START_ASSERT_SAME_COMPARTMENT
inline JSBool
callJSNative(JSContext *cx, js::Native native, JSObject *thisobj, uintN argc, js::Value *argv, js::Value *rval)
{
assertSameCompartment(cx, thisobj, ValueArray(argv, argc));
JSBool ok = native(cx, thisobj, argc, argv, rval);
if (ok)
assertSameCompartment(cx, *rval);
return ok;
}
inline JSBool
callJSFastNative(JSContext *cx, js::FastNative native, uintN argc, js::Value *vp)
JS_ALWAYS_INLINE bool
CallJSNative(JSContext *cx, js::Native native, uintN argc, js::Value *vp)
{
#ifdef DEBUG
JSBool alreadyThrowing = cx->throwing;
#endif
assertSameCompartment(cx, ValueArray(vp, argc + 2));
JSBool ok = native(cx, argc, vp);
if (ok)
if (ok) {
assertSameCompartment(cx, vp[0]);
JS_ASSERT_IF(!alreadyThrowing, !cx->throwing);
}
return ok;
}
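/*
 * Illustrative sketch (hypothetical native): the js::Native signature that
 * CallJSNative invokes. vp[0] holds the callee and receives the return
 * value, vp[1] holds |this|, and the arguments start at vp[2].
 */
static JSBool
ExampleArgcNative(JSContext *cx, uintN argc, js::Value *vp)
{
    vp[0].setInt32(int32(argc));   /* return the argument count to the caller */
    return JS_TRUE;
}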
inline JSBool
callJSPropertyOp(JSContext *cx, js::PropertyOp op, JSObject *obj, jsid id, js::Value *vp)
JS_ALWAYS_INLINE bool
CallJSNativeConstructor(JSContext *cx, js::Native native, uintN argc, js::Value *vp)
{
#ifdef DEBUG
JSObject *callee = &vp[0].toObject();
#endif
JS_ASSERT(vp[1].isMagic());
if (!CallJSNative(cx, native, argc, vp))
return false;
/*
* Native constructors must return non-primitive values on success.
* Although it is legal for a constructor to return the callee, there is a
* 99.9999% chance that doing so is a bug. If any valid code actually wants the
* constructor to return the callee, this can be removed.
*
* Proxies are exceptions to both rules: they can return primitives and
* they allow content to return the callee.
*/
extern JSBool proxy_Construct(JSContext *, uintN, Value *);
JS_ASSERT_IF(native != proxy_Construct,
!vp->isPrimitive() && callee != &vp[0].toObject());
return true;
}
JS_ALWAYS_INLINE bool
CallJSPropertyOp(JSContext *cx, js::PropertyOp op, JSObject *obj, jsid id, js::Value *vp)
{
assertSameCompartment(cx, obj, id, *vp);
JSBool ok = op(cx, obj, id, vp);
@ -577,8 +656,8 @@ callJSPropertyOp(JSContext *cx, js::PropertyOp op, JSObject *obj, jsid id, js::V
return ok;
}
inline JSBool
callJSPropertyOpSetter(JSContext *cx, js::PropertyOp op, JSObject *obj, jsid id, js::Value *vp)
JS_ALWAYS_INLINE bool
CallJSPropertyOpSetter(JSContext *cx, js::PropertyOp op, JSObject *obj, jsid id, js::Value *vp)
{
assertSameCompartment(cx, obj, id, *vp);
return op(cx, obj, id, vp);

View file

@ -497,8 +497,7 @@ msFromTime(jsdouble t)
Class js_DateClass = {
js_Date_str,
JSCLASS_HAS_RESERVED_SLOTS(JSObject::DATE_CLASS_RESERVED_SLOTS) |
JSCLASS_HAS_CACHED_PROTO(JSProto_Date) |
JSCLASS_FAST_CONSTRUCTOR,
JSCLASS_HAS_CACHED_PROTO(JSProto_Date),
PropertyStub, /* addProperty */
PropertyStub, /* delProperty */
PropertyStub, /* getProperty */
@ -2479,25 +2478,27 @@ JSBool
js_Date(JSContext *cx, uintN argc, Value *vp)
{
/* Date called as function. */
if (!vp[1].isMagic(JS_FAST_CONSTRUCTOR))
if (!IsConstructing(vp))
return date_format(cx, NowAsMillis(), FORMATSPEC_FULL, vp);
Value *argv = vp + 2;
/* Date called as constructor. */
jsdouble d;
if (argc == 0) {
d = NowAsMillis();
} else if (argc == 1) {
if (!vp[2].isString()) {
if (!argv[0].isString()) {
/* the argument is a millisecond number */
if (!ValueToNumber(cx, vp[2], &d))
return JS_FALSE;
if (!ValueToNumber(cx, argv[0], &d))
return false;
d = TIMECLIP(d);
} else {
/* the argument is a string; parse it. */
JSString *str = js_ValueToString(cx, vp[2]);
JSString *str = js_ValueToString(cx, argv[0]);
if (!str)
return JS_FALSE;
vp[2].setString(str);
return false;
argv[0].setString(str);
if (!date_parseString(str, &d, cx))
d = js_NaN;
@ -2506,8 +2507,8 @@ js_Date(JSContext *cx, uintN argc, Value *vp)
}
} else {
jsdouble msec_time;
if (!date_msecFromArgs(cx, argc, vp + 2, &msec_time))
return JS_FALSE;
if (!date_msecFromArgs(cx, argc, argv, &msec_time))
return false;
if (JSDOUBLE_IS_FINITE(msec_time)) {
msec_time = UTC(msec_time, cx);
@ -2518,10 +2519,10 @@ js_Date(JSContext *cx, uintN argc, Value *vp)
JSObject *obj = js_NewDateObjectMsec(cx, d);
if (!obj)
return JS_FALSE;
return false;
vp->setObject(*obj);
return JS_TRUE;
return true;
}
JSObject *
@ -2529,7 +2530,7 @@ js_InitDateClass(JSContext *cx, JSObject *obj)
{
/* set static LocalTZA */
LocalTZA = -(PRMJ_LocalGMTDifference() * msPerSecond);
JSObject *proto = js_InitClass(cx, obj, NULL, &js_DateClass, (Native) js_Date, MAXARGS,
JSObject *proto = js_InitClass(cx, obj, NULL, &js_DateClass, js_Date, MAXARGS,
NULL, date_methods, NULL, date_static_methods);
if (!proto)
return NULL;

View file

@ -69,6 +69,9 @@
#include "jsautooplen.h"
#include "methodjit/MethodJIT.h"
#include "methodjit/Retcon.h"
using namespace js;
typedef struct JSTrap {
@ -84,6 +87,80 @@ typedef struct JSTrap {
#define DBG_UNLOCK(rt) JS_RELEASE_LOCK((rt)->debuggerLock)
#define DBG_LOCK_EVAL(rt,expr) (DBG_LOCK(rt), (expr), DBG_UNLOCK(rt))
JS_PUBLIC_API(JSBool)
JS_GetDebugMode(JSContext *cx)
{
return cx->compartment->debugMode;
}
#ifdef JS_METHODJIT
static bool
IsScriptLive(JSContext *cx, JSScript *script)
{
for (AllFramesIter i(cx); !i.done(); ++i) {
if (i.fp()->maybeScript() == script)
return true;
}
return false;
}
#endif
JS_FRIEND_API(JSBool)
js_SetDebugMode(JSContext *cx, JSBool debug)
{
cx->compartment->debugMode = debug;
#ifdef JS_METHODJIT
for (JSScript *script = (JSScript *)cx->compartment->scripts.next;
&script->links != &cx->compartment->scripts;
script = (JSScript *)script->links.next) {
if (script->debugMode != debug &&
script->ncode &&
script->ncode != JS_UNJITTABLE_METHOD &&
!IsScriptLive(cx, script)) {
/*
* In the event that this fails, debug mode is left partially on,
* leading to a small performance overhead but no loss of
* correctness. We set the debug flag to false so that the caller
* will not later attempt to use debugging features.
*/
mjit::Recompiler recompiler(cx, script);
if (!recompiler.recompile()) {
cx->compartment->debugMode = JS_FALSE;
return JS_FALSE;
}
}
}
#endif
return JS_TRUE;
}
JS_PUBLIC_API(JSBool)
JS_SetDebugMode(JSContext *cx, JSBool debug)
{
#ifdef DEBUG
for (AllFramesIter i(cx); !i.done(); ++i)
JS_ASSERT(!JS_IsScriptFrame(cx, i.fp()));
#endif
return js_SetDebugMode(cx, debug);
}
static JSBool
CheckDebugMode(JSContext *cx)
{
JSBool debugMode = JS_GetDebugMode(cx);
/*
* :TODO:
* This probably should be an assertion, since it's indicative of a severe
* API misuse.
*/
if (!debugMode) {
JS_ReportErrorFlagsAndNumber(cx, JSREPORT_ERROR, js_GetErrorMessage,
NULL, JSMSG_NEED_DEBUG_MODE);
}
return debugMode;
}
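/*
 * Illustrative sketch (hypothetical helper): an embedding enables debug mode
 * before installing traps, since JS_SetTrap now fails through CheckDebugMode
 * otherwise. Under the method JIT this may recompile the compartment's
 * scripts.
 */
static JSBool
ExampleInstallTrap(JSContext *cx, JSScript *script, jsbytecode *pc,
                   JSTrapHandler handler, jsval closure)
{
    if (!JS_SetDebugMode(cx, JS_TRUE))   /* call before any scripted frames are live */
        return JS_FALSE;
    return JS_SetTrap(cx, script, pc, handler, closure);
}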
/*
* NB: FindTrap must be called with rt->debuggerLock acquired.
*/
@ -148,6 +225,9 @@ JS_SetTrap(JSContext *cx, JSScript *script, jsbytecode *pc,
JSRuntime *rt;
uint32 sample;
if (!CheckDebugMode(cx))
return JS_FALSE;
if (script == JSScript::emptyScript()) {
JS_ReportErrorFlagsAndNumber(cx, JSREPORT_ERROR, js_GetErrorMessage,
NULL, JSMSG_READ_ONLY, "empty script");
@ -190,6 +270,15 @@ JS_SetTrap(JSContext *cx, JSScript *script, jsbytecode *pc,
DBG_UNLOCK(rt);
if (junk)
cx->free(junk);
#ifdef JS_METHODJIT
if (script->ncode != NULL && script->ncode != JS_UNJITTABLE_METHOD) {
mjit::Recompiler recompiler(cx, script);
if (!recompiler.recompile())
return JS_FALSE;
}
#endif
return JS_TRUE;
}
@ -223,7 +312,7 @@ JS_ClearTrap(JSContext *cx, JSScript *script, jsbytecode *pc,
JSTrapHandler *handlerp, jsval *closurep)
{
JSTrap *trap;
DBG_LOCK(cx->runtime);
trap = FindTrap(cx->runtime, script, pc);
if (handlerp)
@ -234,6 +323,13 @@ JS_ClearTrap(JSContext *cx, JSScript *script, jsbytecode *pc,
DestroyTrapAndUnlock(cx, trap);
else
DBG_UNLOCK(cx->runtime);
#ifdef JS_METHODJIT
if (script->ncode != NULL && script->ncode != JS_UNJITTABLE_METHOD) {
mjit::Recompiler recompiler(cx, script);
recompiler.recompile();
}
#endif
}
JS_PUBLIC_API(void)
@ -413,7 +509,7 @@ JS_ClearInterrupt(JSRuntime *rt, JSInterruptHook *hoop, void **closurep)
typedef struct JSWatchPoint {
JSCList links;
JSObject *object; /* weak link, see js_FinalizeObject */
JSScopeProperty *sprop;
const Shape *shape;
PropertyOp setter;
JSWatchPointHandler handler;
JSObject *closure;
@ -424,7 +520,7 @@ typedef struct JSWatchPoint {
#define JSWP_HELD 0x2 /* held while running handler/setter */
static bool
IsWatchedProperty(JSContext *cx, JSScopeProperty *sprop);
IsWatchedProperty(JSContext *cx, const Shape &shape);
/*
* NB: DropWatchPointAndUnlock releases cx->runtime->debuggerLock in all cases.
@ -432,53 +528,53 @@ IsWatchedProperty(JSContext *cx, JSScopeProperty *sprop);
static JSBool
DropWatchPointAndUnlock(JSContext *cx, JSWatchPoint *wp, uintN flag)
{
JSBool ok;
JSScopeProperty *sprop;
JSScope *scope;
PropertyOp setter;
bool ok = true;
JSRuntime *rt = cx->runtime;
ok = JS_TRUE;
wp->flags &= ~flag;
if (wp->flags != 0) {
DBG_UNLOCK(cx->runtime);
DBG_UNLOCK(rt);
return ok;
}
/*
* Remove wp from the list, then if there are no other watchpoints for
* wp->sprop in any scope, restore wp->sprop->setter from wp.
* wp->shape in any scope, restore wp->shape->setter from wp.
*/
++cx->runtime->debuggerMutations;
++rt->debuggerMutations;
JS_REMOVE_LINK(&wp->links);
sprop = wp->sprop;
/*
* Passing null for the scope parameter tells js_GetWatchedSetter to find
* any watch point for sprop, and not to lock or unlock rt->debuggerLock.
* If js_ChangeNativePropertyAttrs fails, propagate failure after removing
* wp->closure's root and freeing wp.
*/
setter = js_GetWatchedSetter(cx->runtime, NULL, sprop);
DBG_UNLOCK(cx->runtime);
const Shape *shape = wp->shape;
PropertyOp setter = NULL;
for (JSWatchPoint *wp2 = (JSWatchPoint *)rt->watchPointList.next;
&wp2->links != &rt->watchPointList;
wp2 = (JSWatchPoint *)wp2->links.next) {
if (wp2->shape == shape) {
setter = wp->setter;
break;
}
}
DBG_UNLOCK(rt);
if (!setter) {
JS_LOCK_OBJ(cx, wp->object);
scope = wp->object->scope();
/*
* If the property wasn't found on wp->object, or it isn't still being
* watched, then someone else must have deleted or unwatched it, and we
* don't need to change the property attributes.
*/
JSScopeProperty *wprop = scope->lookup(sprop->id);
const Shape *wprop = wp->object->nativeLookup(shape->id);
if (wprop &&
wprop->hasSetterValue() == sprop->hasSetterValue() &&
IsWatchedProperty(cx, wprop)) {
sprop = scope->changeProperty(cx, wprop, 0, wprop->attributes(),
wprop->getter(), wp->setter);
if (!sprop)
ok = JS_FALSE;
wprop->hasSetterValue() == shape->hasSetterValue() &&
IsWatchedProperty(cx, *wprop)) {
shape = wp->object->changeProperty(cx, wprop, 0, wprop->attributes(),
wprop->getter(), wp->setter);
if (!shape)
ok = false;
}
JS_UNLOCK_SCOPE(cx, scope);
JS_UNLOCK_OBJ(cx, wp->object);
}
cx->free(wp);
@ -502,8 +598,8 @@ js_TraceWatchPoints(JSTracer *trc, JSObject *obj)
&wp->links != &rt->watchPointList;
wp = (JSWatchPoint *)wp->links.next) {
if (wp->object == obj) {
wp->sprop->trace(trc);
if (wp->sprop->hasSetterValue() && wp->setter)
wp->shape->trace(trc);
if (wp->shape->hasSetterValue() && wp->setter)
JS_CALL_OBJECT_TRACER(trc, CastAsObject(wp->setter), "wp->setter");
JS_CALL_OBJECT_TRACER(trc, wp->closure, "wp->closure");
}
@ -542,57 +638,30 @@ js_SweepWatchPoints(JSContext *cx)
* NB: FindWatchPoint must be called with rt->debuggerLock acquired.
*/
static JSWatchPoint *
FindWatchPoint(JSRuntime *rt, JSScope *scope, jsid id)
FindWatchPoint(JSRuntime *rt, JSObject *obj, jsid id)
{
JSWatchPoint *wp;
for (wp = (JSWatchPoint *)rt->watchPointList.next;
&wp->links != &rt->watchPointList;
wp = (JSWatchPoint *)wp->links.next) {
if (wp->object->scope() == scope && wp->sprop->id == id)
if (wp->object == obj && wp->shape->id == id)
return wp;
}
return NULL;
}
JSScopeProperty *
js_FindWatchPoint(JSRuntime *rt, JSScope *scope, jsid id)
const Shape *
js_FindWatchPoint(JSRuntime *rt, JSObject *obj, jsid id)
{
JSWatchPoint *wp;
JSScopeProperty *sprop;
const Shape *shape;
DBG_LOCK(rt);
wp = FindWatchPoint(rt, scope, id);
sprop = wp ? wp->sprop : NULL;
wp = FindWatchPoint(rt, obj, id);
shape = wp ? wp->shape : NULL;
DBG_UNLOCK(rt);
return sprop;
}
/*
* Secret handshake with DropWatchPointAndUnlock: if (!scope), we know our
* caller has acquired rt->debuggerLock, so we don't have to.
*/
PropertyOp
js_GetWatchedSetter(JSRuntime *rt, JSScope *scope,
const JSScopeProperty *sprop)
{
PropertyOp setter;
JSWatchPoint *wp;
setter = NULL;
if (scope)
DBG_LOCK(rt);
for (wp = (JSWatchPoint *)rt->watchPointList.next;
&wp->links != &rt->watchPointList;
wp = (JSWatchPoint *)wp->links.next) {
if ((!scope || wp->object->scope() == scope) && wp->sprop == sprop) {
setter = wp->setter;
break;
}
}
if (scope)
DBG_UNLOCK(rt);
return setter;
return shape;
}
JSBool
@ -603,22 +672,21 @@ js_watch_set(JSContext *cx, JSObject *obj, jsid id, Value *vp)
for (JSWatchPoint *wp = (JSWatchPoint *)rt->watchPointList.next;
&wp->links != &rt->watchPointList;
wp = (JSWatchPoint *)wp->links.next) {
JSScopeProperty *sprop = wp->sprop;
if (wp->object == obj && SPROP_USERID(sprop) == id &&
const Shape *shape = wp->shape;
if (wp->object == obj && SHAPE_USERID(shape) == id &&
!(wp->flags & JSWP_HELD)) {
wp->flags |= JSWP_HELD;
DBG_UNLOCK(rt);
JS_LOCK_OBJ(cx, obj);
jsid propid = sprop->id;
jsid userid = SPROP_USERID(sprop);
JSScope *scope = obj->scope();
jsid propid = shape->id;
jsid userid = SHAPE_USERID(shape);
JS_UNLOCK_OBJ(cx, obj);
/* NB: wp is held, so we can safely dereference it still. */
if (!wp->handler(cx, obj, propid,
SPROP_HAS_VALID_SLOT(sprop, scope)
? Jsvalify(obj->getSlotMT(cx, sprop->slot))
obj->containsSlot(shape->slot)
? Jsvalify(obj->getSlotMT(cx, shape->slot))
: JSVAL_VOID,
Jsvalify(vp), wp->closure)) {
DBG_LOCK(rt);
@ -631,11 +699,11 @@ js_watch_set(JSContext *cx, JSObject *obj, jsid id, Value *vp)
* prevent any funny business between watchpoints and setters.
*/
JSBool ok = !wp->setter ||
(sprop->hasSetterValue()
? InternalCall(cx, obj,
ObjectValue(*CastAsObject(wp->setter)),
1, vp, vp)
: callJSPropertyOpSetter(cx, wp->setter, obj, userid, vp));
(shape->hasSetterValue()
? ExternalInvoke(cx, obj,
ObjectValue(*CastAsObject(wp->setter)),
1, vp, vp)
: CallJSPropertyOpSetter(cx, wp->setter, obj, userid, vp));
DBG_LOCK(rt);
return DropWatchPointAndUnlock(cx, wp, JSWP_HELD) && ok;
@ -646,32 +714,32 @@ js_watch_set(JSContext *cx, JSObject *obj, jsid id, Value *vp)
}
JSBool
js_watch_set_wrapper(JSContext *cx, JSObject *obj, uintN argc, Value *argv,
Value *rval)
js_watch_set_wrapper(JSContext *cx, uintN argc, Value *vp)
{
JSObject *funobj;
JSFunction *wrapper;
jsid userid;
JSObject *obj = ComputeThisFromVp(cx, vp);
if (!obj)
return false;
funobj = &argv[-2].toObject();
wrapper = GET_FUNCTION_PRIVATE(cx, funobj);
userid = ATOM_TO_JSID(wrapper->atom);
*rval = argv[0];
return js_watch_set(cx, obj, userid, rval);
JSObject &funobj = JS_CALLEE(cx, vp).toObject();
JSFunction *wrapper = funobj.getFunctionPrivate();
jsid userid = ATOM_TO_JSID(wrapper->atom);
JS_SET_RVAL(cx, vp, argc ? JS_ARGV(cx, vp)[0] : UndefinedValue());
return js_watch_set(cx, obj, userid, vp);
}
static bool
IsWatchedProperty(JSContext *cx, JSScopeProperty *sprop)
IsWatchedProperty(JSContext *cx, const Shape &shape)
{
if (sprop->hasSetterValue()) {
JSObject *funobj = sprop->setterObject();
if (shape.hasSetterValue()) {
JSObject *funobj = shape.setterObject();
if (!funobj || !funobj->isFunction())
return false;
JSFunction *fun = GET_FUNCTION_PRIVATE(cx, funobj);
return FUN_NATIVE(fun) == js_watch_set_wrapper;
return fun->maybeNative() == js_watch_set_wrapper;
}
return sprop->setterOp() == js_watch_set;
return shape.setterOp() == js_watch_set;
}
PropertyOp
@ -710,7 +778,7 @@ JS_SetWatchPoint(JSContext *cx, JSObject *obj, jsid id,
jsid propid;
JSObject *pobj;
JSProperty *prop;
JSScopeProperty *sprop;
const Shape *shape;
JSRuntime *rt;
JSBool ok;
JSWatchPoint *wp;
@ -747,18 +815,18 @@ JS_SetWatchPoint(JSContext *cx, JSObject *obj, jsid id,
if (!js_LookupProperty(cx, obj, propid, &pobj, &prop))
return JS_FALSE;
sprop = (JSScopeProperty *) prop;
shape = (Shape *) prop;
rt = cx->runtime;
if (!sprop) {
if (!shape) {
/* Check for a deleted symbol watchpoint, which holds its property. */
sprop = js_FindWatchPoint(rt, obj->scope(), propid);
if (!sprop) {
shape = js_FindWatchPoint(rt, obj, propid);
if (!shape) {
/* Make a new property in obj so we can watch for the first set. */
if (!js_DefineNativeProperty(cx, obj, propid, UndefinedValue(), NULL, NULL,
JSPROP_ENUMERATE, 0, 0, &prop)) {
return JS_FALSE;
}
sprop = (JSScopeProperty *) prop;
shape = (Shape *) prop;
}
} else if (pobj != obj) {
/* Clone the prototype property so we can watch the right object. */
@ -768,14 +836,14 @@ JS_SetWatchPoint(JSContext *cx, JSObject *obj, jsid id,
intN shortid;
if (pobj->isNative()) {
valroot.set(SPROP_HAS_VALID_SLOT(sprop, pobj->scope())
? pobj->lockedGetSlot(sprop->slot)
valroot.set(pobj->containsSlot(shape->slot)
? pobj->lockedGetSlot(shape->slot)
: UndefinedValue());
getter = sprop->getter();
setter = sprop->setter();
attrs = sprop->attributes();
flags = sprop->getFlags();
shortid = sprop->shortid;
getter = shape->getter();
setter = shape->setter();
attrs = shape->attributes();
flags = shape->getFlags();
shortid = shape->shortid;
JS_UNLOCK_OBJ(cx, pobj);
} else {
if (!pobj->getProperty(cx, propid, valroot.addr()) ||
@ -793,19 +861,19 @@ JS_SetWatchPoint(JSContext *cx, JSObject *obj, jsid id,
shortid, &prop)) {
return JS_FALSE;
}
sprop = (JSScopeProperty *) prop;
shape = (Shape *) prop;
}
/*
* At this point, prop/sprop exists in obj, obj is locked, and we must
* At this point, prop/shape exists in obj, obj is locked, and we must
* unlock the object before returning.
*/
ok = JS_TRUE;
DBG_LOCK(rt);
wp = FindWatchPoint(rt, obj->scope(), propid);
wp = FindWatchPoint(rt, obj, propid);
if (!wp) {
DBG_UNLOCK(rt);
watcher = js_WrapWatchedSetter(cx, propid, sprop->attributes(), sprop->setter());
watcher = js_WrapWatchedSetter(cx, propid, shape->attributes(), shape->setter());
if (!watcher) {
ok = JS_FALSE;
goto out;
@ -819,13 +887,13 @@ JS_SetWatchPoint(JSContext *cx, JSObject *obj, jsid id,
wp->handler = NULL;
wp->closure = NULL;
wp->object = obj;
wp->setter = sprop->setter();
wp->setter = shape->setter();
wp->flags = JSWP_LIVE;
/* XXXbe nest in obj lock here */
sprop = js_ChangeNativePropertyAttrs(cx, obj, sprop, 0, sprop->attributes(),
sprop->getter(), watcher);
if (!sprop) {
shape = js_ChangeNativePropertyAttrs(cx, obj, shape, 0, shape->attributes(),
shape->getter(), watcher);
if (!shape) {
/* Self-link so DropWatchPointAndUnlock can JS_REMOVE_LINK it. */
JS_INIT_CLIST(&wp->links);
DBG_LOCK(rt);
@ -833,7 +901,7 @@ JS_SetWatchPoint(JSContext *cx, JSObject *obj, jsid id,
ok = JS_FALSE;
goto out;
}
wp->sprop = sprop;
wp->shape = shape;
/*
* Now that wp is fully initialized, append it to rt's wp list.
@ -841,7 +909,7 @@ JS_SetWatchPoint(JSContext *cx, JSObject *obj, jsid id,
* a watchpoint for (obj, propid).
*/
DBG_LOCK(rt);
JS_ASSERT(!FindWatchPoint(rt, obj->scope(), propid));
JS_ASSERT(!FindWatchPoint(rt, obj, propid));
JS_APPEND_LINK(&wp->links, &rt->watchPointList);
++rt->debuggerMutations;
}
@ -866,7 +934,7 @@ JS_ClearWatchPoint(JSContext *cx, JSObject *obj, jsid id,
for (wp = (JSWatchPoint *)rt->watchPointList.next;
&wp->links != &rt->watchPointList;
wp = (JSWatchPoint *)wp->links.next) {
if (wp->object == obj && SPROP_USERID(wp->sprop) == id) {
if (wp->object == obj && SHAPE_USERID(wp->shape) == id) {
if (handlerp)
*handlerp = wp->handler;
if (closurep)
@ -962,7 +1030,7 @@ extern JS_PUBLIC_API(jsuword *)
JS_GetFunctionLocalNameArray(JSContext *cx, JSFunction *fun, void **markp)
{
*markp = JS_ARENA_MARK(&cx->tempPool);
return js_GetLocalNameArray(cx, fun, &cx->tempPool);
return fun->getLocalNameArray(cx, &cx->tempPool);
}
extern JS_PUBLIC_API(JSAtom *)
@ -992,13 +1060,7 @@ JS_GetFunctionScript(JSContext *cx, JSFunction *fun)
JS_PUBLIC_API(JSNative)
JS_GetFunctionNative(JSContext *cx, JSFunction *fun)
{
return Jsvalify(FUN_NATIVE(fun));
}
JS_PUBLIC_API(JSFastNative)
JS_GetFunctionFastNative(JSContext *cx, JSFunction *fun)
{
return Jsvalify(FUN_FAST_NATIVE(fun));
return Jsvalify(fun->maybeNative());
}
JS_PUBLIC_API(JSPrincipals *)
@ -1037,8 +1099,8 @@ JS_GetScriptedCaller(JSContext *cx, JSStackFrame *fp)
return js_GetScriptedCaller(cx, fp);
}
JS_PUBLIC_API(JSPrincipals *)
JS_StackFramePrincipals(JSContext *cx, JSStackFrame *fp)
JSPrincipals *
js_StackFramePrincipals(JSContext *cx, JSStackFrame *fp)
{
JSSecurityCallbacks *callbacks;
@ -1068,24 +1130,18 @@ js_EvalFramePrincipals(JSContext *cx, JSObject *callee, JSStackFrame *caller)
principals = NULL;
if (!caller)
return principals;
callerPrincipals = JS_StackFramePrincipals(cx, caller);
callerPrincipals = js_StackFramePrincipals(cx, caller);
return (callerPrincipals && principals &&
callerPrincipals->subsume(callerPrincipals, principals))
? principals
: callerPrincipals;
}
JS_PUBLIC_API(JSPrincipals *)
JS_EvalFramePrincipals(JSContext *cx, JSStackFrame *fp, JSStackFrame *caller)
{
return js_EvalFramePrincipals(cx, fp->callee(), caller);
}
JS_PUBLIC_API(void *)
JS_GetFrameAnnotation(JSContext *cx, JSStackFrame *fp)
{
if (fp->hasAnnotation() && fp->hasScript()) {
JSPrincipals *principals = JS_StackFramePrincipals(cx, fp);
JSPrincipals *principals = js_StackFramePrincipals(cx, fp);
if (principals && principals->globalPrivilegesEnabled(cx, principals)) {
/*
@ -1110,16 +1166,16 @@ JS_GetFramePrincipalArray(JSContext *cx, JSStackFrame *fp)
{
JSPrincipals *principals;
principals = JS_StackFramePrincipals(cx, fp);
principals = js_StackFramePrincipals(cx, fp);
if (!principals)
return NULL;
return principals->getPrincipalArray(cx, principals);
}
JS_PUBLIC_API(JSBool)
JS_IsNativeFrame(JSContext *cx, JSStackFrame *fp)
JS_IsScriptFrame(JSContext *cx, JSStackFrame *fp)
{
return !fp->hasScript();
return !fp->isDummyFrame();
}
/* this is deprecated, use JS_GetFrameScopeChain instead */
@ -1277,6 +1333,9 @@ JS_EvaluateUCInStackFrame(JSContext *cx, JSStackFrame *fp,
{
JS_ASSERT_NOT_ON_TRACE(cx);
if (!CheckDebugMode(cx))
return JS_FALSE;
JSObject *scobj = JS_GetFrameScopeChain(cx, fp);
if (!scobj)
return false;
@ -1287,7 +1346,7 @@ JS_EvaluateUCInStackFrame(JSContext *cx, JSStackFrame *fp,
* we use a static level that will cause us not to attempt to optimize
* variable references made by this frame.
*/
JSScript *script = Compiler::compileScript(cx, scobj, fp, JS_StackFramePrincipals(cx, fp),
JSScript *script = Compiler::compileScript(cx, scobj, fp, js_StackFramePrincipals(cx, fp),
TCF_COMPILE_N_GO, chars, length, NULL,
filename, lineno, NULL,
UpvarCookie::UPVAR_LEVEL_LIMIT);
@ -1310,6 +1369,9 @@ JS_EvaluateInStackFrame(JSContext *cx, JSStackFrame *fp,
jschar *chars;
JSBool ok;
size_t len = length;
if (!CheckDebugMode(cx))
return JS_FALSE;
chars = js_InflateString(cx, bytes, &len);
if (!chars)
@ -1324,34 +1386,40 @@ JS_EvaluateInStackFrame(JSContext *cx, JSStackFrame *fp,
/************************************************************************/
/* XXXbe this all needs to be reworked to avoid requiring JSScope types. */
/* This all should be reworked to avoid requiring JSScopeProperty types. */
JS_PUBLIC_API(JSScopeProperty *)
JS_PropertyIterator(JSObject *obj, JSScopeProperty **iteratorp)
{
JSScopeProperty *sprop;
JSScope *scope;
const Shape *shape;
sprop = *iteratorp;
scope = obj->scope();
/* The caller passes null in *iteratorp to get things started. */
shape = (Shape *) *iteratorp;
if (!shape) {
shape = obj->lastProperty();
} else {
shape = shape->previous();
if (!shape->previous()) {
JS_ASSERT(JSID_IS_EMPTY(shape->id));
shape = NULL;
}
}
/* XXXbe minor(?) incompatibility: iterate in reverse definition order */
sprop = sprop ? sprop->parent : scope->lastProperty();
*iteratorp = sprop;
return sprop;
return *iteratorp = reinterpret_cast<JSScopeProperty *>(const_cast<Shape *>(shape));
}
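For reference, a hedged caller-side sketch of the iterator just defined (the DumpOwnProperties helper is hypothetical): *iteratorp starts as null and the loop ends when the iterator returns null; rooting of pd.id and pd.value is elided for brevity.
static JSBool
DumpOwnProperties(JSContext *cx, JSObject *obj)
{
    JSScopeProperty *iter = NULL;
    while (JSScopeProperty *sprop = JS_PropertyIterator(obj, &iter)) {
        JSPropertyDesc pd;
        if (!JS_GetPropertyDesc(cx, obj, sprop, &pd))
            return JS_FALSE;
        /* ... inspect pd.id, pd.value and pd.flags ... */
    }
    return JS_TRUE;
}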
JS_PUBLIC_API(JSBool)
JS_GetPropertyDesc(JSContext *cx, JSObject *obj, JSScopeProperty *sprop,
JSPropertyDesc *pd)
{
pd->id = IdToJsval(sprop->id);
Shape *shape = (Shape *) sprop;
pd->id = IdToJsval(shape->id);
JSBool wasThrowing = cx->throwing;
AutoValueRooter lastException(cx, cx->exception);
cx->throwing = JS_FALSE;
if (!js_GetProperty(cx, obj, sprop->id, Valueify(&pd->value))) {
if (!js_GetProperty(cx, obj, shape->id, Valueify(&pd->value))) {
if (!cx->throwing) {
pd->flags = JSPD_ERROR;
pd->value = JSVAL_VOID;
@ -1367,27 +1435,26 @@ JS_GetPropertyDesc(JSContext *cx, JSObject *obj, JSScopeProperty *sprop,
if (wasThrowing)
cx->exception = lastException.value();
pd->flags |= (sprop->enumerable() ? JSPD_ENUMERATE : 0)
| (!sprop->writable() ? JSPD_READONLY : 0)
| (!sprop->configurable() ? JSPD_PERMANENT : 0);
pd->flags |= (shape->enumerable() ? JSPD_ENUMERATE : 0)
| (!shape->writable() ? JSPD_READONLY : 0)
| (!shape->configurable() ? JSPD_PERMANENT : 0);
pd->spare = 0;
if (sprop->getter() == js_GetCallArg) {
pd->slot = sprop->shortid;
if (shape->getter() == js_GetCallArg) {
pd->slot = shape->shortid;
pd->flags |= JSPD_ARGUMENT;
} else if (sprop->getter() == js_GetCallVar) {
pd->slot = sprop->shortid;
} else if (shape->getter() == js_GetCallVar) {
pd->slot = shape->shortid;
pd->flags |= JSPD_VARIABLE;
} else {
pd->slot = 0;
}
pd->alias = JSVAL_VOID;
JSScope *scope = obj->scope();
if (SPROP_HAS_VALID_SLOT(sprop, scope)) {
JSScopeProperty *aprop;
for (aprop = scope->lastProperty(); aprop; aprop = aprop->parent) {
if (aprop != sprop && aprop->slot == sprop->slot) {
pd->alias = IdToJsval(aprop->id);
if (obj->containsSlot(shape->slot)) {
for (Shape::Range r = obj->lastProperty()->all(); !r.empty(); r.popFront()) {
const Shape &aprop = r.front();
if (&aprop != shape && aprop.slot == shape->slot) {
pd->alias = IdToJsval(aprop.id);
break;
}
}
@ -1398,11 +1465,6 @@ JS_GetPropertyDesc(JSContext *cx, JSObject *obj, JSScopeProperty *sprop,
JS_PUBLIC_API(JSBool)
JS_GetPropertyDescArray(JSContext *cx, JSObject *obj, JSPropertyDescArray *pda)
{
JSScope *scope;
uint32 i, n;
JSPropertyDesc *pd;
JSScopeProperty *sprop;
Class *clasp = obj->getClass();
if (!obj->isNative() || (clasp->flags & JSCLASS_NEW_ENUMERATE)) {
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL,
@ -1412,25 +1474,25 @@ JS_GetPropertyDescArray(JSContext *cx, JSObject *obj, JSPropertyDescArray *pda)
if (!clasp->enumerate(cx, obj))
return JS_FALSE;
/* have no props, or object's scope has not mutated from that of proto */
scope = obj->scope();
if (scope->entryCount == 0) {
/* Return an empty pda early if obj has no own properties. */
if (obj->nativeEmpty()) {
pda->length = 0;
pda->array = NULL;
return JS_TRUE;
}
n = scope->entryCount;
pd = (JSPropertyDesc *) cx->malloc((size_t)n * sizeof(JSPropertyDesc));
uint32 n = obj->propertyCount();
JSPropertyDesc *pd = (JSPropertyDesc *) cx->malloc(size_t(n) * sizeof(JSPropertyDesc));
if (!pd)
return JS_FALSE;
i = 0;
for (sprop = scope->lastProperty(); sprop; sprop = sprop->parent) {
uint32 i = 0;
for (Shape::Range r = obj->lastProperty()->all(); !r.empty(); r.popFront()) {
if (!js_AddRoot(cx, Valueify(&pd[i].id), NULL))
goto bad;
if (!js_AddRoot(cx, Valueify(&pd[i].value), NULL))
goto bad;
if (!JS_GetPropertyDesc(cx, obj, sprop, &pd[i]))
Shape *shape = const_cast<Shape *>(&r.front());
if (!JS_GetPropertyDesc(cx, obj, reinterpret_cast<JSScopeProperty *>(shape), &pd[i]))
goto bad;
if ((pd[i].flags & JSPD_ALIAS) && !js_AddRoot(cx, Valueify(&pd[i].alias), NULL))
goto bad;
@ -1580,21 +1642,14 @@ JS_SetDebugErrorHook(JSRuntime *rt, JSDebugErrorHook hook, void *closure)
JS_PUBLIC_API(size_t)
JS_GetObjectTotalSize(JSContext *cx, JSObject *obj)
{
size_t nbytes;
JSScope *scope;
size_t nbytes = (obj->isFunction() && obj->getPrivate() == obj)
? sizeof(JSFunction)
: sizeof *obj;
nbytes = sizeof *obj;
if (obj->dslots) {
nbytes += (obj->dslots[-1].toPrivateUint32() - JS_INITIAL_NSLOTS + 1)
* sizeof obj->dslots[0];
}
if (obj->isNative()) {
scope = obj->scope();
if (!scope->isSharedEmpty()) {
nbytes += sizeof *scope;
nbytes += SCOPE_CAPACITY(scope) * sizeof(JSScopeProperty *);
}
}
return nbytes;
}
@ -1811,50 +1866,50 @@ JS_DisconnectShark()
}
JS_FRIEND_API(JSBool)
js_StartShark(JSContext *cx, JSObject *obj,
uintN argc, jsval *argv, jsval *rval)
js_StartShark(JSContext *cx, uintN argc, jsval *vp)
{
if (!JS_StartChudRemote()) {
JS_ReportError(cx, "Error starting CHUD.");
return JS_FALSE;
}
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return JS_TRUE;
}
JS_FRIEND_API(JSBool)
js_StopShark(JSContext *cx, JSObject *obj,
uintN argc, jsval *argv, jsval *rval)
js_StopShark(JSContext *cx, uintN argc, jsval *vp)
{
if (!JS_StopChudRemote()) {
JS_ReportError(cx, "Error stopping CHUD.");
return JS_FALSE;
}
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return JS_TRUE;
}
JS_FRIEND_API(JSBool)
js_ConnectShark(JSContext *cx, JSObject *obj,
uintN argc, jsval *argv, jsval *rval)
js_ConnectShark(JSContext *cx, uintN argc, jsval *vp)
{
if (!JS_ConnectShark()) {
JS_ReportError(cx, "Error connecting to Shark.");
return JS_FALSE;
}
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return JS_TRUE;
}
JS_FRIEND_API(JSBool)
js_DisconnectShark(JSContext *cx, JSObject *obj,
uintN argc, jsval *argv, jsval *rval)
js_DisconnectShark(JSContext *cx, uintN argc, jsval *vp)
{
if (!JS_DisconnectShark()) {
JS_ReportError(cx, "Error disconnecting from Shark.");
return JS_FALSE;
}
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return JS_TRUE;
}
@ -1865,29 +1920,29 @@ js_DisconnectShark(JSContext *cx, JSObject *obj,
#include <valgrind/callgrind.h>
JS_FRIEND_API(JSBool)
js_StartCallgrind(JSContext *cx, JSObject *obj,
uintN argc, jsval *argv, jsval *rval)
js_StartCallgrind(JSContext *cx, uintN argc, jsval *vp)
{
CALLGRIND_START_INSTRUMENTATION;
CALLGRIND_ZERO_STATS;
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return JS_TRUE;
}
JS_FRIEND_API(JSBool)
js_StopCallgrind(JSContext *cx, JSObject *obj,
uintN argc, jsval *argv, jsval *rval)
js_StopCallgrind(JSContext *cx, uintN argc, jsval *vp)
{
CALLGRIND_STOP_INSTRUMENTATION;
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return JS_TRUE;
}
JS_FRIEND_API(JSBool)
js_DumpCallgrind(JSContext *cx, JSObject *obj,
uintN argc, jsval *argv, jsval *rval)
js_DumpCallgrind(JSContext *cx, uintN argc, jsval *vp)
{
JSString *str;
char *cstr;
jsval *argv = JS_ARGV(cx, vp);
if (argc > 0 && JSVAL_IS_STRING(argv[0])) {
str = JSVAL_TO_STRING(argv[0]);
cstr = js_DeflateString(cx, str->chars(), str->length());
@ -1899,6 +1954,7 @@ js_DumpCallgrind(JSContext *cx, JSObject *obj,
}
CALLGRIND_DUMP_STATS;
JS_SET_RVAL(cx, vp, JSVAL_VOID);
return JS_TRUE;
}
@ -2033,7 +2089,7 @@ js_ResumeVtune(JSContext *cx, JSObject *obj,
*
*/
#if defined(XP_WIN)
#include <windows.h>
#include "jswin.h"
#else
#include <sys/time.h>
#endif

View file

@ -49,6 +49,28 @@
JS_BEGIN_EXTERN_C
/*
* Debug mode is a compartment-wide mode that enables a debugger to attach
* to and interact with running methodjit-ed frames. In particular, it causes
* every function to be compiled as if an eval was present (so eval-in-frame)
* can work, and it ensures that functions can be re-JITed for other debug
* features. In general, it is not safe to interact with frames that were live
* before debug mode was enabled. For this reason, it is also not safe to
* enable debug mode while frames are live.
*/
/* Get current state of debugging mode. */
extern JS_PUBLIC_API(JSBool)
JS_GetDebugMode(JSContext *cx);
/* Turn on debugging mode, ignoring the presence of live frames. */
extern JS_FRIEND_API(JSBool)
js_SetDebugMode(JSContext *cx, JSBool debug);
/* Turn on debugging mode. */
extern JS_PUBLIC_API(JSBool)
JS_SetDebugMode(JSContext *cx, JSBool debug);
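A hedged sketch of how an embedding might drive these two entry points, subject to the live-frame caveat above; the EnsureDebugMode wrapper is hypothetical.
static JSBool
EnsureDebugMode(JSContext *cx)
{
    /* Only safe while no scripted frames from this compartment are live. */
    if (JS_GetDebugMode(cx))
        return JS_TRUE;
    return JS_SetDebugMode(cx, JS_TRUE);
}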
/*
* Unexported library-private helper used to unpatch all traps in a script.
* Returns script->code if script has no traps, else a JS_malloc'ed copy of
@ -111,24 +133,16 @@ js_TraceWatchPoints(JSTracer *trc, JSObject *obj);
extern void
js_SweepWatchPoints(JSContext *cx);
extern JSScopeProperty *
js_FindWatchPoint(JSRuntime *rt, JSScope *scope, jsid id);
#ifdef __cplusplus
/*
* NB: callers outside of jsdbgapi.c must pass non-null scope.
*/
extern js::PropertyOp
js_GetWatchedSetter(JSRuntime *rt, JSScope *scope,
const JSScopeProperty *sprop);
extern const js::Shape *
js_FindWatchPoint(JSRuntime *rt, JSObject *obj, jsid id);
extern JSBool
js_watch_set(JSContext *cx, JSObject *obj, jsid id, js::Value *vp);
extern JSBool
js_watch_set_wrapper(JSContext *cx, JSObject *obj, uintN argc, js::Value *argv,
js::Value *rval);
js_watch_set_wrapper(JSContext *cx, uintN argc, js::Value *vp);
extern js::PropertyOp
js_WrapWatchedSetter(JSContext *cx, jsid id, uintN attrs, js::PropertyOp setter);
@ -174,9 +188,6 @@ JS_GetFunctionScript(JSContext *cx, JSFunction *fun);
extern JS_PUBLIC_API(JSNative)
JS_GetFunctionNative(JSContext *cx, JSFunction *fun);
extern JS_PUBLIC_API(JSFastNative)
JS_GetFunctionFastNative(JSContext *cx, JSFunction *fun);
extern JS_PUBLIC_API(JSPrincipals *)
JS_GetScriptPrincipals(JSContext *cx, JSScript *script);
@ -206,22 +217,8 @@ JS_GetScriptedCaller(JSContext *cx, JSStackFrame *fp);
* Return a weak reference to fp's principals. A null return does not denote
* an error, it means there are no principals.
*/
extern JS_PUBLIC_API(JSPrincipals *)
JS_StackFramePrincipals(JSContext *cx, JSStackFrame *fp);
/*
* This API is like JS_StackFramePrincipals(cx, caller), except that if
* cx->runtime->findObjectPrincipals is non-null, it returns the weaker of
* the caller's principals and the object principals of fp's callee function
* object (fp->argv[-2]), which is eval, Function, or a similar eval-like
* method. The caller parameter should be JS_GetScriptedCaller(cx, fp).
*
* All eval-like methods must use JS_EvalFramePrincipals to acquire a weak
* reference to the correct principals for the eval call to be secure, given
* an embedding that calls JS_SetObjectPrincipalsFinder (see jsapi.h).
*/
extern JS_PUBLIC_API(JSPrincipals *)
JS_EvalFramePrincipals(JSContext *cx, JSStackFrame *fp, JSStackFrame *caller);
extern JSPrincipals *
js_StackFramePrincipals(JSContext *cx, JSStackFrame *fp);
JSPrincipals *
js_EvalFramePrincipals(JSContext *cx, JSObject *callee, JSStackFrame *caller);
@ -236,7 +233,7 @@ extern JS_PUBLIC_API(void *)
JS_GetFramePrincipalArray(JSContext *cx, JSStackFrame *fp);
extern JS_PUBLIC_API(JSBool)
JS_IsNativeFrame(JSContext *cx, JSStackFrame *fp);
JS_IsScriptFrame(JSContext *cx, JSStackFrame *fp);
/* this is deprecated, use JS_GetFrameScopeChain instead */
extern JS_PUBLIC_API(JSObject *)
@ -380,11 +377,13 @@ typedef struct JSPropertyDescArray {
JSPropertyDesc *array; /* alloc'd by Get, freed by Put */
} JSPropertyDescArray;
typedef struct JSScopeProperty JSScopeProperty;
extern JS_PUBLIC_API(JSScopeProperty *)
JS_PropertyIterator(JSObject *obj, JSScopeProperty **iteratorp);
extern JS_PUBLIC_API(JSBool)
JS_GetPropertyDesc(JSContext *cx, JSObject *obj, JSScopeProperty *sprop,
JS_GetPropertyDesc(JSContext *cx, JSObject *obj, JSScopeProperty *shape,
JSPropertyDesc *pd);
extern JS_PUBLIC_API(JSBool)
@ -518,36 +517,29 @@ extern JS_PUBLIC_API(JSBool)
JS_DisconnectShark();
extern JS_FRIEND_API(JSBool)
js_StopShark(JSContext *cx, JSObject *obj, uintN argc, jsval *argv,
jsval *rval);
js_StopShark(JSContext *cx, uintN argc, jsval *vp);
extern JS_FRIEND_API(JSBool)
js_StartShark(JSContext *cx, JSObject *obj, uintN argc, jsval *argv,
jsval *rval);
js_StartShark(JSContext *cx, uintN argc, jsval *vp);
extern JS_FRIEND_API(JSBool)
js_ConnectShark(JSContext *cx, JSObject *obj, uintN argc, jsval *argv,
jsval *rval);
js_ConnectShark(JSContext *cx, uintN argc, jsval *vp);
extern JS_FRIEND_API(JSBool)
js_DisconnectShark(JSContext *cx, JSObject *obj, uintN argc, jsval *argv,
jsval *rval);
js_DisconnectShark(JSContext *cx, uintN argc, jsval *vp);
#endif /* MOZ_SHARK */
#ifdef MOZ_CALLGRIND
extern JS_FRIEND_API(JSBool)
js_StopCallgrind(JSContext *cx, JSObject *obj, uintN argc, jsval *argv,
jsval *rval);
js_StopCallgrind(JSContext *cx, uintN argc, jsval *vp);
extern JS_FRIEND_API(JSBool)
js_StartCallgrind(JSContext *cx, JSObject *obj, uintN argc, jsval *argv,
jsval *rval);
js_StartCallgrind(JSContext *cx, uintN argc, jsval *vp);
extern JS_FRIEND_API(JSBool)
js_DumpCallgrind(JSContext *cx, JSObject *obj, uintN argc, jsval *argv,
jsval *rval);
js_DumpCallgrind(JSContext *cx, uintN argc, jsval *vp);
#endif /* MOZ_CALLGRIND */

View file

@ -1,200 +0,0 @@
/* -*- Mode: C; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=8 sw=4 et tw=80:
*
* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* Copyright (C) 2007 Sun Microsystems, Inc. All Rights Reserved.
*
* Contributor(s):
* Brendan Eich <brendan@mozilla.org>
*
* Alternatively, the contents of this file may be used under the terms of
* either of the GNU General Public License Version 2 or later (the "GPL"),
* or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
#ifdef INCLUDE_MOZILLA_DTRACE
#include "javascript-trace.h"
#endif
#include "jspubtd.h"
#include "jsprvtd.h"
#ifndef _JSDTRACEF_H
#define _JSDTRACEF_H
namespace js {
class DTrace {
static void enterJSFunImpl(JSContext *cx, JSStackFrame *fp, const JSFunction *fun);
static void handleFunctionInfo(JSContext *cx, JSStackFrame *fp, JSStackFrame *dfp,
JSFunction *fun);
static void handleFunctionArgs(JSContext *cx, JSStackFrame *fp, const JSFunction *fun,
jsuint argc, js::Value *argv);
static void handleFunctionRval(JSContext *cx, JSStackFrame *fp, JSFunction *fun,
const js::Value &rval);
static void handleFunctionReturn(JSContext *cx, JSStackFrame *fp, JSFunction *fun);
static void finalizeObjectImpl(JSObject *obj);
public:
/*
* If |lval| is provided to the enter/exit methods, it is tested to see if
* it is a function as a predicate to the dtrace event emission.
*/
static void enterJSFun(JSContext *cx, JSStackFrame *fp, JSFunction *fun,
JSStackFrame *dfp, jsuint argc, js::Value *argv,
js::Value *lval = NULL);
static void exitJSFun(JSContext *cx, JSStackFrame *fp, JSFunction *fun,
const js::Value &rval,
js::Value *lval = NULL);
static void finalizeObject(JSObject *obj);
class ExecutionScope {
const JSContext *cx;
const JSScript *script;
void startExecution();
void endExecution();
public:
explicit ExecutionScope(JSContext *cx, JSScript *script);
~ExecutionScope();
};
class ObjectCreationScope {
JSContext * const cx;
JSStackFrame * const fp;
js::Class * const clasp;
void handleCreationStart();
void handleCreationImpl(JSObject *obj);
void handleCreationEnd();
public:
ObjectCreationScope(JSContext *cx, JSStackFrame *fp, js::Class *clasp);
void handleCreation(JSObject *obj);
~ObjectCreationScope();
};
};
inline void
DTrace::enterJSFun(JSContext *cx, JSStackFrame *fp, JSFunction *fun, JSStackFrame *dfp,
jsuint argc, js::Value *argv, js::Value *lval)
{
#ifdef INCLUDE_MOZILLA_DTRACE
if (!lval || IsFunctionObject(*lval)) {
if (JAVASCRIPT_FUNCTION_ENTRY_ENABLED())
enterJSFunImpl(cx, fp, fun);
if (JAVASCRIPT_FUNCTION_INFO_ENABLED())
handleFunctionInfo(cx, fp, dfp, fun);
if (JAVASCRIPT_FUNCTION_ARGS_ENABLED())
handleFunctionArgs(cx, fp, fun, argc, argv);
}
#endif
#ifdef MOZ_TRACE_JSCALLS
cx->doFunctionCallback(fun, fun ? FUN_SCRIPT(fun) : NULL, true);
#endif
}
inline void
DTrace::exitJSFun(JSContext *cx, JSStackFrame *fp, JSFunction *fun,
const js::Value &rval, js::Value *lval)
{
#ifdef INCLUDE_MOZILLA_DTRACE
if (!lval || IsFunctionObject(*lval)) {
if (JAVASCRIPT_FUNCTION_RVAL_ENABLED())
handleFunctionRval(cx, fp, fun, rval);
if (JAVASCRIPT_FUNCTION_RETURN_ENABLED())
handleFunctionReturn(cx, fp, fun);
}
#endif
#ifdef MOZ_TRACE_JSCALLS
cx->doFunctionCallback(fun, fun ? FUN_SCRIPT(fun) : NULL, false);
#endif
}
inline void
DTrace::finalizeObject(JSObject *obj)
{
#ifdef INCLUDE_MOZILLA_DTRACE
if (JAVASCRIPT_OBJECT_FINALIZE_ENABLED())
finalizeObjectImpl(obj);
#endif
}
/* Execution scope. */
inline
DTrace::ExecutionScope::ExecutionScope(JSContext *cx, JSScript *script)
: cx(cx), script(script)
{
#ifdef INCLUDE_MOZILLA_DTRACE
if (JAVASCRIPT_EXECUTE_START_ENABLED())
startExecution();
#endif
#ifdef MOZ_TRACE_JSCALLS
cx->doFunctionCallback(NULL, script, true);
#endif
}
inline
DTrace::ExecutionScope::~ExecutionScope()
{
#ifdef INCLUDE_MOZILLA_DTRACE
if (JAVASCRIPT_EXECUTE_DONE_ENABLED())
endExecution();
#endif
#ifdef MOZ_TRACE_JSCALLS
cx->doFunctionCallback(NULL, script, false);
#endif
}
/* Object creation scope. */
inline
DTrace::ObjectCreationScope::ObjectCreationScope(JSContext *cx, JSStackFrame *fp, js::Class *clasp)
: cx(cx), fp(fp), clasp(clasp)
{
#ifdef INCLUDE_MOZILLA_DTRACE
if (JAVASCRIPT_OBJECT_CREATE_START_ENABLED())
handleCreationStart();
#endif
}
inline void
DTrace::ObjectCreationScope::handleCreation(JSObject *obj)
{
#ifdef INCLUDE_MOZILLA_DTRACE
if (JAVASCRIPT_OBJECT_CREATE_ENABLED())
handleCreationImpl(obj);
#endif
}
inline
DTrace::ObjectCreationScope::~ObjectCreationScope()
{
#ifdef INCLUDE_MOZILLA_DTRACE
if (JAVASCRIPT_OBJECT_CREATE_DONE_ENABLED())
handleCreationEnd();
#endif
}
} /* namespace js */
#endif /* _JSDTRACE_H */

View file

@ -101,7 +101,8 @@ JSCodeGenerator::JSCodeGenerator(Parser *parser,
arrayCompDepth(0),
emitLevel(0),
constMap(parser->context),
constList(parser->context)
constList(parser->context),
globalUses(ContextAllocPolicy(parser->context))
{
flags = TCF_COMPILING;
memset(&prolog, 0, sizeof prolog);
@ -187,6 +188,25 @@ UpdateDepth(JSContext *cx, JSCodeGenerator *cg, ptrdiff_t target)
depth = (uintN) cg->stackDepth +
((cs->format & JOF_TMPSLOT_MASK) >> JOF_TMPSLOT_SHIFT) +
extra;
/* :TODO: hack - remove later. */
switch (op) {
case JSOP_PROPINC:
case JSOP_PROPDEC:
depth += 1;
break;
case JSOP_NAMEINC:
case JSOP_NAMEDEC:
case JSOP_INCNAME:
case JSOP_DECNAME:
case JSOP_GNAMEINC:
case JSOP_GNAMEDEC:
case JSOP_INCGNAME:
case JSOP_DECGNAME:
depth += 2;
break;
default:
break;
}
if (depth > cg->maxStackDepth)
cg->maxStackDepth = depth;
}
@ -1275,9 +1295,9 @@ JSTreeContext::ensureSharpSlots()
return false;
sharpSlotBase = fun->u.i.nvars;
if (!js_AddLocal(cx, fun, sharpArrayAtom, JSLOCAL_VAR))
if (!fun->addLocal(cx, sharpArrayAtom, JSLOCAL_VAR))
return false;
if (!js_AddLocal(cx, fun, sharpDepthAtom, JSLOCAL_VAR))
if (!fun->addLocal(cx, sharpDepthAtom, JSLOCAL_VAR))
return false;
} else {
/*
@ -1561,10 +1581,6 @@ js_DefineCompileTimeConstant(JSContext *cx, JSCodeGenerator *cg, JSAtom *atom,
JSStmtInfo *
js_LexicalLookup(JSTreeContext *tc, JSAtom *atom, jsint *slotp, JSStmtInfo *stmt)
{
JSObject *obj;
JSScope *scope;
JSScopeProperty *sprop;
if (!stmt)
stmt = tc->topScopeStmt;
for (; stmt; stmt = stmt->downScope) {
@ -1575,17 +1591,17 @@ js_LexicalLookup(JSTreeContext *tc, JSAtom *atom, jsint *slotp, JSStmtInfo *stmt
if (!(stmt->flags & SIF_SCOPE))
continue;
obj = stmt->blockObj;
JSObject *obj = stmt->blockObj;
JS_ASSERT(obj->getClass() == &js_BlockClass);
scope = obj->scope();
sprop = scope->lookup(ATOM_TO_JSID(atom));
if (sprop) {
JS_ASSERT(sprop->hasShortID());
const Shape *shape = obj->nativeLookup(ATOM_TO_JSID(atom));
if (shape) {
JS_ASSERT(shape->hasShortID());
if (slotp) {
JS_ASSERT(obj->fslots[JSSLOT_BLOCK_DEPTH].isInt32());
*slotp = obj->fslots[JSSLOT_BLOCK_DEPTH].toInt32() +
sprop->shortid;
shape->shortid;
}
return stmt;
}
@ -1634,30 +1650,29 @@ LookupCompileTimeConstant(JSContext *cx, JSCodeGenerator *cg, JSAtom *atom,
* nor can prop be deleted.
*/
if (cg->inFunction()) {
if (js_LookupLocal(cx, cg->fun, atom, NULL) != JSLOCAL_NONE)
if (cg->fun->lookupLocal(cx, atom, NULL) != JSLOCAL_NONE)
break;
} else {
JS_ASSERT(cg->compileAndGo());
obj = cg->scopeChain;
JS_LOCK_OBJ(cx, obj);
JSScope *scope = obj->scope();
JSScopeProperty *sprop = scope->lookup(ATOM_TO_JSID(atom));
if (sprop) {
const Shape *shape = obj->nativeLookup(ATOM_TO_JSID(atom));
if (shape) {
/*
* We're compiling code that will be executed immediately,
* not re-executed against a different scope chain and/or
* variable object. Therefore we can get constant values
* from our variable object here.
*/
if (!sprop->writable() && !sprop->configurable() &&
sprop->hasDefaultGetter() && SPROP_HAS_VALID_SLOT(sprop, scope)) {
*constp = obj->lockedGetSlot(sprop->slot);
if (!shape->writable() && !shape->configurable() &&
shape->hasDefaultGetter() && obj->containsSlot(shape->slot)) {
*constp = obj->lockedGetSlot(shape->slot);
}
}
JS_UNLOCK_SCOPE(cx, scope);
JS_UNLOCK_OBJ(cx, obj);
if (sprop)
if (shape)
break;
}
}
@ -1852,8 +1867,10 @@ EmitEnterBlock(JSContext *cx, JSParseNode *pn, JSCodeGenerator *cg)
#endif
}
blockObj->scope()->freeslot = base;
return blockObj->growSlots(cx, base);
if (!blockObj->growSlots(cx, base))
return false;
blockObj->freeslot = base;
return true;
}
/*
@ -1904,7 +1921,7 @@ MakeUpvarForEval(JSParseNode *pn, JSCodeGenerator *cg)
JSAtom *atom = pn->pn_atom;
uintN index;
JSLocalKind localKind = js_LookupLocal(cx, fun, atom, &index);
JSLocalKind localKind = fun->lookupLocal(cx, atom, &index);
if (localKind == JSLOCAL_NONE)
return true;
@ -1914,10 +1931,8 @@ MakeUpvarForEval(JSParseNode *pn, JSCodeGenerator *cg)
JSAtomListElement *ale = cg->upvarList.lookup(atom);
if (!ale) {
if (cg->inFunction() &&
!js_AddLocal(cx, cg->fun, atom, JSLOCAL_UPVAR)) {
if (cg->inFunction() && !cg->fun->addLocal(cx, atom, JSLOCAL_UPVAR))
return false;
}
ale = cg->upvarList.add(cg->parser, atom);
if (!ale)
@ -2097,49 +2112,75 @@ BindNameToSlot(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
return MakeUpvarForEval(pn, cg);
}
/*
* Optimize accesses to undeclared globals, but only if we are in
* compile-and-go mode, the global is the same as the scope chain,
* and we are not in strict mode.
*/
if (cg->compileAndGo() &&
cg->compiler()->globalScope->globalObj &&
!(cg->flags & TCF_STRICT_MODE_CODE)) {
switch (op) {
case JSOP_NAME: op = JSOP_GETGNAME; break;
case JSOP_SETNAME: op = JSOP_SETGNAME; break;
case JSOP_INCNAME: op = JSOP_INCGNAME; break;
case JSOP_NAMEINC: op = JSOP_GNAMEINC; break;
case JSOP_DECNAME: op = JSOP_DECGNAME; break;
case JSOP_NAMEDEC: op = JSOP_GNAMEDEC; break;
case JSOP_SETCONST:
case JSOP_DELNAME:
case JSOP_FORNAME:
/* Not supported. */
return JS_TRUE;
default: JS_NOT_REACHED("gname");
}
}
ale = cg->atomList.add(cg->parser, atom);
if (!ale)
return JS_FALSE;
pn->pn_op = op;
pn->pn_dflags |= PND_BOUND;
return JS_TRUE;
}
if (dn->pn_dflags & PND_GVAR) {
/*
* If this is a global reference from within a function, leave pn_op as
* JSOP_NAME, etc. We could emit JSOP_*GVAR ops within function code if
* only we could depend on the global frame's slots being valid for all
* calls to the function, and if we could equate the atom index in the
* function's atom map for every global name with its frame slot.
*/
if (cg->inFunction())
return JS_TRUE;
/*
* We are optimizing global variables and there may be no pre-existing
* global property named atom when this global script runs. If atom was
* declared via const or var, optimize pn to access fp->vars using the
* appropriate JSOP_*GVAR op.
*
* FIXME: should be able to optimize global function access too.
*/
JS_ASSERT(dn_kind == JSDefinition::VAR || dn_kind == JSDefinition::CONST);
switch (op) {
case JSOP_NAME: op = JSOP_GETGVAR; break;
case JSOP_SETNAME: op = JSOP_SETGVAR; break;
case JSOP_SETCONST: /* NB: no change */ break;
case JSOP_INCNAME: op = JSOP_INCGVAR; break;
case JSOP_NAMEINC: op = JSOP_GVARINC; break;
case JSOP_DECNAME: op = JSOP_DECGVAR; break;
case JSOP_NAMEDEC: op = JSOP_GVARDEC; break;
case JSOP_FORNAME: /* NB: no change */ break;
case JSOP_DELNAME: /* NB: no change */ break;
case JSOP_NAME: op = JSOP_GETGLOBAL; break;
case JSOP_SETNAME: op = JSOP_SETGLOBAL; break;
case JSOP_INCNAME: op = JSOP_INCGLOBAL; break;
case JSOP_NAMEINC: op = JSOP_GLOBALINC; break;
case JSOP_DECNAME: op = JSOP_DECGLOBAL; break;
case JSOP_NAMEDEC: op = JSOP_GLOBALDEC; break;
case JSOP_FORNAME: op = JSOP_FORGLOBAL; break;
case JSOP_SETCONST:
case JSOP_DELNAME:
/* Not supported. */
return JS_TRUE;
default: JS_NOT_REACHED("gvar");
}
JSCodeGenerator *globalCg = cg->compiler()->globalScope->cg;
if (globalCg != cg) {
uint32 slot = globalCg->globalUses[cookie.asInteger()].slot;
/* Fall back to NAME if we can't add a slot. */
if (!cg->addGlobalUse(atom, slot, cookie))
return JS_FALSE;
if (cookie.isFree())
return JS_TRUE;
}
pn->pn_op = op;
pn->pn_cookie.set(cookie);
pn->pn_dflags |= PND_BOUND;
return JS_TRUE;
}
uintN level = cookie.level();
uint16 level = cookie.level();
JS_ASSERT(cg->staticLevel >= level);
/*
@ -2206,7 +2247,7 @@ BindNameToSlot(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
return JS_TRUE;
if (FUN_FLAT_CLOSURE(cg->fun)) {
op = JSOP_GETDSLOT;
op = JSOP_GETFCSLOT;
} else {
/*
* The function we're compiling may not be heavyweight, but if it
@ -2233,7 +2274,7 @@ BindNameToSlot(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
if (ale) {
index = ALE_INDEX(ale);
} else {
if (!js_AddLocal(cx, cg->fun, atom, JSLOCAL_UPVAR))
if (!cg->fun->addLocal(cx, atom, JSLOCAL_UPVAR))
return JS_FALSE;
ale = cg->upvarList.add(cg->parser, atom);
@ -2365,6 +2406,40 @@ BindNameToSlot(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
return JS_TRUE;
}
bool
JSCodeGenerator::addGlobalUse(JSAtom *atom, uint32 slot, UpvarCookie &cookie)
{
JSAtomListElement *ale = globalMap.lookup(atom);
if (ale) {
cookie.set(0, uint16(ALE_INDEX(ale)));
return true;
}
/* Don't bother encoding indexes that do not fit in 16 bits. */
if (globalUses.length() >= UINT16_LIMIT) {
cookie.makeFree();
return true;
}
/* Find or add an existing atom table entry. */
ale = atomList.add(parser, atom);
if (!ale)
return false;
cookie.set(0, globalUses.length());
GlobalSlotArray::Entry entry = { ALE_INDEX(ale), slot };
if (!globalUses.append(entry))
return false;
ale = globalMap.add(parser, atom);
if (!ale)
return false;
ALE_SET_INDEX(ale, cookie.asInteger());
return true;
}
/*
* If pn contains a useful expression, return true with *answer set to true.
* If pn contains a useless expression, return true with *answer set to false.
@ -2588,9 +2663,11 @@ EmitNameOp(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
case JSOP_NAME:
op = JSOP_CALLNAME;
break;
case JSOP_GETGVAR:
JS_ASSERT(!cg->funbox);
op = JSOP_CALLGVAR;
case JSOP_GETGNAME:
op = JSOP_CALLGNAME;
break;
case JSOP_GETGLOBAL:
op = JSOP_CALLGLOBAL;
break;
case JSOP_GETARG:
op = JSOP_CALLARG;
@ -2601,8 +2678,8 @@ EmitNameOp(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
case JSOP_GETUPVAR:
op = JSOP_CALLUPVAR;
break;
case JSOP_GETDSLOT:
op = JSOP_CALLDSLOT;
case JSOP_GETFCSLOT:
op = JSOP_CALLFCSLOT;
break;
default:
JS_ASSERT(op == JSOP_ARGUMENTS || op == JSOP_CALLEE);
@ -3542,13 +3619,6 @@ js_EmitFunctionScript(JSContext *cx, JSCodeGenerator *cg, JSParseNode *body)
if (js_Emit1(cx, cg, JSOP_GENERATOR) < 0)
return false;
CG_SWITCH_TO_MAIN(cg);
} else {
/*
* Emit a trace hint opcode only if not in a generator, since generators
* are not yet traced and both want to be the first instruction.
*/
if (js_Emit1(cx, cg, JSOP_TRACE) < 0)
return false;
}
if (cg->needsEagerArguments()) {
@ -3623,7 +3693,9 @@ MaybeEmitVarDecl(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
}
if (JOF_OPTYPE(pn->pn_op) == JOF_ATOM &&
(!cg->inFunction() || (cg->flags & TCF_FUN_HEAVYWEIGHT))) {
(!cg->inFunction() || (cg->flags & TCF_FUN_HEAVYWEIGHT)) &&
js_CodeSpec[pn->pn_op].type() != JOF_GLOBAL)
{
CG_SWITCH_TO_PROLOG(cg);
if (!UpdateLineNumberNotes(cx, cg, pn->pn_pos.begin.lineno))
return JS_FALSE;
@ -3631,6 +3703,16 @@ MaybeEmitVarDecl(JSContext *cx, JSCodeGenerator *cg, JSOp prologOp,
CG_SWITCH_TO_MAIN(cg);
}
if (JOF_OPTYPE(pn->pn_op) == JOF_LOCAL &&
!(cg->flags & TCF_FUN_CALLS_EVAL) &&
pn->pn_defn &&
(((JSDefinition *)pn)->pn_dflags & PND_CLOSED))
{
CG_SWITCH_TO_PROLOG(cg);
EMIT_UINT16_IMM_OP(JSOP_DEFUPVAR, pn->pn_cookie.asInteger());
CG_SWITCH_TO_MAIN(cg);
}
if (result)
*result = atomIndex;
return JS_TRUE;
@ -3712,6 +3794,7 @@ EmitDestructuringLHS(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
switch (pn->pn_op) {
case JSOP_SETNAME:
case JSOP_SETGNAME:
/*
* NB: pn is a PN_NAME node, not a PN_BINARY. Nevertheless,
* we want to emit JSOP_ENUMELEM, which has format JOF_ELEM.
@ -3734,7 +3817,7 @@ EmitDestructuringLHS(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
}
case JSOP_SETARG:
case JSOP_SETGVAR:
case JSOP_SETGLOBAL:
{
jsuint slot = pn->pn_cookie.asInteger();
EMIT_UINT16_IMM_OP(PN_OP(pn), slot);
@ -4144,6 +4227,9 @@ EmitVariables(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn,
if (op == JSOP_SETNAME) {
JS_ASSERT(!let);
EMIT_INDEX_OP(JSOP_BINDNAME, atomIndex);
} else if (op == JSOP_SETGNAME) {
JS_ASSERT(!let);
EMIT_INDEX_OP(JSOP_BINDGNAME, atomIndex);
}
if (pn->pn_op == JSOP_DEFCONST &&
!js_DefineCompileTimeConstant(cx, cg, pn2->pn_atom, pn3)) {
@ -4434,10 +4520,12 @@ js_EmitTree(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
*/
if (!cg->inFunction()) {
JS_ASSERT(!cg->topStmt);
CG_SWITCH_TO_PROLOG(cg);
op = FUN_FLAT_CLOSURE(fun) ? JSOP_DEFFUN_FC : JSOP_DEFFUN;
EMIT_INDEX_OP(op, index);
CG_SWITCH_TO_MAIN(cg);
if (pn->pn_cookie.isFree()) {
CG_SWITCH_TO_PROLOG(cg);
op = FUN_FLAT_CLOSURE(fun) ? JSOP_DEFFUN_FC : JSOP_DEFFUN;
EMIT_INDEX_OP(op, index);
CG_SWITCH_TO_MAIN(cg);
}
/* Emit NOP for the decompiler. */
if (!EmitFunctionDefNop(cx, cg, index))
@ -4446,11 +4534,16 @@ js_EmitTree(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
#ifdef DEBUG
JSLocalKind localKind =
#endif
js_LookupLocal(cx, cg->fun, fun->atom, &slot);
cg->fun->lookupLocal(cx, fun->atom, &slot);
JS_ASSERT(localKind == JSLOCAL_VAR || localKind == JSLOCAL_CONST);
JS_ASSERT(index < JS_BIT(20));
pn->pn_index = index;
op = FUN_FLAT_CLOSURE(fun) ? JSOP_DEFLOCALFUN_FC : JSOP_DEFLOCALFUN;
if ((pn->pn_dflags & PND_CLOSED) && !(cg->flags & TCF_FUN_CALLS_EVAL)) {
CG_SWITCH_TO_PROLOG(cg);
EMIT_UINT16_IMM_OP(JSOP_DEFUPVAR, pn->pn_cookie.asInteger());
CG_SWITCH_TO_MAIN(cg);
}
if (!EmitSlotIndexOp(cx, op, slot, index, cg))
return JS_FALSE;
}
@ -4772,10 +4865,10 @@ js_EmitTree(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
switch (op) {
case JSOP_GETARG: /* FALL THROUGH */
case JSOP_SETARG: op = JSOP_FORARG; break;
case JSOP_GETGVAR: /* FALL THROUGH */
case JSOP_SETGVAR: op = JSOP_FORNAME; break;
case JSOP_GETLOCAL: /* FALL THROUGH */
case JSOP_SETLOCAL: op = JSOP_FORLOCAL; break;
case JSOP_GETGLOBAL: /* FALL THROUGH */
case JSOP_SETGLOBAL: op = JSOP_FORGLOBAL; break;
default: JS_ASSERT(0);
}
} else {
@ -5720,8 +5813,10 @@ js_EmitTree(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
if (!ale)
return JS_FALSE;
atomIndex = ALE_INDEX(ale);
if (!pn2->isConst())
EMIT_INDEX_OP(JSOP_BINDNAME, atomIndex);
if (!pn2->isConst()) {
JSOp op = PN_OP(pn2) == JSOP_SETGNAME ? JSOP_BINDGNAME : JSOP_BINDNAME;
EMIT_INDEX_OP(op, atomIndex);
}
}
break;
case TOK_DOT:
@ -5778,8 +5873,10 @@ js_EmitTree(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
EMIT_INDEX_OP(JSOP_GETXPROP, atomIndex);
} else {
JS_ASSERT(PN_OP(pn2) != JSOP_GETUPVAR);
EMIT_UINT16_IMM_OP((PN_OP(pn2) == JSOP_SETGVAR)
? JSOP_GETGVAR
EMIT_UINT16_IMM_OP((PN_OP(pn2) == JSOP_SETGNAME)
? JSOP_GETGNAME
: (PN_OP(pn2) == JSOP_SETGLOBAL)
? JSOP_GETGLOBAL
: (PN_OP(pn2) == JSOP_SETARG)
? JSOP_GETARG
: JSOP_GETLOCAL,
@ -6326,11 +6423,6 @@ js_EmitTree(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
argc = pn->pn_count - 1;
if (js_Emit3(cx, cg, PN_OP(pn), ARGC_HI(argc), ARGC_LO(argc)) < 0)
return JS_FALSE;
if (PN_OP(pn) == JSOP_CALL) {
/* Add a trace hint opcode for recursion. */
if (js_Emit1(cx, cg, JSOP_TRACE) < 0)
return JS_FALSE;
}
if (PN_OP(pn) == JSOP_EVAL)
EMIT_UINT16_IMM_OP(JSOP_LINENO, pn->pn_pos.begin.lineno);
break;
@ -7307,8 +7399,8 @@ js_FinishTakingTryNotes(JSCodeGenerator *cg, JSTryNoteArray *array)
* cloned function objects and with the compiler-created clone-parent. There
* are nregexps = script->regexps()->length such reserved slots in each
* function object cloned from fun->object. NB: during compilation, a funobj
* slots element must never be allocated, because js_AllocSlot could hand out
* one of the slots that should be given to a regexp clone.
* slots element must never be allocated, because JSObject::allocSlot could
* hand out one of the slots that should be given to a regexp clone.
*
* If the code being compiled is global code, the cloned regexps are stored in
* the fp->vars slot after cg->ngvars, and to protect regexp slots from GC we set

View file

@ -246,6 +246,11 @@ struct JSStmtInfo {
/* The function mutates a positional (non-destructuring) parameter. */
#define TCF_FUN_MUTATES_PARAMETER 0x1000000
/*
* Compiling an eval() script.
*/
#define TCF_COMPILE_FOR_EVAL 0x2000000
/*
* Flags to check for return; vs. return expr; in a function.
*/
@ -295,6 +300,8 @@ struct JSTreeContext { /* tree context for semantic checks */
Compiler::compileFunctionBody */
JSFunctionBox *functionList;
JSParseNode *innermostWith; /* innermost WITH parse node */
#ifdef JS_SCOPE_DEPTH_METER
uint16 scopeDepth; /* current lexical scope chain depth */
uint16 maxScopeDepth; /* maximum lexical scope chain depth */
@ -304,7 +311,7 @@ struct JSTreeContext { /* tree context for semantic checks */
: flags(0), ngvars(0), bodyid(0), blockidGen(0),
topStmt(NULL), topScopeStmt(NULL), blockChain(NULL), blockNode(NULL),
parser(prs), scopeChain(NULL), parent(prs->tc), staticLevel(0),
funbox(NULL), functionList(NULL), sharpSlotBase(-1)
funbox(NULL), functionList(NULL), innermostWith(NULL), sharpSlotBase(-1)
{
prs->tc = this;
JS_SCOPE_DEPTH_METERING(scopeDepth = maxScopeDepth = 0);
@ -345,6 +352,8 @@ struct JSTreeContext { /* tree context for semantic checks */
int sharpSlotBase;
bool ensureSharpSlots();
js::Compiler *compiler() { return (js::Compiler *)parser; }
// Return true if there is a generator function within |skip| lexical scopes
// (going upward) from this context's lexical scope. Always return true if
// this context is itself a generator.
@ -536,6 +545,11 @@ struct JSCodeGenerator : public JSTreeContext
JSAtomList upvarList; /* map of atoms to upvar indexes */
JSUpvarArray upvarMap; /* indexed upvar pairs (JS_realloc'ed) */
typedef js::Vector<js::GlobalSlotArray::Entry, 16, js::ContextAllocPolicy> GlobalUseVector;
GlobalUseVector globalUses; /* per-script global uses */
JSAtomList globalMap; /* per-script map of global name to globalUses vector */
/*
* Initialize cg to allocate bytecode space from codePool, source note
* space from notePool, and all other arena-allocated temporaries from
@ -555,6 +569,8 @@ struct JSCodeGenerator : public JSTreeContext
*/
~JSCodeGenerator();
bool addGlobalUse(JSAtom *atom, uint32 slot, js::UpvarCookie &cookie);
bool hasSharps() {
bool rv = !!(flags & TCF_HAS_SHARPS);
JS_ASSERT((sharpSlotBase >= 0) == rv);
@ -564,6 +580,8 @@ struct JSCodeGenerator : public JSTreeContext
uintN sharpSlots() {
return hasSharps() ? SHARP_NSLOTS : 0;
}
bool compilingForEval() { return !!(flags & TCF_COMPILE_FOR_EVAL); }
};
#define CG_TS(cg) TS((cg)->parser)

View file

@ -69,7 +69,7 @@ using namespace js;
/* Forward declarations for js_ErrorClass's initializer. */
static JSBool
Exception(JSContext *cx, JSObject *obj, uintN argc, Value *argv, Value *rval);
Exception(JSContext *cx, uintN argc, Value *vp);
static void
exn_trace(JSTracer *trc, JSObject *obj);
@ -696,31 +696,27 @@ StringToFilename(JSContext *cx, JSString *str)
}
static JSBool
Exception(JSContext *cx, JSObject *obj, uintN argc, Value *argv, Value *rval)
Exception(JSContext *cx, uintN argc, Value *vp)
{
JSString *message, *filename;
JSStackFrame *fp;
if (!JS_IsConstructing(cx)) {
/*
* ECMA ed. 3, 15.11.1 requires Error, etc., to construct even when
* called as functions, without operator new. But as we do not give
* each constructor a distinct JSClass, whose .name member is used by
* NewNativeClassInstance to find the class prototype, we must get the
* class prototype ourselves.
*/
if (!argv[-2].toObject().getProperty(cx,
ATOM_TO_JSID(cx->runtime->atomState
.classPrototypeAtom),
rval)) {
return JS_FALSE;
}
JSObject *errProto = &rval->toObject();
obj = NewNativeClassInstance(cx, &js_ErrorClass, errProto, errProto->getParent());
if (!obj)
return JS_FALSE;
rval->setObject(*obj);
}
/*
* ECMA ed. 3, 15.11.1 requires Error, etc., to construct even when
* called as functions, without operator new. But as we do not give
* each constructor a distinct JSClass, whose .name member is used by
* NewNativeClassInstance to find the class prototype, we must get the
* class prototype ourselves.
*/
JSObject &callee = vp[0].toObject();
Value protov;
if (!callee.getProperty(cx, ATOM_TO_JSID(cx->runtime->atomState.classPrototypeAtom), &protov))
return JS_FALSE;
JSObject *errProto = &protov.toObject();
JSObject *obj = NewNativeClassInstance(cx, &js_ErrorClass, errProto, errProto->getParent());
if (!obj)
return JS_FALSE;
/*
* If it's a new object of class Exception, then null out the private
@ -730,6 +726,7 @@ Exception(JSContext *cx, JSObject *obj, uintN argc, Value *argv, Value *rval)
obj->setPrivate(NULL);
/* Set the 'message' property. */
Value *argv = vp + 2;
if (argc != 0) {
message = js_ValueToString(cx, argv[0]);
if (!message)
@ -768,8 +765,13 @@ Exception(JSContext *cx, JSObject *obj, uintN argc, Value *argv, Value *rval)
lineno = (fp && fp->pc(cx)) ? js_FramePCToLineNumber(cx, fp) : 0;
}
return (obj->getClass() != &js_ErrorClass) ||
InitExnPrivate(cx, obj, message, filename, lineno, NULL);
if (obj->getClass() == &js_ErrorClass &&
!InitExnPrivate(cx, obj, message, filename, lineno, NULL)) {
return JS_FALSE;
}
vp->setObject(*obj);
return JS_TRUE;
}
/*
@ -780,7 +782,7 @@ Exception(JSContext *cx, JSObject *obj, uintN argc, Value *argv, Value *rval)
* number information along with this message.
*/
static JSBool
exn_toString(JSContext *cx, uintN argc, jsval *vp)
exn_toString(JSContext *cx, uintN argc, Value *vp)
{
JSObject *obj;
jsval v;
@ -788,11 +790,11 @@ exn_toString(JSContext *cx, uintN argc, jsval *vp)
jschar *chars, *cp;
size_t name_length, message_length, length;
obj = JS_THIS_OBJECT(cx, vp);
obj = ComputeThisFromVp(cx, vp);
if (!obj || !obj->getProperty(cx, ATOM_TO_JSID(cx->runtime->atomState.nameAtom), Valueify(&v)))
return JS_FALSE;
name = JSVAL_IS_STRING(v) ? JSVAL_TO_STRING(v) : cx->runtime->emptyString;
*vp = STRING_TO_JSVAL(name);
vp->setString(name);
if (!JS_GetProperty(cx, obj, js_message_str, &v))
return JS_FALSE;
@ -825,7 +827,7 @@ exn_toString(JSContext *cx, uintN argc, jsval *vp)
result = name;
}
*vp = STRING_TO_JSVAL(result);
vp->setString(result);
return JS_TRUE;
}
@ -834,7 +836,7 @@ exn_toString(JSContext *cx, uintN argc, jsval *vp)
* Return a string that may eval to something similar to the original object.
*/
static JSBool
exn_toSource(JSContext *cx, uintN argc, jsval *vp)
exn_toSource(JSContext *cx, uintN argc, Value *vp)
{
JSObject *obj;
JSString *name, *message, *filename, *lineno_as_str, *result;
@ -842,13 +844,13 @@ exn_toSource(JSContext *cx, uintN argc, jsval *vp)
size_t lineno_length, name_length, message_length, filename_length, length;
jschar *chars, *cp;
obj = JS_THIS_OBJECT(cx, vp);
if (!obj || !obj->getProperty(cx, ATOM_TO_JSID(cx->runtime->atomState.nameAtom), Valueify(vp)))
obj = ComputeThisFromVp(cx, vp);
if (!obj || !obj->getProperty(cx, ATOM_TO_JSID(cx->runtime->atomState.nameAtom), vp))
return false;
name = js_ValueToString(cx, Valueify(*vp));
name = js_ValueToString(cx, *vp);
if (!name)
return false;
*vp = STRING_TO_JSVAL(name);
vp->setString(name);
{
AutoArrayRooter tvr(cx, JS_ARRAY_LENGTH(localroots), Valueify(localroots));
@ -948,7 +950,7 @@ exn_toSource(JSContext *cx, uintN argc, jsval *vp)
cx->free(chars);
return false;
}
*vp = STRING_TO_JSVAL(result);
vp->setString(result);
return true;
}
}
@ -1030,7 +1032,7 @@ js_InitExceptionClasses(JSContext *cx, JSObject *obj)
/* Make a constructor function for the current name. */
JSProtoKey protoKey = GetExceptionProtoKey(i);
JSAtom *atom = cx->runtime->atomState.classAtoms[protoKey];
JSFunction *fun = js_DefineFunction(cx, obj, atom, Exception, 3, 0);
JSFunction *fun = js_DefineFunction(cx, obj, atom, Exception, 3, JSFUN_CONSTRUCTOR);
if (!fun)
return NULL;
roots[2] = OBJECT_TO_JSVAL(FUN_OBJECT(fun));

The diff for this file is not shown because it is too large.

View file

@ -48,21 +48,6 @@
#include "jsatom.h"
#include "jsstr.h"
typedef struct JSLocalNameMap JSLocalNameMap;
/*
* Depending on the number of arguments and variables in the function their
* names and attributes are stored either as a single atom or as an array of
* tagged atoms (when there are few locals) or as a hash-based map (when there
* are many locals). In the first 2 cases the lowest bit of the atom is used
* as a tag to distinguish const from var. See jsfun.c for details.
*/
typedef union JSLocalNames {
jsuword taggedAtom;
jsuword *array;
JSLocalNameMap *map;
} JSLocalNames;
/*
* The high two bits of JSFunction.flags encode whether the function is native
* or interpreted, and if interpreted, what kind of optimized closure form (if
@ -101,17 +86,6 @@ typedef union JSLocalNames {
appear to call itself via its own name
or arguments.callee */
#define JSFUN_FAST_NATIVE_CTOR 0x0002 /* JSFastNative directly invokable
* during construction. */
/*
* Extra JSCLASS flag indicating the native passed to JS_InitClass is
* a fast native constructor. This is internal for now as the 'this' value passed
* to such a constructor is a magic value, and there is no way to query this
* in the API. See bug 581263.
*/
#define JSCLASS_FAST_CONSTRUCTOR (1<<4)
#define JSFUN_EXPR_CLOSURE 0x1000 /* expression closure: function(x) x*x */
#define JSFUN_TRCINFO 0x2000 /* when set, u.n.trcinfo is non-null,
JSFunctionSpec::call points to a
@ -128,36 +102,49 @@ typedef union JSLocalNames {
#define FUN_INTERPRETED(fun) (FUN_KIND(fun) >= JSFUN_INTERPRETED)
#define FUN_FLAT_CLOSURE(fun) (FUN_KIND(fun) == JSFUN_FLAT_CLOSURE)
#define FUN_NULL_CLOSURE(fun) (FUN_KIND(fun) == JSFUN_NULL_CLOSURE)
#define FUN_SLOW_NATIVE(fun) (!FUN_INTERPRETED(fun) && !((fun)->flags & JSFUN_FAST_NATIVE))
#define FUN_SCRIPT(fun) (FUN_INTERPRETED(fun) ? (fun)->u.i.script : NULL)
#define FUN_NATIVE(fun) (FUN_SLOW_NATIVE(fun) ? (fun)->u.n.native : NULL)
#define FUN_FAST_NATIVE(fun) (((fun)->flags & JSFUN_FAST_NATIVE) \
? (js::FastNative) (fun)->u.n.native \
: NULL)
#define FUN_MINARGS(fun) (((fun)->flags & JSFUN_FAST_NATIVE) \
? 0 \
: (fun)->nargs)
#define FUN_CLASP(fun) (JS_ASSERT(!FUN_INTERPRETED(fun)), \
fun->u.n.clasp)
#define FUN_TRCINFO(fun) (JS_ASSERT(!FUN_INTERPRETED(fun)), \
JS_ASSERT((fun)->flags & JSFUN_TRCINFO), \
fun->u.n.trcinfo)
/*
* Formal parameters, local variables, and upvars are stored in a shape tree
* path with its latest node at fun->u.i.names. The addLocal, lookupLocal, and
* getLocalNameArray methods abstract away this detail.
*
* The lastArg, lastVar, and lastUpvar JSFunction methods provide more direct
* access to the shape path. These methods may be used to make a Shape::Range
* for iterating over the relevant shapes from youngest to oldest (i.e., last
* or right-most to first or left-most in source order).
*
* Sometimes iteration order must be from oldest to youngest, however. For such
* cases, use getLocalNameArray. The RAII helper class js::AutoLocalNameArray,
* defined in jscntxt.h, should be used where possible instead of direct calls
* to getLocalNameArray.
*/
enum JSLocalKind {
JSLOCAL_NONE,
JSLOCAL_ARG,
JSLOCAL_VAR,
JSLOCAL_CONST,
JSLOCAL_UPVAR
};
struct JSFunction : public JSObject
{
uint16 nargs; /* maximum number of specified arguments,
reflected as f.length/f.arity */
uint16 flags; /* flags, see JSFUN_* below and in jsapi.h */
union {
union U {
struct {
uint16 extra; /* number of arg slots for local GC roots */
uint16 spare; /* reserved for future use */
js::Native native; /* native method pointer or null */
js::Class *clasp; /* class of objects constructed
by this function */
JSNativeTraceInfo *trcinfo;
} n;
struct {
struct Scripted {
uint16 nvars; /* number of local variables */
uint16 nupvars; /* number of upvars (computable from script
but here for faster access) */
@ -171,7 +158,7 @@ struct JSFunction : public JSObject
indirect eval; if true, then this function
object's proto is the wrapped object */
JSScript *script; /* interpreted bytecode descriptor or null */
JSLocalNames names; /* argument and variable names */
js::Shape *names; /* argument and variable names */
} i;
} u;
JSAtom *atom; /* name for diagnostics and decompiling */
@ -179,14 +166,14 @@ struct JSFunction : public JSObject
bool optimizedClosure() const { return FUN_KIND(this) > JSFUN_INTERPRETED; }
bool needsWrapper() const { return FUN_NULL_CLOSURE(this) && u.i.skipmin != 0; }
bool isInterpreted() const { return FUN_INTERPRETED(this); }
bool isFastNative() const { return !!(flags & JSFUN_FAST_NATIVE); }
bool isFastConstructor() const { return !!(flags & JSFUN_FAST_NATIVE_CTOR); }
bool isNative() const { return !FUN_INTERPRETED(this); }
bool isConstructor() const { return flags & JSFUN_CONSTRUCTOR; }
bool isHeavyweight() const { return JSFUN_HEAVYWEIGHT_TEST(flags); }
unsigned minArgs() const { return FUN_MINARGS(this); }
unsigned minArgs() const { return isInterpreted() ? nargs : 0; }
inline bool inStrictMode() const;
inline bool isBound() const;
bool isBound() const;
uintN countVars() const {
JS_ASSERT(FUN_INTERPRETED(this));
@ -213,13 +200,51 @@ struct JSFunction : public JSObject
int sharpSlotBase(JSContext *cx);
uint32 countUpvarSlots() const;
const js::Shape *lastArg() const;
const js::Shape *lastVar() const;
const js::Shape *lastUpvar() const { return u.i.names; }
bool addLocal(JSContext *cx, JSAtom *atom, JSLocalKind kind);
/*
* Look up an argument or variable name returning its kind when found or
* JSLOCAL_NONE when no such name exists. When indexp is not null and the
* name exists, *indexp will receive the index of the corresponding
* argument or variable.
*/
JSLocalKind lookupLocal(JSContext *cx, JSAtom *atom, uintN *indexp);
/*
* Function and macros to work with local names as an array of words.
* getLocalNameArray returns the array, or null if we are out of memory.
* This function must be called only when fun->hasLocalNames().
*
* The supplied pool is used to allocate the returned array, so the caller
* is obligated to mark and release to free it.
*
* The elements of the array with index less than fun->nargs correspond to
* the names of function formal parameters. An index >= fun->nargs
* addresses a var binding. Use JS_LOCAL_NAME_TO_ATOM to convert array's
* element to an atom pointer. This pointer can be null when the element is
* for a formal parameter corresponding to a destructuring pattern.
*
* If nameWord does not name a formal parameter, use JS_LOCAL_NAME_IS_CONST
* to check if nameWord corresponds to the const declaration.
*/
jsuword *getLocalNameArray(JSContext *cx, struct JSArenaPool *pool);
void freezeLocalNames(JSContext *cx);
/*
* If fun's formal parameters include any duplicate names, return one
* of them (chosen arbitrarily). If they are all unique, return NULL.
*/
JSAtom *findDuplicateFormal() const;
uint32 countInterpretedReservedSlots() const;
#define JS_LOCAL_NAME_TO_ATOM(nameWord) ((JSAtom *) ((nameWord) & ~(jsuword) 1))
#define JS_LOCAL_NAME_IS_CONST(nameWord) ((((nameWord) & (jsuword) 1)) != 0)
bool mightEscape() const {
return FUN_INTERPRETED(this) && (FUN_FLAT_CLOSURE(this) || u.i.nupvars == 0);
@ -262,6 +287,14 @@ struct JSFunction : public JSObject
JS_ASSERT(joinable());
fslots[METHOD_ATOM_SLOT].setString(ATOM_TO_STRING(atom));
}
js::Native maybeNative() const {
return isInterpreted() ? NULL : u.n.native;
}
/* Number of extra fixed function object slots besides JSSLOT_PRIVATE. */
static const uint32 CLASS_RESERVED_SLOTS = JSObject::FUN_CLASS_RESERVED_SLOTS;
static const uint32 FIRST_FREE_SLOT = JSSLOT_PRIVATE + CLASS_RESERVED_SLOTS + 1;
};
JS_STATIC_ASSERT(sizeof(JSFunction) % JS_GCTHING_ALIGN == 0);
@ -274,8 +307,8 @@ JS_STATIC_ASSERT(sizeof(JSFunction) % JS_GCTHING_ALIGN == 0);
#ifdef JS_TRACER
/* MSVC demands the intermediate (void *) cast here. */
# define JS_TN(name,fastcall,nargs,flags,trcinfo) \
JS_FN(name, JS_DATA_TO_FUNC_PTR(JSNative, trcinfo), nargs, \
(flags) | JSFUN_FAST_NATIVE | JSFUN_STUB_GSOPS | JSFUN_TRCINFO)
JS_FN(name, JS_DATA_TO_FUNC_PTR(Native, trcinfo), nargs, \
(flags) | JSFUN_STUB_GSOPS | JSFUN_TRCINFO)
#else
# define JS_TN(name,fastcall,nargs,flags,trcinfo) \
JS_FN(name, fastcall, nargs, flags)
@ -296,8 +329,16 @@ JS_STATIC_ASSERT(sizeof(JSFunction) % JS_GCTHING_ALIGN == 0);
* single-threaded objects and GC heaps.
*/
extern js::Class js_ArgumentsClass;
namespace js {
extern Class StrictArgumentsClass;
struct ArgumentsData {
js::Value callee;
js::Value slots[1];
};
}
inline bool
@ -318,12 +359,18 @@ JSObject::isArguments() const
return isNormalArguments() || isStrictArguments();
}
#define JS_ARGUMENT_OBJECT_ON_TRACE ((void *)0xa126)
#define JS_ARGUMENTS_OBJECT_ON_TRACE ((void *)0xa126)
extern JS_PUBLIC_DATA(js::Class) js_CallClass;
extern JS_PUBLIC_DATA(js::Class) js_FunctionClass;
extern js::Class js_DeclEnvClass;
extern const uint32 CALL_CLASS_FIXED_RESERVED_SLOTS;
extern const uint32 CALL_CLASS_RESERVED_SLOTS;
inline bool
JSObject::isCall() const
{
return getClass() == &js_CallClass;
}
inline bool
JSObject::isFunction() const
@ -331,17 +378,14 @@ JSObject::isFunction() const
return getClass() == &js_FunctionClass;
}
inline bool
JSObject::isCallable()
inline JSFunction *
JSObject::getFunctionPrivate() const
{
return isFunction() || getClass()->call;
JS_ASSERT(isFunction());
return reinterpret_cast<JSFunction *>(getPrivate());
}
static inline bool
js_IsCallable(const js::Value &v)
{
return v.isObject() && v.toObject().isCallable();
}
namespace js {
/*
* NB: jsapi.h and jsobj.h must be included before any call to this macro.
@ -361,6 +405,16 @@ IsFunctionObject(const js::Value &v, JSObject **funobj)
return v.isObject() && (*funobj = &v.toObject())->isFunction();
}
static JS_ALWAYS_INLINE bool
IsFunctionObject(const js::Value &v, JSFunction **fun)
{
JSObject *funobj;
bool b = IsFunctionObject(v, &funobj);
if (b)
*fun = funobj->getFunctionPrivate();
return b;
}
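A small hedged sketch of the JSFunction overload above (MaybeGetNative is hypothetical): a single test both classifies the value and recovers the function private.
/* Sketch: fetch the native implementation behind a function-object value. */
static JS_ALWAYS_INLINE bool
MaybeGetNative(const js::Value &v, js::Native *nativep)
{
    JSFunction *fun;
    if (!IsFunctionObject(v, &fun) || !fun->isNative())
        return false;
    *nativep = fun->maybeNative();  /* non-null, since fun is native */
    return true;
}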
/*
* Macro to access the private slot of the function object after the slot is
* initialized.
@ -369,12 +423,6 @@ IsFunctionObject(const js::Value &v, JSObject **funobj)
(JS_ASSERT((funobj)->isFunction()), \
(JSFunction *) (funobj)->getPrivate())
extern JSFunction *
js_NewFunction(JSContext *cx, JSObject *funobj, js::Native native, uintN nargs,
uintN flags, JSObject *parent, JSAtom *atom);
namespace js {
/*
* Return true if this is a compiler-created internal function accessed by
* its own object. Such a function object must not be accessible to script
@ -388,10 +436,48 @@ IsInternalFunctionObject(JSObject *funobj)
return funobj == fun && (fun->flags & JSFUN_LAMBDA) && !funobj->getParent();
}
/* Valueified JS_IsConstructing. */
static JS_ALWAYS_INLINE bool
IsConstructing(const Value *vp)
{
#ifdef DEBUG
JSObject *callee = &JS_CALLEE(cx, vp).toObject();
if (callee->isFunction()) {
JSFunction *fun = callee->getFunctionPrivate();
JS_ASSERT((fun->flags & JSFUN_CONSTRUCTOR) != 0);
} else {
JS_ASSERT(callee->getClass()->construct != NULL);
}
#endif
return vp[1].isMagic();
}
static JS_ALWAYS_INLINE bool
IsConstructing_PossiblyWithGivenThisObject(const Value *vp, JSObject **ctorThis)
{
#ifdef DEBUG
JSObject *callee = &JS_CALLEE(cx, vp).toObject();
if (callee->isFunction()) {
JSFunction *fun = callee->getFunctionPrivate();
JS_ASSERT((fun->flags & JSFUN_CONSTRUCTOR) != 0);
} else {
JS_ASSERT(callee->getClass()->construct != NULL);
}
#endif
bool isCtor = vp[1].isMagic();
if (isCtor)
*ctorThis = vp[1].getMagicObjectOrNullPayload();
return isCtor;
}
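A hedged sketch of the native-constructor convention these helpers serve, mirroring the Exception() rewrite in jsexn.cpp; MyCtor and the use of js_ObjectClass are placeholders, not part of the patch.
static JSBool
MyCtor(JSContext *cx, uintN argc, js::Value *vp)
{
    JS_ASSERT(IsConstructing(vp));   /* vp[1] holds the constructing magic */
    /* Find the class prototype via the callee, as Exception() now does. */
    JSObject &callee = vp[0].toObject();
    js::Value protov;
    if (!callee.getProperty(cx, ATOM_TO_JSID(cx->runtime->atomState.classPrototypeAtom),
                            &protov)) {
        return JS_FALSE;
    }
    JSObject *proto = &protov.toObject();
    JSObject *obj = NewNativeClassInstance(cx, &js_ObjectClass, proto, proto->getParent());
    if (!obj)
        return JS_FALSE;
    vp->setObject(*obj);             /* the result of 'new MyCtor(...)' */
    return JS_TRUE;
}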
} /* namespace js */
extern JSString *
fun_toStringHelper(JSContext *cx, JSObject *obj, uintN indent);
} /* namespace js */
extern JSFunction *
js_NewFunction(JSContext *cx, JSObject *funobj, js::Native native, uintN nargs,
uintN flags, JSObject *parent, JSAtom *atom);
extern JSObject *
js_InitFunctionClass(JSContext *cx, JSObject *obj);
@ -419,6 +505,9 @@ CloneFunctionObject(JSContext *cx, JSFunction *fun, JSObject *parent)
return js_CloneFunctionObject(cx, fun, parent, proto);
}
extern JSObject * JS_FASTCALL
js_AllocFlatClosure(JSContext *cx, JSFunction *fun, JSObject *scopeChain);
extern JS_REQUIRES_STACK JSObject *
js_NewFlatClosure(JSContext *cx, JSFunction *fun);
@ -531,56 +620,6 @@ JS_STATIC_ASSERT(((JS_ARGS_LENGTH_MAX << 1) | 1) <= JSVAL_INT_MAX);
extern JSBool
js_XDRFunctionObject(JSXDRState *xdr, JSObject **objp);
typedef enum JSLocalKind {
JSLOCAL_NONE,
JSLOCAL_ARG,
JSLOCAL_VAR,
JSLOCAL_CONST,
JSLOCAL_UPVAR
} JSLocalKind;
extern JSBool
js_AddLocal(JSContext *cx, JSFunction *fun, JSAtom *atom, JSLocalKind kind);
/*
* Look up an argument or variable name returning its kind when found or
* JSLOCAL_NONE when no such name exists. When indexp is not null and the name
* exists, *indexp will receive the index of the corresponding argument or
* variable.
*/
extern JSLocalKind
js_LookupLocal(JSContext *cx, JSFunction *fun, JSAtom *atom, uintN *indexp);
/*
* Functions to work with local names as an array of words.
*
* js_GetLocalNameArray returns the array, or null if we are out of memory.
* This function must be called only when fun->hasLocalNames().
*
* The supplied pool is used to allocate the returned array, so the caller is
* obligated to mark and release to free it.
*
* The elements of the array with index less than fun->nargs correspond to the
* names of function formal parameters. An index >= fun->nargs addresses a var
* binding. Use JS_LOCAL_NAME_TO_ATOM to convert array's element to an atom
* pointer. This pointer can be null when the element is for a formal parameter
* corresponding to a destructuring pattern.
*
* If nameWord does not name a formal parameter, use JS_LOCAL_NAME_IS_CONST to
* check if nameWord corresponds to the const declaration.
*/
extern jsuword *
js_GetLocalNameArray(JSContext *cx, JSFunction *fun, struct JSArenaPool *pool);
#define JS_LOCAL_NAME_TO_ATOM(nameWord) \
((JSAtom *) ((nameWord) & ~(jsuword) 1))
#define JS_LOCAL_NAME_IS_CONST(nameWord) \
((((nameWord) & (jsuword) 1)) != 0)
extern void
js_FreezeLocalNames(JSContext *cx, JSFunction *fun);
extern JSBool
js_fun_apply(JSContext *cx, uintN argc, js::Value *vp);

View file

@ -78,14 +78,14 @@
#include "jsscript.h"
#include "jsstaticcheck.h"
#include "jsstr.h"
#include "jstask.h"
#include "jstracer.h"
#include "methodjit/MethodJIT.h"
#if JS_HAS_XML_SUPPORT
#include "jsxml.h"
#endif
#include "jsdtracef.h"
#include "jsprobes.h"
#include "jscntxtinlines.h"
#include "jsobjinlines.h"
#include "jshashtable.h"
@ -633,7 +633,9 @@ static JSGCArena *
NewGCArena(JSContext *cx)
{
JSRuntime *rt = cx->runtime;
if (!JS_THREAD_DATA(cx)->waiveGCQuota && rt->gcBytes >= rt->gcMaxBytes) {
if (!JS_THREAD_DATA(cx)->waiveGCQuota &&
(rt->gcBytes >= rt->gcMaxBytes ||
rt->gcBytes > GC_HEAP_GROWTH_FACTOR * rt->gcNewArenaTriggerBytes)) {
/*
* FIXME bug 524051 We cannot run a last-ditch GC on trace for now, so
* just pretend we are out of memory which will throw us off trace and
@ -641,7 +643,7 @@ NewGCArena(JSContext *cx)
*/
if (!JS_ON_TRACE(cx))
return NULL;
js_TriggerGC(cx, true);
TriggerGC(rt);
}
if (rt->gcFreeArenaChunks.empty()) {
@ -801,7 +803,7 @@ GetFinalizableTraceKind(size_t thingKind)
JSTRACE_OBJECT, /* FINALIZE_FUNCTION */
#if JS_HAS_XML_SUPPORT /* FINALIZE_XML */
JSTRACE_XML,
#endif
#endif
JSTRACE_STRING, /* FINALIZE_SHORT_STRING */
JSTRACE_STRING, /* FINALIZE_STRING */
JSTRACE_STRING, /* FINALIZE_EXTERNAL_STRING0 */
@ -928,7 +930,16 @@ js_InitGC(JSRuntime *rt, uint32 maxbytes)
return false;
#ifdef JS_THREADSAFE
if (!rt->gcHelperThread.init())
rt->gcLock = JS_NEW_LOCK();
if (!rt->gcLock)
return false;
rt->gcDone = JS_NEW_CONDVAR(rt->gcLock);
if (!rt->gcDone)
return false;
rt->requestDone = JS_NEW_CONDVAR(rt->gcLock);
if (!rt->requestDone)
return false;
if (!rt->gcHelperThread.init(rt))
return false;
#endif
@ -952,6 +963,7 @@ js_InitGC(JSRuntime *rt, uint32 maxbytes)
* (during JS engine start).
*/
rt->setGCLastBytes(8192);
rt->gcNewArenaTriggerBytes = GC_ARENA_ALLOCATION_TRIGGER;
METER(PodZero(&rt->gcStats));
return true;
@ -1098,7 +1110,7 @@ MarkWordConservatively(JSTracer *trc, jsuword w)
}
#endif
}
#if defined JS_DUMP_CONSERVATIVE_GC_ROOTS || defined JS_GCMETER
if (IS_GC_MARKING_TRACER(trc))
static_cast<GCMarker *>(trc)->conservativeStats.counter[test]++;
@ -1113,47 +1125,70 @@ MarkRangeConservatively(JSTracer *trc, jsuword *begin, jsuword *end)
MarkWordConservatively(trc, *i);
}
static void
MarkThreadDataConservatively(JSTracer *trc, JSThreadData *td)
{
ConservativeGCThreadData *ctd = &td->conservativeGC;
JS_ASSERT(ctd->hasStackToScan());
jsuword *stackMin, *stackEnd;
#if JS_STACK_GROWTH_DIRECTION > 0
stackMin = td->nativeStackBase;
stackEnd = ctd->nativeStackTop;
#else
stackMin = ctd->nativeStackTop + 1;
stackEnd = td->nativeStackBase;
#endif
JS_ASSERT(stackMin <= stackEnd);
MarkRangeConservatively(trc, stackMin, stackEnd);
MarkRangeConservatively(trc, ctd->registerSnapshot.words,
JS_ARRAY_END(ctd->registerSnapshot.words));
}
void
MarkStackRangeConservatively(JSTracer *trc, Value *beginv, Value *endv)
{
jsuword *begin = (jsuword *) beginv;
jsuword *end = (jsuword *) endv;
#ifdef JS_NUNBOX32
/*
* With 64-bit jsvals on 32-bit systems, we can optimize a bit by
* scanning only the payloads.
*/
JS_ASSERT(begin <= end);
for (jsuword *i = begin; i != end; i += 2)
MarkWordConservatively(trc, *i);
#else
MarkRangeConservatively(trc, begin, end);
#endif
}
void
MarkConservativeStackRoots(JSTracer *trc)
{
/* Do conservative scanning of the stack and registers. */
for (ThreadDataIter i(trc->context->runtime); !i.empty(); i.popFront()) {
JSThreadData *td = i.threadData();
ConservativeGCThreadData *ctd = &td->conservativeGC;
if (ctd->isEnabled()) {
jsuword *stackMin, *stackEnd;
#if JS_STACK_GROWTH_DIRECTION > 0
stackMin = td->nativeStackBase;
stackEnd = ctd->nativeStackTop;
#else
stackMin = ctd->nativeStackTop + 1;
stackEnd = td->nativeStackBase;
#endif
JS_ASSERT(stackMin <= stackEnd);
MarkRangeConservatively(trc, stackMin, stackEnd);
MarkRangeConservatively(trc, ctd->registerSnapshot.words,
JS_ARRAY_END(ctd->registerSnapshot.words));
#ifdef JS_THREADSAFE
for (JSThread::Map::Range r = trc->context->runtime->threads.all(); !r.empty(); r.popFront()) {
JSThread *thread = r.front().value;
ConservativeGCThreadData *ctd = &thread->data.conservativeGC;
if (ctd->hasStackToScan()) {
JS_ASSERT_IF(!thread->requestDepth, thread->suspendCount);
MarkThreadDataConservatively(trc, &thread->data);
} else {
JS_ASSERT(!thread->suspendCount);
JS_ASSERT(thread->requestDepth <= ctd->requestThreshold);
}
}
#else
MarkThreadDataConservatively(trc, &trc->context->runtime->threadData);
#endif
}
JS_NEVER_INLINE JS_FRIEND_API(void)
ConservativeGCThreadData::enable(bool knownStackBoundary)
JS_NEVER_INLINE void
ConservativeGCThreadData::recordStackTop()
{
++enableCount;
if (enableCount <= 0)
return;
/* Update the native stack pointer if it points to a bigger stack. */
#if JS_STACK_GROWTH_DIRECTION > 0
# define CMP >
#else
# define CMP <
#endif
jsuword dummy;
if (knownStackBoundary || enableCount == 1 || &dummy CMP nativeStackTop)
nativeStackTop = &dummy;
#undef CMP
nativeStackTop = &dummy;
/* Update the register snapshot with the latest values. */
#if defined(_MSC_VER)
@ -1167,14 +1202,18 @@ ConservativeGCThreadData::enable(bool knownStackBoundary)
}
JS_NEVER_INLINE JS_FRIEND_API(void)
ConservativeGCThreadData::disable()
static inline void
RecordNativeStackTopForGC(JSContext *cx)
{
--enableCount;
#ifdef DEBUG
if (enableCount == 0)
nativeStackTop = NULL;
ConservativeGCThreadData *ctd = &JS_THREAD_DATA(cx)->conservativeGC;
#ifdef JS_THREADSAFE
/* Record the stack top here only if we are called from a request. */
JS_ASSERT(cx->thread->requestDepth >= ctd->requestThreshold);
if (cx->thread->requestDepth == ctd->requestThreshold)
return;
#endif
ctd->recordStackTop();
}
} /* namespace js */
@ -1196,7 +1235,7 @@ js_FinishGC(JSRuntime *rt)
#endif
#ifdef JS_THREADSAFE
rt->gcHelperThread.cancel();
rt->gcHelperThread.finish(rt);
#endif
FinishGCArenaLists(rt);
@ -1892,7 +1931,7 @@ Mark(JSTracer *trc, void *thing, uint32 kind)
}
str = iter.next();
} while (str);
} else if (MarkIfUnmarkedGCThing(thing, gcmarker->getMarkColor())) {
/*
* With JS_GC_ASSUME_LOW_C_STACK defined the mark phase of GC
@ -1922,7 +1961,7 @@ void
MarkGCThing(JSTracer *trc, void *thing)
{
JS_ASSERT(size_t(thing) % JS_GCTHING_ALIGN == 0);
if (!thing)
return;
@ -2015,8 +2054,8 @@ AutoGCRooter::trace(JSTracer *trc)
MarkValue(trc, static_cast<AutoValueRooter *>(this)->val, "js::AutoValueRooter.val");
return;
case SPROP:
static_cast<AutoScopePropertyRooter *>(this)->sprop->trace(trc);
case SHAPE:
static_cast<AutoShapeRooter *>(this)->shape->trace(trc);
return;
case PARSER:
@ -2107,8 +2146,10 @@ AutoGCRooter::trace(JSTracer *trc)
MarkValueRange(trc, tag, static_cast<AutoArrayRooter *>(this)->array, "js::AutoArrayRooter.array");
}
namespace js {
void
js_TraceContext(JSTracer *trc, JSContext *acx)
MarkContext(JSTracer *trc, JSContext *acx)
{
/* Stack frames and slots are traced by StackSpace::mark. */
@ -2145,7 +2186,7 @@ js_TraceContext(JSTracer *trc, JSContext *acx)
}
JS_REQUIRES_STACK void
js_TraceRuntime(JSTracer *trc)
MarkRuntime(JSTracer *trc)
{
JSRuntime *rt = trc->context->runtime;
@ -2163,9 +2204,9 @@ js_TraceRuntime(JSTracer *trc)
while (JSContext *acx = js_ContextIterator(rt, JS_TRUE, &iter)) {
for (AutoGCRooter *gcr = acx->autoGCRooters; gcr; gcr = gcr->down) {
#ifdef JS_THREADSAFE
JS_ASSERT(acx->outstandingRequests != 0);
JS_ASSERT_IF(!acx->thread->requestDepth, acx->thread->suspendCount);
#endif
JS_ASSERT(JS_THREAD_DATA(acx)->conservativeGC.isEnabled());
JS_ASSERT(JS_THREAD_DATA(acx)->conservativeGC.hasStackToScan());
void *thing;
switch (gcr->tag) {
default:
@ -2209,7 +2250,7 @@ js_TraceRuntime(JSTracer *trc)
(long) ((jsword) JS_THREAD_DATA(acx)->nativeStackBase - (jsword) gcr),
(long) ((jsword) JS_THREAD_DATA(acx)->nativeStackBase -
(jsword) JS_THREAD_DATA(acx)->conservativeGC.nativeStackTop),
JS_THREAD_DATA(acx)->conservativeGC.enableCount);
int(JS_THREAD_DATA(acx)->conservativeGC.hasStackToScan()));
JS_ASSERT(false);
abort();
}
@ -2228,11 +2269,24 @@ js_TraceRuntime(JSTracer *trc)
iter = NULL;
while (JSContext *acx = js_ContextIterator(rt, JS_TRUE, &iter))
js_TraceContext(trc, acx);
MarkContext(trc, acx);
for (ThreadDataIter i(rt); !i.empty(); i.popFront())
i.threadData()->mark(trc);
if (rt->emptyArgumentsShape)
rt->emptyArgumentsShape->trace(trc);
if (rt->emptyBlockShape)
rt->emptyBlockShape->trace(trc);
if (rt->emptyCallShape)
rt->emptyCallShape->trace(trc);
if (rt->emptyDeclEnvShape)
rt->emptyDeclEnvShape->trace(trc);
if (rt->emptyEnumeratorShape)
rt->emptyEnumeratorShape->trace(trc);
if (rt->emptyWithShape)
rt->emptyWithShape->trace(trc);
/*
* We mark extra roots last so that the cycle collector can use additional
* colors to implement cycle collection.
@ -2255,13 +2309,8 @@ js_TraceRuntime(JSTracer *trc)
}
void
js_TriggerGC(JSContext *cx, JSBool gcLocked)
TriggerGC(JSRuntime *rt)
{
JSRuntime *rt = cx->runtime;
#ifdef JS_THREADSAFE
JS_ASSERT(cx->requestDepth > 0);
#endif
JS_ASSERT(!rt->gcRunning);
if (rt->gcIsNeeded)
return;
@ -2270,10 +2319,12 @@ js_TriggerGC(JSContext *cx, JSBool gcLocked)
* Trigger the GC when it is safe to call an operation callback on any
* thread.
*/
rt->gcIsNeeded = JS_TRUE;
js_TriggerAllOperationCallbacks(rt, gcLocked);
rt->gcIsNeeded = true;
TriggerAllOperationCallbacks(rt);
}
} /* namespace js */
void
js_DestroyScriptsToGC(JSContext *cx, JSThreadData *data)
{
@ -2304,17 +2355,9 @@ FinalizeObject(JSContext *cx, JSObject *obj, unsigned thingKind)
if (clasp->finalize)
clasp->finalize(cx, obj);
DTrace::finalizeObject(obj);
Probes::finalizeObject(obj);
if (JS_LIKELY(obj->isNative())) {
JSScope *scope = obj->scope();
if (scope->isSharedEmpty())
static_cast<JSEmptyScope *>(scope)->dropFromGC(cx);
else
scope->destroy(cx);
}
if (obj->hasSlotsArray())
obj->freeSlotsArray(cx);
obj->finish(cx);
}
inline void
@ -2551,8 +2594,90 @@ FinalizeArenaList(JSContext *cx, unsigned thingKind)
namespace js {
bool
GCHelperThread::init(JSRuntime *rt)
{
if (!(wakeup = PR_NewCondVar(rt->gcLock)))
return false;
if (!(sweepingDone = PR_NewCondVar(rt->gcLock)))
return false;
thread = PR_CreateThread(PR_USER_THREAD, threadMain, rt, PR_PRIORITY_NORMAL,
PR_LOCAL_THREAD, PR_JOINABLE_THREAD, 0);
return !!thread;
}
void
GCHelperThread::finish(JSRuntime *rt)
{
PRThread *join = NULL;
{
AutoLockGC lock(rt);
if (thread && !shutdown) {
shutdown = true;
PR_NotifyCondVar(wakeup);
join = thread;
}
}
if (join) {
/* PR_DestroyThread is not necessary. */
PR_JoinThread(join);
}
if (wakeup)
PR_DestroyCondVar(wakeup);
if (sweepingDone)
PR_DestroyCondVar(sweepingDone);
}
/* static */
void
GCHelperThread::threadMain(void *arg)
{
JSRuntime *rt = static_cast<JSRuntime *>(arg);
rt->gcHelperThread.threadLoop(rt);
}
void
GCHelperThread::threadLoop(JSRuntime *rt)
{
AutoLockGC lock(rt);
while (!shutdown) {
/*
* Sweeping can be true here on the first iteration if a GC and the
* corresponding startBackgroundSweep call happen before this thread
* has a chance to run.
*/
if (!sweeping)
PR_WaitCondVar(wakeup, PR_INTERVAL_NO_TIMEOUT);
if (sweeping) {
AutoUnlockGC unlock(rt);
doSweep();
}
sweeping = false;
PR_NotifyAllCondVar(sweepingDone);
}
}
void
GCHelperThread::startBackgroundSweep(JSRuntime *rt)
{
/* The caller takes the GC lock. */
JS_ASSERT(!sweeping);
sweeping = true;
PR_NotifyCondVar(wakeup);
}
void
GCHelperThread::waitBackgroundSweepEnd(JSRuntime *rt)
{
AutoLockGC lock(rt);
while (sweeping)
PR_WaitCondVar(sweepingDone, PR_INTERVAL_NO_TIMEOUT);
}
JS_FRIEND_API(void)
BackgroundSweepTask::replenishAndFreeLater(void *ptr)
GCHelperThread::replenishAndFreeLater(void *ptr)
{
JS_ASSERT(freeCursor == freeCursorEnd);
do {
@ -2571,7 +2696,7 @@ BackgroundSweepTask::replenishAndFreeLater(void *ptr)
}
void
BackgroundSweepTask::run()
GCHelperThread::doSweep()
{
if (freeCursor) {
void **array = freeCursorEnd - FREE_ARRAY_LENGTH;
@ -2584,6 +2709,7 @@ BackgroundSweepTask::run()
void **array = *iter;
freeElementsAndArray(array, array + FREE_ARRAY_LENGTH);
}
freeVector.resize(0);
}
}
@ -2598,10 +2724,10 @@ SweepCompartments(JSContext *cx)
JSCompartment **read = rt->compartments.begin();
JSCompartment **end = rt->compartments.end();
JSCompartment **write = read;
/* Delete defaultCompartment only during runtime shutdown */
rt->defaultCompartment->marked = true;
while (read < end) {
JSCompartment *compartment = (*read++);
if (compartment->marked) {
@ -2622,7 +2748,7 @@ SweepCompartments(JSContext *cx)
/*
* Common cache invalidation and so forth that must be done before GC. Even if
* GCUntilDone calls GC several times, this work only needs to be done once.
* GCUntilDone calls GC several times, this work needs to be done only once.
*/
static void
PreGCCleanup(JSContext *cx, JSGCInvocationKind gckind)
@ -2653,8 +2779,7 @@ PreGCCleanup(JSContext *cx, JSGCInvocationKind gckind)
#endif
) {
rt->gcRegenShapes = true;
rt->gcRegenShapesScopeFlag ^= JSScope::SHAPE_REGEN;
rt->shapeGen = JSScope::LAST_RESERVED_SHAPE;
rt->shapeGen = Shape::LAST_RESERVED_SHAPE;
rt->protoHazardShape = 0;
}
@ -2674,7 +2799,7 @@ PreGCCleanup(JSContext *cx, JSGCInvocationKind gckind)
* to finish. Note that the caller does not hold rt->gcLock.
*/
static void
GC(JSContext *cx GCTIMER_PARAM)
MarkAndSweep(JSContext *cx GCTIMER_PARAM)
{
JSRuntime *rt = cx->runtime;
rt->gcNumber++;
@ -2686,11 +2811,11 @@ GC(JSContext *cx GCTIMER_PARAM)
JS_ASSERT(IS_GC_MARKING_TRACER(&gcmarker));
JS_ASSERT(gcmarker.getMarkColor() == BLACK);
rt->gcMarkingTracer = &gcmarker;
for (GCChunkSet::Range r(rt->gcChunkSet.all()); !r.empty(); r.popFront())
GCChunkInfo::fromChunk(r.front())->clearMarkBitmap();
js_TraceRuntime(&gcmarker);
MarkRuntime(&gcmarker);
js_MarkScriptFilenames(rt);
/*
@ -2705,9 +2830,15 @@ GC(JSContext *cx GCTIMER_PARAM)
(void) rt->gcCallback(cx, JSGC_MARK_END);
#ifdef JS_THREADSAFE
JS_ASSERT(!cx->gcSweepTask);
if (!rt->gcHelperThread.busy())
cx->gcSweepTask = new js::BackgroundSweepTask();
/*
* cx->gcBackgroundFree is set if we need several mark-and-sweep loops to
* finish the GC.
*/
if (!cx->gcBackgroundFree) {
/* Wait until the sweeping from the previous GC finishes. */
rt->gcHelperThread.waitBackgroundSweepEnd(rt);
cx->gcBackgroundFree = &rt->gcHelperThread;
}
#endif
/*
@ -2732,7 +2863,12 @@ GC(JSContext *cx GCTIMER_PARAM)
#ifdef DEBUG
/* Save the pre-sweep count of scope-mapped properties. */
rt->liveScopePropsPreSweep = rt->liveScopeProps;
rt->liveObjectPropsPreSweep = rt->liveObjectProps;
#endif
#ifdef JS_METHODJIT
/* Fix-up call ICs guarding against unreachable objects. */
mjit::SweepCallICs(cx);
#endif
/*
@ -2762,15 +2898,20 @@ GC(JSContext *cx GCTIMER_PARAM)
++i) {
FinalizeArenaList<JSString, FinalizeExternalString>(cx, i);
}
rt->gcNewArenaTriggerBytes = rt->gcBytes < GC_ARENA_ALLOCATION_TRIGGER ?
GC_ARENA_ALLOCATION_TRIGGER :
rt->gcBytes;
TIMESTAMP(sweepStringEnd);
SweepCompartments(cx);
/*
* Sweep the runtime's property tree after finalizing objects, in case any
* Sweep the runtime's property trees after finalizing objects, in case any
* had watchpoints referencing tree nodes.
*/
js::SweepScopeProperties(cx);
js::PropertyTree::sweepShapes(cx);
/*
* Sweep script filenames after sweeping functions in the generic loop
@ -2790,13 +2931,6 @@ GC(JSContext *cx GCTIMER_PARAM)
FreeGCChunks(rt);
TIMESTAMP(sweepDestroyEnd);
#ifdef JS_THREADSAFE
if (cx->gcSweepTask) {
rt->gcHelperThread.schedule(cx->gcSweepTask);
cx->gcSweepTask = NULL;
}
#endif
if (rt->gcCallback)
(void) rt->gcCallback(cx, JSGC_FINALIZE_END);
#ifdef DEBUG_srcnotesize
@ -2848,7 +2982,7 @@ LetOtherGCFinish(JSContext *cx)
JS_ASSERT(rt->gcThread);
JS_ASSERT(cx->thread != rt->gcThread);
size_t requestDebit = cx->thread->requestContext ? 1 : 0;
size_t requestDebit = cx->thread->requestDepth ? 1 : 0;
JS_ASSERT(requestDebit <= rt->requestCount);
#ifdef JS_TRACER
JS_ASSERT_IF(requestDebit == 0, !JS_ON_TRACE(cx));
@ -2879,38 +3013,53 @@ LetOtherGCFinish(JSContext *cx)
* finish before we wait.
*/
JS_ASSERT(rt->gcThread);
JS_THREAD_DATA(cx)->conservativeGC.enable(true);
/*
* Wait for GC to finish on the other thread, even if requestDebit is 0
* and even if GC has not started yet because the gcThread is waiting in
* BeginGCSession. This ensures that js_GC never returns without a full GC
* AutoGCSession. This ensures that js_GC never returns without a full GC
* cycle happening.
*/
do {
JS_AWAIT_GC_DONE(rt);
} while (rt->gcThread);
JS_THREAD_DATA(cx)->conservativeGC.disable();
cx->thread->gcWaiting = false;
rt->requestCount += requestDebit;
}
#endif
class AutoGCSession {
public:
explicit AutoGCSession(JSContext *cx);
~AutoGCSession();
private:
JSContext *context;
/* Disable copy constructor or assignments */
AutoGCSession(const AutoGCSession&);
void operator=(const AutoGCSession&);
};
/*
* Start a new GC session assuming no GC is running on this or other threads.
* Together with LetOtherGCFinish this function contains the rendezvous
* algorithm by which we stop the world for GC.
* Start a new GC session. Together with LetOtherGCFinish this function
* contains the rendezvous algorithm by which we stop the world for GC.
*
* This thread becomes the GC thread. Wait for all other threads to quiesce.
* Then set rt->gcRunning and return. The caller must call EndGCSession when
* GC work is done.
* Then set rt->gcRunning and return.
*/
static void
BeginGCSession(JSContext *cx)
AutoGCSession::AutoGCSession(JSContext *cx)
: context(cx)
{
JSRuntime *rt = cx->runtime;
#ifdef JS_THREADSAFE
if (rt->gcThread && rt->gcThread != cx->thread)
LetOtherGCFinish(cx);
#endif
JS_ASSERT(!rt->gcRunning);
#ifdef JS_THREADSAFE
@ -2936,8 +3085,7 @@ BeginGCSession(JSContext *cx)
* JS_NOTIFY_REQUEST_DONE, which will wake us up, is only called on
* rt->requestCount transitions to 0.
*/
JS_ASSERT_IF(cx->requestDepth != 0, cx->thread->requestContext);
size_t requestDebit = cx->thread->requestContext ? 1 : 0;
size_t requestDebit = cx->thread->requestDepth ? 1 : 0;
JS_ASSERT(requestDebit <= rt->requestCount);
if (requestDebit != rt->requestCount) {
rt->requestCount -= requestDebit;
@ -2969,14 +3117,12 @@ BeginGCSession(JSContext *cx)
}
/* End the current GC session and allow other threads to proceed. */
static void
EndGCSession(JSContext *cx)
AutoGCSession::~AutoGCSession()
{
JSRuntime *rt = cx->runtime;
JSRuntime *rt = context->runtime;
rt->gcRunning = false;
#ifdef JS_THREADSAFE
JS_ASSERT(rt->gcThread == cx->thread);
JS_ASSERT(rt->gcThread == context->thread);
rt->gcThread = NULL;
JS_NOTIFY_GC_DONE(rt);
#endif
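Elsewhere in this patch (GCUntilDone, SetProtoCheckingForCycles, TraceRuntime) the session is entered through the same three-object RAII pattern; the sketch below only restates that pattern with a made-up caller name, for illustration.
/* Sketch of the caller-side pattern used in this patch; DoStopTheWorldWork
 * is a hypothetical name. AutoGCSession waits out any other GC thread and
 * sets rt->gcRunning; its destructor clears the flag and notifies waiters. */
static void
DoStopTheWorldWork(JSContext *cx)
{
    JSRuntime *rt = cx->runtime;
    AutoLockGC lock(rt);          /* the session is entered with the GC lock held */
    AutoGCSession gcsession(cx);
    {
        AutoUnlockGC unlock(rt);  /* the actual work runs without the lock        */
        /* ... mark, sweep, or other work requiring a quiescent runtime ... */
    }                             /* unlock's destructor re-takes the lock        */
}                                 /* ~AutoGCSession runs with the lock held       */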
@ -2995,44 +3141,27 @@ GCUntilDone(JSContext *cx, JSGCInvocationKind gckind GCTIMER_PARAM)
JSRuntime *rt = cx->runtime;
/* Recursive GC or a call from another thread restarts the GC cycle. */
#ifndef JS_THREADSAFE
if (rt->gcRunning) {
if (rt->gcMarkAndSweep) {
rt->gcPoke = true;
#ifdef JS_THREADSAFE
JS_ASSERT(rt->gcThread);
if (rt->gcThread != cx->thread) {
/* We do not return until another GC finishes. */
LetOtherGCFinish(cx);
}
#endif
return;
}
#else /* JS_THREADSAFE */
if (rt->gcThread) {
rt->gcPoke = true;
if (cx->thread == rt->gcThread) {
JS_ASSERT(rt->gcRunning);
return;
}
LetOtherGCFinish(cx);
/*
* Check if the GC on another thread has collected the garbage and
* it was not a set slot request.
*/
if (!rt->gcPoke)
return;
}
#endif /* JS_THREADSAFE */
BeginGCSession(cx);
AutoGCSession gcsession(cx);
METER(rt->gcStats.poke++);
/*
* Do not scan the current thread on the shutdown or when the GC is called
* outside a request.
*/
bool scanGCThreadStack = (rt->state != JSRTS_LANDING);
#ifdef JS_THREADSAFE
scanGCThreadStack &= !!cx->thread->requestContext;
#endif
if (scanGCThreadStack)
JS_THREAD_DATA(cx)->conservativeGC.enable(true);
bool firstRun = true;
rt->gcMarkAndSweep = true;
#ifdef JS_THREADSAFE
JS_ASSERT(!cx->gcBackgroundFree);
#endif
do {
rt->gcPoke = false;
@ -3042,7 +3171,7 @@ GCUntilDone(JSContext *cx, JSGCInvocationKind gckind GCTIMER_PARAM)
TIMESTAMP(startMark);
firstRun = false;
}
GC(cx GCTIMER_ARG);
MarkAndSweep(cx GCTIMER_ARG);
// GC again if:
// - another thread, not in a request, called js_GC
@ -3050,13 +3179,15 @@ GCUntilDone(JSContext *cx, JSGCInvocationKind gckind GCTIMER_PARAM)
// - a finalizer called js_RemoveRoot or js_UnlockGCThingRT.
} while (rt->gcPoke);
if (scanGCThreadStack)
JS_THREAD_DATA(cx)->conservativeGC.disable();
#ifdef JS_THREADSAFE
JS_ASSERT(cx->gcBackgroundFree == &rt->gcHelperThread);
cx->gcBackgroundFree = NULL;
rt->gcHelperThread.startBackgroundSweep(rt);
#endif
rt->gcMarkAndSweep = false;
rt->gcRegenShapes = false;
rt->setGCLastBytes(rt->gcBytes);
EndGCSession(cx);
}
/*
@ -3077,6 +3208,8 @@ js_GC(JSContext *cx, JSGCInvocationKind gckind)
if (rt->state != JSRTS_UP && gckind != GC_LAST_CONTEXT)
return;
RecordNativeStackTopForGC(cx);
GCTIMER_BEGIN();
do {
@ -3120,54 +3253,71 @@ namespace js {
bool
SetProtoCheckingForCycles(JSContext *cx, JSObject *obj, JSObject *proto)
{
JSRuntime *rt = cx->runtime;
/*
* This function cannot be called during the GC and always requires a
* request.
*/
#ifdef JS_THREADSAFE
JS_ASSERT(cx->requestDepth);
#endif
AutoLockGC lock(rt);
JS_ASSERT(cx->thread->requestDepth);
/*
* The set slot request cannot be called recursively and must not be
* called during a normal GC. So if at this point JSRuntime::gcThread is
* set it must be a GC or a set slot request from another thread.
* This is only necessary if AutoGCSession below would wait for GC to
* finish on another thread, but to capture the minimal stack space and
* for code simplicity we do it here unconditionally.
*/
#ifdef JS_THREADSAFE
if (rt->gcThread) {
JS_ASSERT(cx->thread != rt->gcThread);
LetOtherGCFinish(cx);
}
RecordNativeStackTopForGC(cx);
#endif
BeginGCSession(cx);
JSRuntime *rt = cx->runtime;
AutoLockGC lock(rt);
AutoGCSession gcsession(cx);
AutoUnlockGC unlock(rt);
bool cycle;
{
AutoUnlockGC unlock(rt);
cycle = false;
for (JSObject *obj2 = proto; obj2;) {
obj2 = obj2->wrappedObject(cx);
if (obj2 == obj) {
cycle = true;
break;
}
obj2 = obj2->getProto();
bool cycle = false;
for (JSObject *obj2 = proto; obj2;) {
obj2 = obj2->wrappedObject(cx);
if (obj2 == obj) {
cycle = true;
break;
}
if (!cycle)
obj->setProto(proto);
obj2 = obj2->getProto();
}
EndGCSession(cx);
if (!cycle)
obj->setProto(proto);
return !cycle;
}
void
TraceRuntime(JSTracer *trc)
{
LeaveTrace(trc->context);
#ifdef JS_THREADSAFE
{
JSContext *cx = trc->context;
JSRuntime *rt = cx->runtime;
AutoLockGC lock(rt);
if (rt->gcThread != cx->thread) {
AutoGCSession gcsession(cx);
AutoUnlockGC unlock(rt);
RecordNativeStackTopForGC(trc->context);
MarkRuntime(trc);
return;
}
}
#else
RecordNativeStackTopForGC(trc->context);
#endif
/*
* Calls from inside a normal GC or recursive calls are OK and do not
* require session setup.
*/
MarkRuntime(trc);
}
JSCompartment *
NewCompartment(JSContext *cx, JSPrincipals *principals)
{

View file

@ -51,7 +51,6 @@
#include "jsbit.h"
#include "jsgcchunk.h"
#include "jsutil.h"
#include "jstask.h"
#include "jsvector.h"
#include "jsversion.h"
#include "jsobj.h"
@ -65,6 +64,18 @@
*/
#define JSTRACE_LIMIT 3
/*
* Lower limit after which we limit the heap growth
*/
const size_t GC_ARENA_ALLOCATION_TRIGGER = 25 * js::GC_CHUNK_SIZE;
/*
* A GC is triggered once the number of newly allocated arenas reaches
* 1.5 times the number of arenas live after the last GC (this kicks in
* only above the GC_ARENA_ALLOCATION_TRIGGER lower limit).
*/
const float GC_HEAP_GROWTH_FACTOR = 1.5;
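These two constants feed the allocation trigger this patch adds to NewGCArena (see the jsgc.cpp hunk above); the helper name in the sketch below is made up and shown for illustration only, but the condition and the way gcNewArenaTriggerBytes is reset after each GC are taken from the patch.
/* Sketch: the growth check from NewGCArena, plus the post-GC reset that
 * keeps the trigger at or above GC_ARENA_ALLOCATION_TRIGGER. */
static inline bool
HeapGrowthWantsGC(JSRuntime *rt)   /* hypothetical helper, not in the patch */
{
    return rt->gcBytes >= rt->gcMaxBytes ||
           rt->gcBytes > GC_HEAP_GROWTH_FACTOR * rt->gcNewArenaTriggerBytes;
}
/* After each GC (see MarkAndSweep above):
 *   rt->gcNewArenaTriggerBytes = Max(rt->gcBytes, GC_ARENA_ALLOCATION_TRIGGER);
 * so the heap may grow by roughly 1.5x before the next GC is requested. */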
const uintN JS_EXTERNAL_STRING_LIMIT = 8;
/*
@ -165,21 +176,22 @@ js_GCThingIsMarked(void *thing, uint32 color);
extern void
js_TraceStackFrame(JSTracer *trc, JSStackFrame *fp);
namespace js {
extern JS_REQUIRES_STACK void
js_TraceRuntime(JSTracer *trc);
extern JS_REQUIRES_STACK JS_FRIEND_API(void)
js_TraceContext(JSTracer *trc, JSContext *acx);
/*
* Schedule the GC call at a later safe point.
*/
#ifndef JS_THREADSAFE
# define js_TriggerGC(cx, gcLocked) js_TriggerGC (cx)
#endif
MarkRuntime(JSTracer *trc);
extern void
js_TriggerGC(JSContext *cx, JSBool gcLocked);
TraceRuntime(JSTracer *trc);
extern JS_REQUIRES_STACK JS_FRIEND_API(void)
MarkContext(JSTracer *trc, JSContext *acx);
/* Must be called with GC lock taken. */
extern void
TriggerGC(JSRuntime *rt);
} /* namespace js */
/*
* Kinds of js_GC invocation.
@ -287,7 +299,7 @@ js_NewGCExternalString(JSContext *cx, uintN type)
return (JSString *) js_NewFinalizableGCThing(cx, type);
}
static inline JSFunction*
static inline JSFunction *
js_NewGCFunction(JSContext *cx)
{
JSFunction* obj = (JSFunction *)js_NewFinalizableGCThing(cx, FINALIZE_FUNCTION);
@ -346,18 +358,24 @@ namespace js {
/*
* During the finalization we do not free immediately. Rather we add the
* corresponding pointers to a buffer which we later release on the
* background thread.
* corresponding pointers to a buffer which we later release on a separate
* thread.
*
* The buffer is implemented as a vector of 64K arrays of pointers, not as a
* simple vector, to avoid realloc calls during the vector growth and to not
* bloat the binary size of the inlined freeLater method. Any OOM during
* buffer growth results in the pointer being freed immediately.
*/
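For scale (derived from the constants below, not stated in the patch): FREE_ARRAY_SIZE is 2^16 bytes, so FREE_ARRAY_LENGTH is 65536 / sizeof(void *), i.e. 8192 pointers per array on a 64-bit build and 16384 on a 32-bit one; the inlined freeLater therefore falls back to the out-of-line replenishAndFreeLater only once per that many finalized pointers.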
class BackgroundSweepTask : public JSBackgroundTask {
class GCHelperThread {
static const size_t FREE_ARRAY_SIZE = size_t(1) << 16;
static const size_t FREE_ARRAY_LENGTH = FREE_ARRAY_SIZE / sizeof(void *);
PRThread* thread;
PRCondVar* wakeup;
PRCondVar* sweepingDone;
bool shutdown;
bool sweeping;
Vector<void **, 16, js::SystemAllocPolicy> freeVector;
void **freeCursor;
void **freeCursorEnd;
@ -372,18 +390,37 @@ class BackgroundSweepTask : public JSBackgroundTask {
js_free(array);
}
public:
BackgroundSweepTask()
: freeCursor(NULL), freeCursorEnd(NULL) { }
static void threadMain(void* arg);
void freeLater(void* ptr) {
void threadLoop(JSRuntime *rt);
void doSweep();
public:
GCHelperThread()
: thread(NULL),
wakeup(NULL),
sweepingDone(NULL),
shutdown(false),
sweeping(false),
freeCursor(NULL),
freeCursorEnd(NULL) { }
bool init(JSRuntime *rt);
void finish(JSRuntime *rt);
/* Must be called with GC lock taken. */
void startBackgroundSweep(JSRuntime *rt);
/* Must be called outside the GC lock. */
void waitBackgroundSweepEnd(JSRuntime *rt);
void freeLater(void *ptr) {
JS_ASSERT(!sweeping);
if (freeCursor != freeCursorEnd)
*freeCursor++ = ptr;
else
replenishAndFreeLater(ptr);
}
virtual void run();
};
#endif /* JS_THREADSAFE */
@ -426,11 +463,27 @@ struct ConservativeGCThreadData {
jsuword words[JS_HOWMANY(sizeof(jmp_buf), sizeof(jsuword))];
} registerSnapshot;
int enableCount;
/*
* The cycle collector uses this to communicate that the native stack of
* the GC thread should be scanned only if the thread has more than the
* given threshold of requests.
*/
unsigned requestThreshold;
JS_NEVER_INLINE JS_FRIEND_API(void) enable(bool knownStackBoundary = false);
JS_FRIEND_API(void) disable();
bool isEnabled() const { return enableCount > 0; }
JS_NEVER_INLINE void recordStackTop();
#ifdef JS_THREADSAFE
void updateForRequestEnd(unsigned suspendCount) {
if (suspendCount)
recordStackTop();
else
nativeStackTop = NULL;
}
#endif
bool hasStackToScan() const {
return !!nativeStackTop;
}
};
struct GCMarker : public JSTracer {
@ -448,7 +501,7 @@ struct GCMarker : public JSTracer {
#if defined(JS_DUMP_CONSERVATIVE_GC_ROOTS) || defined(JS_GCMETER)
ConservativeGCStats conservativeStats;
#endif
#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS
struct ConservativeRoot { void *thing; uint32 traceKind; };
Vector<ConservativeRoot, 0, SystemAllocPolicy> conservativeRoots;
@ -594,6 +647,9 @@ MarkValueRange(JSTracer *trc, size_t len, Value *vec, const char *name)
MarkValueRange(trc, vec, vec + len, name);
}
void
MarkStackRangeConservatively(JSTracer *trc, Value *begin, Value *end);
static inline void
MarkId(JSTracer *trc, jsid id)
{

View file

@ -37,7 +37,7 @@
#include "jsgcchunk.h"
#ifdef XP_WIN
# include <windows.h>
# include "jswin.h"
# ifdef _MSC_VER
# pragma warning( disable: 4267 4996 4146 )

Diff not shown because it is too large.

View file

@ -51,30 +51,40 @@
#include "jsvalue.h"
typedef struct JSFrameRegs {
JSStackFrame *fp; /* active frame */
jsbytecode *pc; /* program counter */
js::Value *sp; /* stack pointer */
jsbytecode *pc; /* program counter */
JSStackFrame *fp; /* active frame */
} JSFrameRegs;
/* JS stack frame flags. */
enum JSFrameFlags {
JSFRAME_CONSTRUCTING = 0x01, /* frame is for a constructor invocation */
JSFRAME_COMPUTED_THIS = 0x02, /* frame.thisv was computed already and
JSVAL_IS_OBJECT(thisv) */
JSFRAME_ASSIGNING = 0x04, /* a complex (not simplex JOF_ASSIGNING) op
JSFRAME_CONSTRUCTING = 0x01, /* frame is for a constructor invocation */
JSFRAME_OVERRIDE_ARGS = 0x02, /* overridden arguments local variable */
JSFRAME_ASSIGNING = 0x04, /* a complex (not simplex JOF_ASSIGNING) op
is currently assigning to a property */
JSFRAME_DEBUGGER = 0x08, /* frame for JS_EvaluateInStackFrame */
JSFRAME_EVAL = 0x10, /* frame for obj_eval */
JSFRAME_FLOATING_GENERATOR = 0x20, /* frame copy stored in a generator obj */
JSFRAME_YIELDING = 0x40, /* js_Interpret dispatched JSOP_YIELD */
JSFRAME_GENERATOR = 0x80, /* frame belongs to generator-iterator */
JSFRAME_OVERRIDE_ARGS = 0x100, /* overridden arguments local variable */
JSFRAME_DUMMY = 0x200, /* frame is a dummy frame */
JSFRAME_IN_IMACRO = 0x400, /* frame has imacpc value available */
JSFRAME_DEBUGGER = 0x08, /* frame for JS_EvaluateInStackFrame */
JSFRAME_EVAL = 0x10, /* frame for obj_eval */
JSFRAME_FLOATING_GENERATOR = 0x20, /* frame copy stored in a generator obj */
JSFRAME_YIELDING = 0x40, /* js_Interpret dispatched JSOP_YIELD */
JSFRAME_GENERATOR = 0x80, /* frame belongs to generator-iterator */
JSFRAME_BAILED_AT_RETURN = 0x100, /* bailed at JSOP_RETURN */
JSFRAME_DUMMY = 0x200, /* frame is a dummy frame */
JSFRAME_IN_IMACRO = 0x400, /* frame has imacpc value available */
JSFRAME_SPECIAL = JSFRAME_DEBUGGER | JSFRAME_EVAL
};
/* Flags to toggle Interpret() execution. */
enum JSInterpFlags {
JSINTERP_RECORD = 0x01, /* interpreter has been started to record/run traces */
JSINTERP_SAFEPOINT = 0x02 /* interpreter should leave on a method JIT safe point */
};
namespace js { namespace mjit {
class Compiler;
class InlineFrameAssembler;
} }
/*
* JS stack frame, may be allocated on the C stack by native callers. Always
* allocated on cx->stackPool for calls from the interpreter to an interpreted
@ -88,21 +98,34 @@ struct JSStackFrame
private:
JSObject *callobj; /* lazily created Call object */
JSObject *argsobj; /* lazily created arguments object */
JSObject *scopeChain; /* current scope chain */
JSObject *blockChain; /* current static block */
jsbytecode *imacpc; /* null or interpreter macro call pc */
void *annotation; /* used by Java security */
void *hookData; /* debugger call hook data */
JSVersion callerVersion; /* dynamic version of calling script */
JSScript *script; /* script being interpreted */
JSFunction *fun; /* function being called or null */
/*
* The value of |this| in this stack frame, or JSVAL_NULL if |this|
* is to be computed lazily on demand.
*
* thisv is eagerly initialized for non-function-call frames and
* qualified method calls, but lazily initialized in most unqualified
* function calls. See getThisObject().
*
* Usually if argv != NULL then thisv == argv[-1], but natives may
* assign to argv[-1]. Also, obj_eval can trigger a special case
* where two stack frames have the same argv. If one of the frames fills
* in both argv[-1] and thisv, the other frame's thisv is left null.
*/
js::Value thisv; /* "this" pointer if in method */
js::Value rval; /* function return value */
uintN argc; /* actual argument count */
JSFunction *fun; /* function being called or null */
public:
uintN argc; /* actual argument count */
js::Value *argv; /* base of argument stack slots */
private:
js::Value rval; /* function return value */
void *annotation; /* used by Java security */
public:
/* Maintained by StackSpace operations */
JSStackFrame *down; /* previous frame, part of
stack layout invariant */
@ -111,10 +134,21 @@ struct JSStackFrame
static jsbytecode *const sInvalidPC;
#endif
uint32 flags; /* frame flags -- see below */
void *ncode; /* jit return pc */
void *padding;
private:
JSObject *scopeChain;
JSObject *blockChain;
public:
uint32 flags; /* frame flags -- see below */
private:
/* Members only needed for inline calls. */
void *hookData; /* debugger call hook data */
JSVersion callerVersion; /* dynamic version of calling script */
public:
/* Get the frame's current bytecode, assuming |this| is in |cx|. */
jsbytecode *pc(JSContext *cx) const;
@ -264,6 +298,10 @@ struct JSStackFrame
blockChain = obj;
}
static size_t offsetBlockChain() {
return offsetof(JSStackFrame, blockChain);
}
/* IMacroPC accessors. */
bool hasIMacroPC() const { return flags & JSFRAME_IN_IMACRO; }
@ -310,6 +348,10 @@ struct JSStackFrame
annotation = annot;
}
static size_t offsetAnnotation() {
return offsetof(JSStackFrame, annotation);
}
/* Debugger hook data accessors */
bool hasHookData() const {
@ -329,6 +371,10 @@ struct JSStackFrame
hookData = data;
}
static size_t offsetHookData() {
return offsetof(JSStackFrame, hookData);
}
/* Version accessors */
JSVersion getCallerVersion() const {
@ -339,6 +385,10 @@ struct JSStackFrame
callerVersion = version;
}
static size_t offsetCallerVersion() {
return offsetof(JSStackFrame, callerVersion);
}
/* Script accessors */
bool hasScript() const {
@ -385,6 +435,10 @@ struct JSStackFrame
return fun;
}
static size_t offsetFunction() {
return offsetof(JSStackFrame, fun);
}
size_t numFormalArgs() const {
JS_ASSERT(!isEvalFrame());
return getFunction()->nargs;
@ -404,6 +458,10 @@ struct JSStackFrame
thisv = v;
}
static size_t offsetThisValue() {
return offsetof(JSStackFrame, thisv);
}
/* Return-value accessors */
const js::Value& getReturnValue() {
@ -469,7 +527,7 @@ struct JSStackFrame
/*
* Fallible getter to compute the correct callee function object, which may
* require deferred cloning due to JSScope::methodReadBarrier. For a frame
* require deferred cloning due to JSObject::methodReadBarrier. For a frame
* with null fun member, return true with *vp set from this->calleeValue(),
* which may not be an object (it could be undefined).
*/
@ -505,6 +563,9 @@ struct JSStackFrame
bool isDummyFrame() const { return !!(flags & JSFRAME_DUMMY); }
bool isEvalFrame() const { return !!(flags & JSFRAME_EVAL); }
private:
JSObject *computeThisObject(JSContext *cx);
/* Contains static assertions for member alignment, don't call. */
inline void staticAsserts();
};
@ -521,6 +582,15 @@ JSStackFrame::staticAsserts()
{
JS_STATIC_ASSERT(offsetof(JSStackFrame, rval) % sizeof(js::Value) == 0);
JS_STATIC_ASSERT(offsetof(JSStackFrame, thisv) % sizeof(js::Value) == 0);
/* Static assert for x86 trampolines in MethodJIT.cpp */
#if defined(JS_METHODJIT)
# if defined(JS_CPU_X86)
JS_STATIC_ASSERT(offsetof(JSStackFrame, rval) == 0x28);
# elif defined(JS_CPU_X64)
JS_STATIC_ASSERT(offsetof(JSStackFrame, rval) == 0x40);
# endif
#endif
}
static JS_INLINE uintN
@ -557,17 +627,24 @@ namespace js {
/*
* For a call with arguments argv including argv[-1] (nominal |this|) and
* argv[-2] (callee) replace null |this| with callee's parent, replace
* primitive values with the equivalent wrapper objects and censor activation
* objects as, per ECMA-262, they may not be referred to by |this|. argv[-1]
* must not be a JSVAL_VOID.
* argv[-2] (callee) replace null |this| with callee's parent and replace
* primitive values with the equivalent wrapper objects. argv[-1] must
* not be JSVAL_VOID or an activation object.
*/
extern JSObject *
extern bool
ComputeThisFromArgv(JSContext *cx, js::Value *argv);
JS_ALWAYS_INLINE JSObject *
ComputeThisFromVp(JSContext *cx, js::Value *vp)
{
extern bool ComputeThisFromArgv(JSContext *, js::Value *);
return ComputeThisFromArgv(cx, vp + 2) ? &vp[1].toObject() : NULL;
}
JS_ALWAYS_INLINE bool
ComputeThisFromVpInPlace(JSContext *cx, js::Value *vp)
{
extern bool ComputeThisFromArgv(JSContext *, js::Value *);
return ComputeThisFromArgv(cx, vp + 2);
}
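As a rough illustration only (hypothetical native, not part of the patch), the inline helper above is meant for Value-based natives that need an object |this|, propagating failure when the null-|this| substitution or primitive boxing fails:
/* Hypothetical native; sketch of the intended ComputeThisFromVp call pattern. */
static JSBool
MyMethod(JSContext *cx, uintN argc, js::Value *vp)
{
    JSObject *obj = js::ComputeThisFromVp(cx, vp);
    if (!obj)
        return JS_FALSE;   /* propagate failure from ComputeThisFromArgv */
    /* ... operate on obj ... */
    vp->setUndefined();
    return JS_TRUE;
}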
@ -600,7 +677,7 @@ struct CallArgs
uintN argc() const { return argc_; }
Value &rval() const { return argv_[-2]; }
JSObject *computeThis(JSContext *cx) const {
bool computeThis(JSContext *cx) const {
return ComputeThisFromArgv(cx, argv_);
}
};
@ -637,40 +714,57 @@ Invoke(JSContext *cx, const CallArgs &args, uintN flags);
#define JSINVOKE_FUNFLAGS JSINVOKE_CONSTRUCT
/*
* "Internal" calls may come from C or C++ code using a JSContext on which no
* "External" calls may come from C or C++ code using a JSContext on which no
* JS is running (!cx->fp), so they may need to push a dummy JSStackFrame.
*/
extern JSBool
InternalInvoke(JSContext *cx, const Value &thisv, const Value &fval, uintN flags,
extern bool
ExternalInvoke(JSContext *cx, const Value &thisv, const Value &fval,
uintN argc, Value *argv, Value *rval);
static JS_ALWAYS_INLINE bool
InternalCall(JSContext *cx, JSObject *obj, const Value &fval,
uintN argc, Value *argv, Value *rval)
ExternalInvoke(JSContext *cx, JSObject *obj, const Value &fval,
uintN argc, Value *argv, Value *rval)
{
return InternalInvoke(cx, ObjectOrNullValue(obj), fval, 0, argc, argv, rval);
}
static JS_ALWAYS_INLINE bool
InternalConstruct(JSContext *cx, JSObject *obj, const Value &fval,
uintN argc, Value *argv, Value *rval)
{
return InternalInvoke(cx, ObjectOrNullValue(obj), fval, JSINVOKE_CONSTRUCT, argc, argv, rval);
return ExternalInvoke(cx, ObjectOrNullValue(obj), fval, argc, argv, rval);
}
extern bool
InternalGetOrSet(JSContext *cx, JSObject *obj, jsid id, const Value &fval,
ExternalGetOrSet(JSContext *cx, JSObject *obj, jsid id, const Value &fval,
JSAccessMode mode, uintN argc, Value *argv, Value *rval);
extern JS_FORCES_STACK bool
Execute(JSContext *cx, JSObject *chain, JSScript *script,
JSStackFrame *down, uintN flags, Value *result);
/*
* These two functions invoke a function called from a constructor context
* (e.g. 'new'). InvokeConstructor handles the general case where a new object
* needs to be created for/by the constructor. InvokeConstructorWithGivenThis
* directly calls the constructor with the given 'this', hence the caller must
* understand the semantics of the constructor call.
*/
extern JS_REQUIRES_STACK bool
InvokeConstructor(JSContext *cx, const CallArgs &args);
extern JS_REQUIRES_STACK bool
Interpret(JSContext *cx);
InvokeConstructorWithGivenThis(JSContext *cx, JSObject *thisobj, const Value &fval,
uintN argc, Value *argv, Value *rval);
/*
* Executes a script with the given scope chain in the context of the given
* frame.
*/
extern JS_FORCES_STACK bool
Execute(JSContext *cx, JSObject *chain, JSScript *script,
JSStackFrame *down, uintN flags, Value *result);
/*
* Execute the caller-initialized frame for a user-defined script or function
* pointed to by cx->fp until completion or error.
*/
extern JS_REQUIRES_STACK bool
Interpret(JSContext *cx, JSStackFrame *stopFp, uintN inlineCallCount = 0, uintN interpFlags = 0);
extern JS_REQUIRES_STACK bool
RunScript(JSContext *cx, JSScript *script, JSFunction *fun, JSObject *scopeChain);
#define JSPROP_INITIALIZER 0x100 /* NB: Not a valid property attribute. */
@ -755,19 +849,6 @@ js_EnterWith(JSContext *cx, jsint stackIndex);
extern JS_REQUIRES_STACK void
js_LeaveWith(JSContext *cx);
extern JS_REQUIRES_STACK js::Class *
js_IsActiveWithOrBlock(JSContext *cx, JSObject *obj, int stackDepth);
/*
* Unwind block and scope chains to match the given depth. The function sets
* fp->sp on return to stackDepth.
*/
extern JS_REQUIRES_STACK JSBool
js_UnwindScope(JSContext *cx, jsint stackDepth, JSBool normalUnwind);
extern JSBool
js_OnUnknownMethod(JSContext *cx, js::Value *vp);
/*
* Find the results of incrementing or decrementing *vp. For pre-increments,
* both *vp and *vp2 will contain the result on return. For post-increments,
@ -795,17 +876,24 @@ js_MeterSlotOpcode(JSOp op, uint32 slot);
#endif /* JS_LONE_INTERPRET */
/*
* Unwind block and scope chains to match the given depth. The function sets
* fp->sp on return to stackDepth.
*/
extern JS_REQUIRES_STACK JSBool
js_UnwindScope(JSContext *cx, jsint stackDepth, JSBool normalUnwind);
extern JSBool
js_OnUnknownMethod(JSContext *cx, js::Value *vp);
extern JS_REQUIRES_STACK js::Class *
js_IsActiveWithOrBlock(JSContext *cx, JSObject *obj, int stackDepth);
inline JSObject *
JSStackFrame::getThisObject(JSContext *cx)
{
JS_ASSERT(!isDummyFrame());
if (flags & JSFRAME_COMPUTED_THIS)
return &thisv.toObject();
if (!js::ComputeThisFromArgv(cx, argv))
return NULL;
setThisValue(argv[-1]);
flags |= JSFRAME_COMPUTED_THIS;
return &thisv.toObject();
return thisv.isPrimitive() ? computeThisObject(cx) : &thisv.toObject();
}
#endif /* jsinterp_h___ */

View file

@ -87,7 +87,8 @@ static JSObject *iterator_iterator(JSContext *cx, JSObject *obj, JSBool keysonly
Class js_IteratorClass = {
"Iterator",
JSCLASS_HAS_PRIVATE | JSCLASS_HAS_CACHED_PROTO(JSProto_Iterator) | JSCLASS_MARK_IS_TRACE,
JSCLASS_HAS_PRIVATE | JSCLASS_HAS_CACHED_PROTO(JSProto_Iterator) |
JSCLASS_MARK_IS_TRACE,
PropertyStub, /* addProperty */
PropertyStub, /* delProperty */
PropertyStub, /* getProperty */
@ -256,12 +257,13 @@ EnumerateNativeProperties(JSContext *cx, JSObject *obj, JSObject *pobj, uintN fl
size_t initialLength = props.length();
/* Collect all unique properties from this object's scope. */
JSScope *scope = pobj->scope();
for (JSScopeProperty *sprop = scope->lastProperty(); sprop; sprop = sprop->parent) {
if (!JSID_IS_DEFAULT_XML_NAMESPACE(sprop->id) &&
!sprop->isAlias() &&
!Enumerate<EnumPolicy>(cx, obj, pobj, sprop->id, sprop->enumerable(), sprop->isSharedPermanent(),
flags, ht, props))
for (Shape::Range r = pobj->lastProperty()->all(); !r.empty(); r.popFront()) {
const Shape &shape = r.front();
if (!JSID_IS_DEFAULT_XML_NAMESPACE(shape.id) &&
!shape.isAlias() &&
!Enumerate<EnumPolicy>(cx, obj, pobj, shape.id, shape.enumerable(),
shape.isSharedPermanent(), flags, ht, props))
{
return false;
}
@ -269,7 +271,7 @@ EnumerateNativeProperties(JSContext *cx, JSObject *obj, JSObject *pobj, uintN fl
Reverse(props.begin() + initialLength, props.end());
JS_UNLOCK_SCOPE(cx, scope);
JS_UNLOCK_OBJ(cx, pobj);
return true;
}
@ -406,7 +408,7 @@ GetCustomIterator(JSContext *cx, JSObject *obj, uintN flags, Value *vp)
/* Otherwise call it and return that object. */
LeaveTrace(cx);
Value arg = BooleanValue((flags & JSITER_FOREACH) == 0);
if (!InternalCall(cx, obj, *vp, 1, &arg, vp))
if (!ExternalInvoke(cx, obj, *vp, 1, &arg, vp))
return false;
if (vp->isPrimitive()) {
/*
@ -455,8 +457,8 @@ NewIteratorObject(JSContext *cx, uintN flags)
JSObject *obj = js_NewGCObject(cx);
if (!obj)
return false;
obj->map = cx->runtime->emptyEnumeratorScope->hold();
obj->init(&js_IteratorClass, NULL, NULL, NullValue());
obj->init(&js_IteratorClass, NULL, NULL, NullValue(), cx);
obj->setMap(cx->runtime->emptyEnumeratorShape);
return obj;
}
@ -479,11 +481,11 @@ NativeIterator::allocateKeyIterator(JSContext *cx, uint32 slength, const AutoIdV
}
NativeIterator *
NativeIterator::allocateValueIterator(JSContext *cx, uint32 slength, const AutoValueVector &props)
NativeIterator::allocateValueIterator(JSContext *cx, const AutoValueVector &props)
{
size_t plength = props.length();
NativeIterator *ni = (NativeIterator *)
cx->malloc(sizeof(NativeIterator) + plength * sizeof(Value) + slength * sizeof(uint32));
cx->malloc(sizeof(NativeIterator) + plength * sizeof(Value));
if (!ni)
return NULL;
ni->props_array = ni->props_cursor = (Value *) (ni + 1);
@ -494,15 +496,13 @@ NativeIterator::allocateValueIterator(JSContext *cx, uint32 slength, const AutoV
}
inline void
NativeIterator::init(JSObject *obj, uintN flags, const uint32 *sarray, uint32 slength, uint32 key)
NativeIterator::init(JSObject *obj, uintN flags, uint32 slength, uint32 key)
{
this->obj = obj;
this->flags = flags;
this->shapes_array = (uint32 *) this->props_end;
this->shapes_length = slength;
this->shapes_key = key;
if (slength)
memcpy(this->shapes_array, sarray, slength * sizeof(uint32));
}
static inline void
@ -512,12 +512,15 @@ RegisterEnumerator(JSContext *cx, JSObject *iterobj, NativeIterator *ni)
if (ni->flags & JSITER_ENUMERATE) {
ni->next = cx->enumerators;
cx->enumerators = iterobj;
JS_ASSERT(!(ni->flags & JSITER_ACTIVE));
ni->flags |= JSITER_ACTIVE;
}
}
static inline bool
VectorToKeyIterator(JSContext *cx, JSObject *obj, uintN flags, AutoIdVector &keys,
const uint32 *sarray, uint32 slength, uint32 key, Value *vp)
uint32 slength, uint32 key, Value *vp)
{
JS_ASSERT(!(flags & JSITER_FOREACH));
@ -525,11 +528,27 @@ VectorToKeyIterator(JSContext *cx, JSObject *obj, uintN flags, AutoIdVector &key
if (!iterobj)
return false;
NativeIterator *ni = NativeIterator::allocateKeyIterator(cx, slength, keys);
if (!ni)
return NULL;
ni->init(obj, flags, sarray, slength, key);
return false;
ni->init(obj, flags, slength, key);
if (slength) {
/*
* Fill in the shape array from scratch. We can't use the array that was
* computed for the cache lookup earlier, as constructing iterobj could
* have triggered a shape-regenerating GC. Don't bother with regenerating
* the shape key; if such a GC *does* occur, we can only get hits through
* the one-slot lastNativeIterator cache.
*/
JSObject *pobj = obj;
size_t ind = 0;
do {
ni->shapes_array[ind++] = pobj->shape();
pobj = pobj->getProto();
} while (pobj);
JS_ASSERT(ind == slength);
}
iterobj->setNativeIterator(ni);
vp->setObject(*iterobj);
@ -541,12 +560,12 @@ VectorToKeyIterator(JSContext *cx, JSObject *obj, uintN flags, AutoIdVector &key
bool
VectorToKeyIterator(JSContext *cx, JSObject *obj, uintN flags, AutoIdVector &props, Value *vp)
{
return VectorToKeyIterator(cx, obj, flags, props, NULL, 0, 0, vp);
return VectorToKeyIterator(cx, obj, flags, props, 0, 0, vp);
}
static inline bool
bool
VectorToValueIterator(JSContext *cx, JSObject *obj, uintN flags, AutoValueVector &vals,
const uint32 *sarray, uint32 slength, uint32 key, Value *vp)
Value *vp)
{
JS_ASSERT(flags & JSITER_FOREACH);
@ -554,10 +573,10 @@ VectorToValueIterator(JSContext *cx, JSObject *obj, uintN flags, AutoValueVector
if (!iterobj)
return false;
NativeIterator *ni = NativeIterator::allocateValueIterator(cx, slength, vals);
NativeIterator *ni = NativeIterator::allocateValueIterator(cx, vals);
if (!ni)
return NULL;
ni->init(obj, flags, sarray, slength, key);
return false;
ni->init(obj, flags, 0, 0);
iterobj->setNativeIterator(ni);
vp->setObject(*iterobj);
@ -566,12 +585,6 @@ VectorToValueIterator(JSContext *cx, JSObject *obj, uintN flags, AutoValueVector
return true;
}
bool
VectorToValueIterator(JSContext *cx, JSObject *obj, uintN flags, AutoValueVector &props, Value *vp)
{
return VectorToValueIterator(cx, obj, flags, props, NULL, 0, 0, vp);
}
bool
EnumeratedIdVectorToIterator(JSContext *cx, JSObject *obj, uintN flags, AutoIdVector &props, Value *vp)
{
@ -608,11 +621,33 @@ GetIterator(JSContext *cx, JSObject *obj, uintN flags, Value *vp)
if (obj) {
if (keysOnly) {
/*
* Check to see if this is the same as the most recent object which
* was iterated over. We don't explicitly check for shapeless
* objects here, as they are not inserted into the cache and
* will result in a miss.
*/
JSObject *last = JS_THREAD_DATA(cx)->lastNativeIterator;
JSObject *proto = obj->getProto();
if (last) {
NativeIterator *lastni = last->getNativeIterator();
if (!(lastni->flags & JSITER_ACTIVE) &&
obj->isNative() &&
obj->shape() == lastni->shapes_array[0] &&
proto && proto->isNative() &&
proto->shape() == lastni->shapes_array[1] &&
!proto->getProto()) {
vp->setObject(*last);
RegisterEnumerator(cx, last, lastni);
return true;
}
}
/*
* The iterator object for JSITER_ENUMERATE never escapes, so we
* don't care for the proper parent/proto to be set. This also
* allows us to re-use a previous iterator object that was freed
* by JSOP_ENDITER.
* allows us to re-use a previous iterator object that is not
* currently active.
*/
JSObject *pobj = obj;
do {
@ -634,13 +669,15 @@ GetIterator(JSContext *cx, JSObject *obj, uintN flags, Value *vp)
JSObject *iterobj = *hp;
if (iterobj) {
NativeIterator *ni = iterobj->getNativeIterator();
if (ni->shapes_key == key &&
if (!(ni->flags & JSITER_ACTIVE) &&
ni->shapes_key == key &&
ni->shapes_length == shapes.length() &&
Compare(ni->shapes_array, shapes.begin(), ni->shapes_length)) {
vp->setObject(*iterobj);
*hp = ni->next;
RegisterEnumerator(cx, iterobj, ni);
if (shapes.length() == 2)
JS_THREAD_DATA(cx)->lastNativeIterator = iterobj;
return true;
}
}
@ -661,13 +698,29 @@ GetIterator(JSContext *cx, JSObject *obj, uintN flags, Value *vp)
AutoValueVector vals(cx);
if (JS_LIKELY(obj != NULL) && !Snapshot<ValueEnumeration>(cx, obj, flags, vals))
return false;
return VectorToValueIterator(cx, obj, flags, vals, shapes.begin(), shapes.length(), key, vp);
JS_ASSERT(shapes.empty());
if (!VectorToValueIterator(cx, obj, flags, vals, vp))
return false;
} else {
AutoIdVector keys(cx);
if (JS_LIKELY(obj != NULL) && !Snapshot<KeyEnumeration>(cx, obj, flags, keys))
return false;
if (!VectorToKeyIterator(cx, obj, flags, keys, shapes.length(), key, vp))
return false;
}
AutoIdVector keys(cx);
if (JS_LIKELY(obj != NULL) && !Snapshot<KeyEnumeration>(cx, obj, flags, keys))
return false;
return VectorToKeyIterator(cx, obj, flags, keys, shapes.begin(), shapes.length(), key, vp);
JSObject *iterobj = &vp->toObject();
/* Cache the iterator object if possible. */
if (shapes.length()) {
uint32 hash = key % NATIVE_ITER_CACHE_SIZE;
JSObject **hp = &JS_THREAD_DATA(cx)->cachedNativeIterators[hash];
*hp = iterobj;
}
if (shapes.length() == 2)
JS_THREAD_DATA(cx)->lastNativeIterator = iterobj;
return true;
}
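Taking this hunk as a whole, the per-thread native-iterator cache now works roughly as follows (names as in the patch; the shape-key computation is elided here just as it is in the hunk above):
/*
 * lookup:  hp = &JS_THREAD_DATA(cx)->cachedNativeIterators[key % NATIVE_ITER_CACHE_SIZE];
 *          reuse *hp only if its NativeIterator is not JSITER_ACTIVE and its
 *          shapes_key / shapes_array still match the object's proto chain;
 *          a two-shape chain (object plus one native proto) is also checked
 *          against the one-slot lastNativeIterator fast path first.
 * insert:  after building a fresh key iterator, store it back into the same
 *          hash slot, and into lastNativeIterator for the two-shape case.
 * release: js_CloseIterator (below) clears JSITER_ACTIVE and rewinds
 *          props_cursor so the cached object can be handed out again.
 */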
static JSObject *
@ -677,15 +730,13 @@ iterator_iterator(JSContext *cx, JSObject *obj, JSBool keysonly)
}
static JSBool
Iterator(JSContext *cx, JSObject *iterobj, uintN argc, Value *argv, Value *rval)
Iterator(JSContext *cx, uintN argc, Value *vp)
{
JSBool keyonly;
uintN flags;
keyonly = js_ValueToBoolean(argv[1]);
flags = JSITER_OWNONLY | (keyonly ? 0 : (JSITER_FOREACH | JSITER_KEYVALUE));
*rval = argv[0];
return js_ValueToIterator(cx, flags, rval);
Value *argv = JS_ARGV(cx, vp);
bool keyonly = argc >= 2 ? js_ValueToBoolean(argv[1]) : false;
uintN flags = JSITER_OWNONLY | (keyonly ? 0 : (JSITER_FOREACH | JSITER_KEYVALUE));
*vp = argc >= 1 ? argv[0] : UndefinedValue();
return js_ValueToIterator(cx, flags, vp);
}
JSBool
@ -794,18 +845,19 @@ js_CloseIterator(JSContext *cx, JSObject *obj)
if (clasp == &js_IteratorClass) {
/* Remove enumerators from the active list, which is a stack. */
NativeIterator *ni = obj->getNativeIterator();
if (ni->flags & JSITER_ENUMERATE) {
JS_ASSERT(cx->enumerators == obj);
cx->enumerators = ni->next;
}
/* Cache the iterator object if possible. */
if (ni->shapes_length) {
uint32 hash = ni->shapes_key % NATIVE_ITER_CACHE_SIZE;
JSObject **hp = &JS_THREAD_DATA(cx)->cachedNativeIterators[hash];
JS_ASSERT(ni->flags & JSITER_ACTIVE);
ni->flags &= ~JSITER_ACTIVE;
/*
* Reset the enumerator; it may still be in the cached iterators
* for this thread, and can be reused.
*/
ni->props_cursor = ni->props_array;
ni->next = *hp;
*hp = obj;
}
}
#if JS_HAS_GENERATORS
@ -853,7 +905,7 @@ js_SuppressDeletedProperty(JSContext *cx, JSObject *obj, jsid id)
if (prop) {
uintN attrs;
if (obj2.object()->isNative()) {
attrs = ((JSScopeProperty *) prop)->attributes();
attrs = ((Shape *) prop)->attributes();
JS_UNLOCK_OBJ(cx, obj2.object());
} else if (!obj2.object()->getAttributes(cx, id, &attrs)) {
return false;
@ -914,7 +966,7 @@ js_IteratorMore(JSContext *cx, JSObject *iterobj, Value *rval)
jsid id = ATOM_TO_JSID(cx->runtime->atomState.nextAtom);
if (!js_GetMethod(cx, iterobj, id, JSGET_METHOD_BARRIER, rval))
return false;
if (!InternalCall(cx, iterobj, *rval, 0, NULL, rval)) {
if (!ExternalInvoke(cx, iterobj, *rval, 0, NULL, rval)) {
/* Check for StopIteration. */
if (!cx->throwing || !js_ValueIsStopIteration(cx->exception))
return false;
@ -1267,7 +1319,7 @@ SendToGenerator(JSContext *cx, JSGeneratorOp op, JSObject *obj,
JSObject *enumerators = cx->enumerators;
cx->enumerators = gen->enumerators;
ok = Interpret(cx);
ok = RunScript(cx, fp->maybeScript(), fp->maybeFunction(), fp->getScopeChain());
/* Restore the original enumerators stack. */
gen->enumerators = cx->enumerators;

Some files were not shown because too many files changed in this diff.