Merge tracemonkey to mozilla-central a=blockers

This commit is contained in:
Robert Sayre 2010-09-29 02:01:12 -04:00
Родитель 36504e00c0 941ed70827
Коммит 28b54c3b54
127 изменённых файлов: 4946 добавлений и 3704 удалений

Просмотреть файл

@ -8072,6 +8072,11 @@ MOZ_ARG_ENABLE_BOOL(libxul,
MOZ_ENABLE_LIBXUL=1,
MOZ_ENABLE_LIBXUL=)
# split JS out by default to avoid VS2005 PGO crash (bug 591836).
if test "$OS_ARCH" = "WINNT"; then
ENABLE_SHARED_JS=1
fi
MOZ_ARG_ENABLE_BOOL(shared-js,
[ --enable-shared-js
Create a shared JavaScript library.],

Просмотреть файл

@ -1486,8 +1486,8 @@ nsJSContext::EvaluateStringWithValue(const nsAString& aScript,
JSAutoRequest ar(mContext);
JSAutoCrossCompartmentCall accc;
if (!accc.enter(mContext, (JSObject *)aScopeObject)) {
JSAutoEnterCompartment ac;
if (!ac.enter(mContext, (JSObject *)aScopeObject)) {
JSPRINCIPALS_DROP(mContext, jsprin);
stack->Pop(nsnull);
return NS_ERROR_FAILURE;
@ -1686,8 +1686,8 @@ nsJSContext::EvaluateString(const nsAString& aScript,
// check it isn't JSVERSION_UNKNOWN.
if (ok && ((JSVersion)aVersion) != JSVERSION_UNKNOWN) {
JSAutoRequest ar(mContext);
JSAutoCrossCompartmentCall accc;
if (!accc.enter(mContext, (JSObject *)aScopeObject)) {
JSAutoEnterCompartment ac;
if (!ac.enter(mContext, (JSObject *)aScopeObject)) {
stack->Pop(nsnull);
JSPRINCIPALS_DROP(mContext, jsprin);
return NS_ERROR_FAILURE;
@ -2129,8 +2129,8 @@ nsJSContext::CallEventHandler(nsISupports* aTarget, void *aScope, void *aHandler
}
jsval funval = OBJECT_TO_JSVAL(static_cast<JSObject *>(aHandler));
JSAutoCrossCompartmentCall accc;
if (!accc.enter(mContext, target)) {
JSAutoEnterCompartment ac;
if (!ac.enter(mContext, target)) {
stack->Pop(nsnull);
return NS_ERROR_FAILURE;
}

Просмотреть файл

@ -397,12 +397,12 @@ public:
PRBool killWorkerWhenDone;
{
nsLazyAutoRequest ar;
JSAutoCrossCompartmentCall axcc;
JSAutoEnterCompartment ac;
// Tell the worker which context it will be using
if (mWorker->SetGlobalForContext(cx, &ar, &axcc)) {
if (mWorker->SetGlobalForContext(cx, &ar, &ac)) {
NS_ASSERTION(ar.entered(), "SetGlobalForContext must enter request on success");
NS_ASSERTION(axcc.entered(), "SetGlobalForContext must enter xcc on success");
NS_ASSERTION(ac.entered(), "SetGlobalForContext must enter compartment on success");
RunQueue(cx, &killWorkerWhenDone);
@ -413,7 +413,7 @@ public:
}
else {
NS_ASSERTION(!ar.entered(), "SetGlobalForContext must not enter request on failure");
NS_ASSERTION(!axcc.entered(), "SetGlobalForContext must not enter xcc on failure");
NS_ASSERTION(!ac.entered(), "SetGlobalForContext must not enter compartment on failure");
{
// Code in XPConnect assumes that the context's global object won't be

Просмотреть файл

@ -1588,11 +1588,11 @@ nsDOMWorker::PostMessageInternal(PRBool aToInner)
PRBool
nsDOMWorker::SetGlobalForContext(JSContext* aCx, nsLazyAutoRequest *aRequest,
JSAutoCrossCompartmentCall *aCall)
JSAutoEnterCompartment *aComp)
{
NS_ASSERTION(!NS_IsMainThread(), "Wrong thread!");
if (!CompileGlobalObject(aCx, aRequest, aCall)) {
if (!CompileGlobalObject(aCx, aRequest, aComp)) {
return PR_FALSE;
}
@ -1602,7 +1602,7 @@ nsDOMWorker::SetGlobalForContext(JSContext* aCx, nsLazyAutoRequest *aRequest,
PRBool
nsDOMWorker::CompileGlobalObject(JSContext* aCx, nsLazyAutoRequest *aRequest,
JSAutoCrossCompartmentCall *aCall)
JSAutoEnterCompartment *aComp)
{
NS_ASSERTION(!NS_IsMainThread(), "Wrong thread!");
@ -1612,16 +1612,16 @@ nsDOMWorker::CompileGlobalObject(JSContext* aCx, nsLazyAutoRequest *aRequest,
// failure, the local request and call will automatically get cleaned
// up. Once success is certain, we swap them into *aRequest and *aCall.
nsLazyAutoRequest localRequest;
JSAutoCrossCompartmentCall localCall;
JSAutoEnterCompartment localAutoCompartment;
localRequest.enter(aCx);
PRBool success;
if (mGlobal) {
success = localCall.enter(aCx, mGlobal);
success = localAutoCompartment.enter(aCx, mGlobal);
NS_ENSURE_TRUE(success, PR_FALSE);
aRequest->swap(localRequest);
aCall->swap(localCall);
aComp->swap(localAutoCompartment);
return PR_TRUE;
}
@ -1662,7 +1662,7 @@ nsDOMWorker::CompileGlobalObject(JSContext* aCx, nsLazyAutoRequest *aRequest,
NS_ASSERTION(JS_GetGlobalObject(aCx) == global, "Global object mismatch!");
success = localCall.enter(aCx, global);
success = localAutoCompartment.enter(aCx, global);
NS_ENSURE_TRUE(success, PR_FALSE);
#ifdef DEBUG
@ -1734,7 +1734,7 @@ nsDOMWorker::CompileGlobalObject(JSContext* aCx, nsLazyAutoRequest *aRequest,
NS_ASSERTION(mPrincipal && mURI, "Script loader didn't set our principal!");
aRequest->swap(localRequest);
aCall->swap(localCall);
aComp->swap(localAutoCompartment);
return PR_TRUE;
}

Просмотреть файл

@ -201,7 +201,7 @@ public:
PRBool IsClosing();
PRBool IsSuspended();
PRBool SetGlobalForContext(JSContext* aCx, nsLazyAutoRequest *aRequest, JSAutoCrossCompartmentCall *aCall);
PRBool SetGlobalForContext(JSContext* aCx, nsLazyAutoRequest *aRequest, JSAutoEnterCompartment *aComp);
void SetPool(nsDOMWorkerPool* aPool);
@ -285,7 +285,7 @@ private:
nsresult PostMessageInternal(PRBool aToInner);
PRBool CompileGlobalObject(JSContext* aCx, nsLazyAutoRequest *aRequest, JSAutoCrossCompartmentCall *aCall);
PRBool CompileGlobalObject(JSContext* aCx, nsLazyAutoRequest *aRequest, JSAutoEnterCompartment *aComp);
PRUint32 NextTimeoutId() {
return ++mNextTimeoutId;

Просмотреть файл

@ -431,7 +431,7 @@ JetpackChild::CreateSandbox(JSContext* cx, uintN argc, jsval* vp)
if (!obj)
return JS_FALSE;
JSAutoCrossCompartmentCall ac;
JSAutoEnterCompartment ac;
if (!ac.enter(cx, obj))
return JS_FALSE;
@ -462,7 +462,7 @@ JetpackChild::EvalInSandbox(JSContext* cx, uintN argc, jsval* vp)
if (!str)
return JS_FALSE;
JSAutoCrossCompartmentCall ac;
JSAutoEnterCompartment ac;
if (!ac.enter(cx, obj))
return JS_FALSE;

Просмотреть файл

@ -90,7 +90,7 @@ _newJSDContext(JSRuntime* jsrt,
JSObject* scopeobj)
{
JSDContext* jsdc = NULL;
JSCompartment *compartment;
JSCrossCompartmentCall *call = NULL;
if( ! jsrt )
return NULL;
@ -140,10 +140,10 @@ _newJSDContext(JSRuntime* jsrt,
JS_BeginRequest(jsdc->dumbContext);
if( scopeobj )
compartment = js_SwitchToObjectCompartment(jsdc->dumbContext, scopeobj);
call = JS_EnterCrossCompartmentCall(jsdc->dumbContext, scopeobj);
jsdc->glob = JS_NewGlobalObject(jsdc->dumbContext, &global_class);
if( scopeobj )
js_SwitchToCompartment(jsdc->dumbContext, compartment);
if( call )
JS_LeaveCrossCompartmentCall(call);
if( ! jsdc->glob )
goto label_newJSDContext_failure;

Просмотреть файл

@ -153,16 +153,25 @@ jsd_IsValueNative(JSDContext* jsdc, JSDValue* jsdval)
jsval val = jsdval->val;
JSFunction* fun;
JSExceptionState* exceptionState;
JSCrossCompartmentCall *call = NULL;
if(jsd_IsValueFunction(jsdc, jsdval))
{
JSBool ok = JS_FALSE;
JS_BeginRequest(cx);
call = JS_EnterCrossCompartmentCall(jsdc->dumbContext, jsdc->glob);
if(!call) {
JS_EndRequest(cx);
return JS_FALSE;
}
exceptionState = JS_SaveExceptionState(cx);
fun = JS_ValueToFunction(cx, val);
JS_RestoreExceptionState(cx, exceptionState);
if(fun)
ok = JS_GetFunctionScript(cx, fun) ? JS_FALSE : JS_TRUE;
JS_LeaveCrossCompartmentCall(call);
JS_EndRequest(cx);
JS_ASSERT(fun);
return ok;
@ -203,6 +212,7 @@ jsd_GetValueString(JSDContext* jsdc, JSDValue* jsdval)
{
JSContext* cx = jsdc->dumbContext;
JSExceptionState* exceptionState;
JSCrossCompartmentCall *call = NULL;
if(!jsdval->string)
{
@ -212,6 +222,13 @@ jsd_GetValueString(JSDContext* jsdc, JSDValue* jsdval)
else
{
JS_BeginRequest(cx);
call = JS_EnterCrossCompartmentCall(jsdc->dumbContext, jsdc->glob);
if(!call) {
JS_EndRequest(cx);
return NULL;
}
exceptionState = JS_SaveExceptionState(cx);
jsdval->string = JS_ValueToString(cx, jsdval->val);
JS_RestoreExceptionState(cx, exceptionState);
@ -220,6 +237,7 @@ jsd_GetValueString(JSDContext* jsdc, JSDValue* jsdval)
if(!JS_AddNamedStringRoot(cx, &jsdval->string, "ValueString"))
jsdval->string = NULL;
}
JS_LeaveCrossCompartmentCall(call);
JS_EndRequest(cx);
}
}
@ -232,13 +250,23 @@ jsd_GetValueFunctionName(JSDContext* jsdc, JSDValue* jsdval)
JSContext* cx = jsdc->dumbContext;
JSFunction* fun;
JSExceptionState* exceptionState;
JSCrossCompartmentCall *call = NULL;
if(!jsdval->funName && jsd_IsValueFunction(jsdc, jsdval))
{
JS_BeginRequest(cx);
call = JS_EnterCrossCompartmentCall(jsdc->dumbContext, jsdc->glob);
if(!call) {
JS_EndRequest(cx);
return NULL;
}
exceptionState = JS_SaveExceptionState(cx);
fun = JS_ValueToFunction(cx, jsdval->val);
JS_RestoreExceptionState(cx, exceptionState);
JS_LeaveCrossCompartmentCall(call);
JS_EndRequest(cx);
if(!fun)
return NULL;
@ -253,6 +281,7 @@ JSDValue*
jsd_NewValue(JSDContext* jsdc, jsval val)
{
JSDValue* jsdval;
JSCrossCompartmentCall *call = NULL;
if(!(jsdval = (JSDValue*) calloc(1, sizeof(JSDValue))))
return NULL;
@ -261,7 +290,16 @@ jsd_NewValue(JSDContext* jsdc, jsval val)
{
JSBool ok = JS_FALSE;
JS_BeginRequest(jsdc->dumbContext);
call = JS_EnterCrossCompartmentCall(jsdc->dumbContext, jsdc->glob);
if(!call) {
JS_EndRequest(jsdc->dumbContext);
return NULL;
}
ok = JS_AddNamedValueRoot(jsdc->dumbContext, &jsdval->val, "JSDValue");
JS_LeaveCrossCompartmentCall(call);
JS_EndRequest(jsdc->dumbContext);
if(!ok)
{
@ -279,6 +317,8 @@ jsd_NewValue(JSDContext* jsdc, jsval val)
void
jsd_DropValue(JSDContext* jsdc, JSDValue* jsdval)
{
JSCrossCompartmentCall *call = NULL;
JS_ASSERT(jsdval->nref > 0);
if(0 == --jsdval->nref)
{
@ -286,7 +326,15 @@ jsd_DropValue(JSDContext* jsdc, JSDValue* jsdval)
if(JSVAL_IS_GCTHING(jsdval->val))
{
JS_BeginRequest(jsdc->dumbContext);
call = JS_EnterCrossCompartmentCall(jsdc->dumbContext, jsdc->glob);
if(!call) {
JS_EndRequest(jsdc->dumbContext);
return;
}
JS_RemoveValueRoot(jsdc->dumbContext, &jsdval->val);
JS_LeaveCrossCompartmentCall(call);
JS_EndRequest(jsdc->dumbContext);
}
free(jsdval);
@ -362,6 +410,7 @@ static JSBool _buildProps(JSDContext* jsdc, JSDValue* jsdval)
JSContext* cx = jsdc->dumbContext;
JSPropertyDescArray pda;
uintN i;
JSCrossCompartmentCall *call = NULL;
JS_ASSERT(JS_CLIST_IS_EMPTY(&jsdval->props));
JS_ASSERT(!(CHECK_BIT_FLAG(jsdval->flags, GOT_PROPS)));
@ -371,9 +420,17 @@ static JSBool _buildProps(JSDContext* jsdc, JSDValue* jsdval)
return JS_FALSE;
JS_BeginRequest(cx);
call = JS_EnterCrossCompartmentCall(jsdc->dumbContext, jsdc->glob);
if(!call) {
JS_EndRequest(jsdc->dumbContext);
return JS_FALSE;
}
if(!JS_GetPropertyDescArray(cx, JSVAL_TO_OBJECT(jsdval->val), &pda))
{
JS_EndRequest(cx);
JS_LeaveCrossCompartmentCall(call);
return JS_FALSE;
}
@ -388,6 +445,7 @@ static JSBool _buildProps(JSDContext* jsdc, JSDValue* jsdval)
JS_APPEND_LINK(&prop->links, &jsdval->props);
}
JS_PutPropertyDescArray(cx, &pda);
JS_LeaveCrossCompartmentCall(call);
JS_EndRequest(cx);
SET_BIT_FLAG(jsdval->flags, GOT_PROPS);
return !JS_CLIST_IS_EMPTY(&jsdval->props);
@ -400,6 +458,7 @@ void
jsd_RefreshValue(JSDContext* jsdc, JSDValue* jsdval)
{
JSContext* cx = jsdc->dumbContext;
JSCrossCompartmentCall *call = NULL;
if(jsdval->string)
{
@ -407,7 +466,15 @@ jsd_RefreshValue(JSDContext* jsdc, JSDValue* jsdval)
if(!JSVAL_IS_STRING(jsdval->val))
{
JS_BeginRequest(cx);
call = JS_EnterCrossCompartmentCall(cx, jsdc->glob);
if(!call) {
JS_EndRequest(cx);
return;
}
JS_RemoveStringRoot(cx, &jsdval->string);
JS_LeaveCrossCompartmentCall(call);
JS_EndRequest(cx);
}
jsdval->string = NULL;
@ -479,6 +546,7 @@ jsd_GetValueProperty(JSDContext* jsdc, JSDValue* jsdval, JSString* name)
size_t nameLen;
jsval val, nameval;
jsid nameid;
JSCrossCompartmentCall *call = NULL;
if(!jsd_IsValueObject(jsdc, jsdval))
return NULL;
@ -500,10 +568,17 @@ jsd_GetValueProperty(JSDContext* jsdc, JSDValue* jsdval, JSString* name)
nameLen = JS_GetStringLength(name);
JS_BeginRequest(cx);
call = JS_EnterCrossCompartmentCall(cx, jsdc->glob);
if(!call) {
JS_EndRequest(cx);
return NULL;
}
JS_GetUCPropertyAttributes(cx, obj, nameChars, nameLen, &attrs, &found);
if (!found)
{
JS_LeaveCrossCompartmentCall(call);
JS_EndRequest(cx);
return NULL;
}
@ -516,6 +591,7 @@ jsd_GetValueProperty(JSDContext* jsdc, JSDValue* jsdval, JSString* name)
{
if (!JS_GetPendingException(cx, &pd.value))
{
JS_LeaveCrossCompartmentCall(call);
JS_EndRequest(cx);
return NULL;
}
@ -532,6 +608,7 @@ jsd_GetValueProperty(JSDContext* jsdc, JSDValue* jsdval, JSString* name)
pd.value = val;
}
JS_LeaveCrossCompartmentCall(call);
JS_EndRequest(cx);
nameval = STRING_TO_JSVAL(name);
@ -553,6 +630,8 @@ jsd_GetValueProperty(JSDContext* jsdc, JSDValue* jsdval, JSString* name)
JSDValue*
jsd_GetValuePrototype(JSDContext* jsdc, JSDValue* jsdval)
{
JSCrossCompartmentCall *call = NULL;
if(!(CHECK_BIT_FLAG(jsdval->flags, GOT_PROTO)))
{
JSObject* obj;
@ -564,7 +643,14 @@ jsd_GetValuePrototype(JSDContext* jsdc, JSDValue* jsdval)
if(!(obj = JSVAL_TO_OBJECT(jsdval->val)))
return NULL;
JS_BeginRequest(jsdc->dumbContext);
call = JS_EnterCrossCompartmentCall(jsdc->dumbContext, jsdc->glob);
if(!call) {
JS_EndRequest(jsdc->dumbContext);
return NULL;
}
proto = JS_GetPrototype(jsdc->dumbContext, obj);
JS_LeaveCrossCompartmentCall(call);
JS_EndRequest(jsdc->dumbContext);
if(!proto)
return NULL;
@ -578,6 +664,8 @@ jsd_GetValuePrototype(JSDContext* jsdc, JSDValue* jsdval)
JSDValue*
jsd_GetValueParent(JSDContext* jsdc, JSDValue* jsdval)
{
JSCrossCompartmentCall *call = NULL;
if(!(CHECK_BIT_FLAG(jsdval->flags, GOT_PARENT)))
{
JSObject* obj;
@ -589,7 +677,14 @@ jsd_GetValueParent(JSDContext* jsdc, JSDValue* jsdval)
if(!(obj = JSVAL_TO_OBJECT(jsdval->val)))
return NULL;
JS_BeginRequest(jsdc->dumbContext);
call = JS_EnterCrossCompartmentCall(jsdc->dumbContext, jsdc->glob);
if(!call) {
JS_EndRequest(jsdc->dumbContext);
return NULL;
}
parent = JS_GetParent(jsdc->dumbContext,obj);
JS_LeaveCrossCompartmentCall(call);
JS_EndRequest(jsdc->dumbContext);
if(!parent)
return NULL;
@ -603,6 +698,8 @@ jsd_GetValueParent(JSDContext* jsdc, JSDValue* jsdval)
JSDValue*
jsd_GetValueConstructor(JSDContext* jsdc, JSDValue* jsdval)
{
JSCrossCompartmentCall *call = NULL;
if(!(CHECK_BIT_FLAG(jsdval->flags, GOT_CTOR)))
{
JSObject* obj;
@ -615,13 +712,21 @@ jsd_GetValueConstructor(JSDContext* jsdc, JSDValue* jsdval)
if(!(obj = JSVAL_TO_OBJECT(jsdval->val)))
return NULL;
JS_BeginRequest(jsdc->dumbContext);
call = JS_EnterCrossCompartmentCall(jsdc->dumbContext, jsdc->glob);
if(!call) {
JS_EndRequest(jsdc->dumbContext);
return NULL;
}
proto = JS_GetPrototype(jsdc->dumbContext,obj);
if(!proto)
{
JS_LeaveCrossCompartmentCall(call);
JS_EndRequest(jsdc->dumbContext);
return NULL;
}
ctor = JS_GetConstructor(jsdc->dumbContext,proto);
JS_LeaveCrossCompartmentCall(call);
JS_EndRequest(jsdc->dumbContext);
if(!ctor)
return NULL;
@ -636,14 +741,23 @@ const char*
jsd_GetValueClassName(JSDContext* jsdc, JSDValue* jsdval)
{
jsval val = jsdval->val;
JSCrossCompartmentCall *call = NULL;
if(!jsdval->className && JSVAL_IS_OBJECT(val))
{
JSObject* obj;
if(!(obj = JSVAL_TO_OBJECT(val)))
return NULL;
JS_BeginRequest(jsdc->dumbContext);
call = JS_EnterCrossCompartmentCall(jsdc->dumbContext, jsdc->glob);
if(!call) {
JS_EndRequest(jsdc->dumbContext);
return NULL;
}
if(JS_GET_CLASS(jsdc->dumbContext, obj))
jsdval->className = JS_GET_CLASS(jsdc->dumbContext, obj)->name;
JS_LeaveCrossCompartmentCall(call);
JS_EndRequest(jsdc->dumbContext);
}
return jsdval->className;
@ -654,20 +768,28 @@ jsd_GetScriptForValue(JSDContext* jsdc, JSDValue* jsdval)
{
JSContext* cx = jsdc->dumbContext;
jsval val = jsdval->val;
JSFunction* fun;
JSFunction* fun = NULL;
JSExceptionState* exceptionState;
JSScript* script = NULL;
JSDScript* jsdscript;
JSCrossCompartmentCall *call = NULL;
if (!jsd_IsValueFunction(jsdc, jsdval))
return NULL;
JS_BeginRequest(cx);
call = JS_EnterCrossCompartmentCall(cx, JSVAL_TO_OBJECT(val));
if (!call) {
JS_EndRequest(cx);
return NULL;
}
exceptionState = JS_SaveExceptionState(cx);
fun = JS_ValueToFunction(cx, val);
JS_RestoreExceptionState(cx, exceptionState);
if (fun)
script = JS_GetFunctionScript(cx, fun);
JS_LeaveCrossCompartmentCall(call);
JS_EndRequest(cx);
if (!script)

Просмотреть файл

@ -163,34 +163,34 @@ Narcissus.interpreter = (function() {
else { return (name in hostGlobal); }
};
globalHandler.get = function(receiver, name) {
if (narcissusGlobal.hasOwnProperty(name)) {
if (narcissusGlobal.hasOwnProperty(name))
return narcissusGlobal[name];
}
var globalFun = hostGlobal[name];
if (definitions.isNativeCode(globalFun)) {
// Enables native browser functions like 'alert' to work correctly.
return Proxy.createFunction(
definitions.makePassthruHandler(globalFun),
function() { return globalFun.apply(hostGlobal, arguments); },
function() {
var a = arguments;
switch (a.length) {
case 0:
return new globalFun();
case 1:
return new globalFun(a[0]);
case 2:
return new globalFun(a[0], a[1]);
case 3:
return new globalFun(a[0], a[1], a[2]);
default:
var argStr = "";
for (var i=0; i<a.length; i++) {
argStr += 'a[' + i + '],';
}
return eval('new ' + name + '(' + argStr.slice(0,-1) + ');');
definitions.makePassthruHandler(globalFun),
function() { return globalFun.apply(hostGlobal, arguments); },
function() {
var a = arguments;
switch (a.length) {
case 0:
return new globalFun();
case 1:
return new globalFun(a[0]);
case 2:
return new globalFun(a[0], a[1]);
case 3:
return new globalFun(a[0], a[1], a[2]);
default:
var argStr = "";
for (var i=0; i<a.length; i++) {
argStr += 'a[' + i + '],';
}
});
return eval('new ' + name + '(' + argStr.slice(0,-1) + ');');
}
});
}
else { return globalFun; };
};
@ -307,7 +307,7 @@ Narcissus.interpreter = (function() {
: new TypeError(message);
}
function valuatePhis(n, v) {
function evaluatePhis(n, v) {
var ps = n.phiUses;
if (!ps)
return;
@ -319,7 +319,7 @@ Narcissus.interpreter = (function() {
if (ps[i].v === v)
break;
ps[i].v = v;
valuatePhis(ps[i], v);
evaluatePhis(ps[i], v);
}
}
@ -843,13 +843,13 @@ Narcissus.interpreter = (function() {
resolved.functionForm == parser.DECLARED_FORM)) {
v = resolved.v;
break;
} else {
for (s = x.scope; s; s = s.parent) {
if (n.value in s.object)
break;
}
v = new Reference(s && s.object, n.value, n);
}
for (s = x.scope; s; s = s.parent) {
if (n.value in s.object)
break;
}
v = new Reference(s && s.object, n.value, n);
break;
case NUMBER:
@ -869,7 +869,7 @@ Narcissus.interpreter = (function() {
if (n.backwards) {
n.v = v;
}
valuatePhis(n, v);
evaluatePhis(n, v);
return v;
}

Просмотреть файл

@ -86,10 +86,8 @@ Narcissus.parser = (function() {
if (typeof unbound !== "object")
continue;
/*
* We store the bound sub-builder as builder's own property
* so that we can have multiple builders at the same time.
*/
// We store the bound sub-builder as builder's own property
// so that we can have multiple builders at the same time.
var bound = builder[ns] = {};
for (var m in unbound) {
bound[m] = bindMethod(unbound[m], builder);
@ -1214,11 +1212,7 @@ Narcissus.parser = (function() {
case BREAK:
case CONTINUE:
if (tt === BREAK) {
b = builder.BREAK;
} else {
b = builder.CONTINUE;
}
b = (tt === BREAK) ? builder.BREAK : builder.CONTINUE;
n = b.build(t);
if (t.peekOnSameLine() === IDENTIFIER) {

Просмотреть файл

@ -1068,21 +1068,18 @@
for (var ns in super.prototype) {
var childNS = childProto[ns];
var superNS = superProto[ns];
var childNSType = typeof childNS;
if (childNSType === "undefined") {
if (childNS === undefined) {
childProto[ns] = superNS;
} else if (childNSType === "object") {
} else {
for (var m in superNS) {
let childMethod = childNS[m];
let superMethod = superNS[m];
if (typeof childMethod === "undefined") {
if (childMethod === undefined) {
childNS[m] = superMethod;
} else {
childNS[m] = function() {
if (this.binds)
return childMethod.apply(this, arguments);
else
return superMethod.apply(this, arguments);
return (this.binds ? childMethod : superMethod)
.apply(this, arguments);
};
}
}

Просмотреть файл

@ -132,6 +132,7 @@ CPPSRCS = \
jsgc.cpp \
jsgcchunk.cpp \
jsgcstats.cpp \
jscompartment.cpp \
jshash.cpp \
jsinterp.cpp \
jsinvoke.cpp \
@ -185,8 +186,10 @@ INSTALLED_HEADERS = \
jsemit.h \
jsfun.h \
jsgc.h \
jscell.h \
jsgcchunk.h \
jsgcstats.h \
jscompartment.h \
jshash.h \
jsinterp.h \
jsinttypes.h \
@ -686,6 +689,7 @@ endif
endif
ifdef SOLARIS_SUNPRO_CXX
ifeq ($(TARGET_CPU),sparc
# Sun Studio SPARC doesn't work well with gcc inline asm, use lock_SunOS_sparc*.il
jslock.o: jslock.cpp Makefile.in lock_sparcv8plus.il lock_sparcv9.il
$(REPORT_BUILD)
@ -695,6 +699,7 @@ ifeq (sparcv9,$(findstring sparcv9,$(OS_TEST)))
else
$(CXX) -o $@ -c $(COMPILE_CFLAGS) $(srcdir)/lock_sparcv8plus.il $<
endif # sparcv9
endif # sparc
endif # SOLARIS_SUNPRO_CXX
ifeq ($(OS_ARCH),IRIX)

Просмотреть файл

@ -68,8 +68,10 @@ BEGIN_TEST(testContexts_bug561444)
JS_BeginRequest(cx);
{
jsvalRoot v(cx);
JSAutoCrossCompartmentCall crossCall;
crossCall.enter(cx, d->obj);
JSAutoEnterCompartment ac;
ac.enterAndIgnoreErrors(cx, d->obj);
if (!JS_EvaluateScript(cx, d->obj, d->code, strlen(d->code), __FILE__, __LINE__, v.addr()))
return;
}
@ -87,8 +89,8 @@ BEGIN_TEST(testContexts_bug563735)
JSBool ok;
{
JSAutoRequest req(cx2);
JSAutoCrossCompartmentCall crossCall;
CHECK(crossCall.enter(cx2, global));
JSAutoEnterCompartment ac;
CHECK(ac.enter(cx2, global));
jsval v = JSVAL_NULL;
ok = JS_SetProperty(cx2, global, "x", &v);
}

Просмотреть файл

@ -3,15 +3,38 @@
*/
#include "tests.h"
#include "jsstr.h"
BEGIN_TEST(testIntString_bug515273)
{
jsvalRoot v(cx);
EVAL("'42';", v.addr());
EVAL("'1';", v.addr());
JSString *str = JSVAL_TO_STRING(v.value());
const char *bytes = JS_GetStringBytes(str);
CHECK(strcmp(bytes, "42") == 0);
CHECK(JSString::isStatic(str));
CHECK(strcmp(JS_GetStringBytes(str), "1") == 0);
EVAL("'42';", v.addr());
str = JSVAL_TO_STRING(v.value());
CHECK(JSString::isStatic(str));
CHECK(strcmp(JS_GetStringBytes(str), "42") == 0);
EVAL("'111';", v.addr());
str = JSVAL_TO_STRING(v.value());
CHECK(JSString::isStatic(str));
CHECK(strcmp(JS_GetStringBytes(str), "111") == 0);
/* Test other types of static strings. */
EVAL("'a';", v.addr());
str = JSVAL_TO_STRING(v.value());
CHECK(JSString::isStatic(str));
CHECK(strcmp(JS_GetStringBytes(str), "a") == 0);
EVAL("'bc';", v.addr());
str = JSVAL_TO_STRING(v.value());
CHECK(JSString::isStatic(str));
CHECK(strcmp(JS_GetStringBytes(str), "bc") == 0);
return true;
}
END_TEST(testIntString_bug515273)

Просмотреть файл

@ -286,7 +286,10 @@ protected:
if (!global)
return NULL;
JSAutoEnterCompartment enter(cx, global);
JSAutoEnterCompartment ac;
if (!ac.enter(cx, global))
return NULL;
/* Populate the global object with the standard globals,
like Object and Array. */
if (!JS_InitStandardClasses(cx, global))

Просмотреть файл

@ -106,6 +106,7 @@
#endif
using namespace js;
using namespace js::gc;
class AutoVersionAPI
{
@ -707,10 +708,6 @@ JSRuntime::~JSRuntime()
JS_DESTROY_LOCK(debuggerLock);
#endif
propertyTree.finish();
/* Delete all remaining Compartments. Ideally only the defaultCompartment should be left. */
for (JSCompartment **c = compartments.begin(); c != compartments.end(); ++c)
delete *c;
compartments.clear();
}
JS_PUBLIC_API(JSRuntime *)
@ -760,12 +757,6 @@ JS_NewRuntime(uint32 maxbytes)
return rt;
}
JS_PUBLIC_API(void)
JS_CommenceRuntimeShutDown(JSRuntime *rt)
{
rt->gcFlushCodeCaches = true;
}
JS_PUBLIC_API(void)
JS_DestroyRuntime(JSRuntime *rt)
{
@ -1190,7 +1181,7 @@ JS_LeaveCrossCompartmentCall(JSCrossCompartmentCall *call)
}
bool
JSAutoCrossCompartmentCall::enter(JSContext *cx, JSObject *target)
JSAutoEnterCompartment::enter(JSContext *cx, JSObject *target)
{
JS_ASSERT(!call);
if (cx->compartment == target->getCompartment(cx))
@ -1199,20 +1190,10 @@ JSAutoCrossCompartmentCall::enter(JSContext *cx, JSObject *target)
return call != NULL;
}
JS_FRIEND_API(JSCompartment *)
js_SwitchToCompartment(JSContext *cx, JSCompartment *compartment)
void
JSAutoEnterCompartment::enterAndIgnoreErrors(JSContext *cx, JSObject *target)
{
JSCompartment *c = cx->compartment;
cx->compartment = compartment;
return c;
}
JS_FRIEND_API(JSCompartment *)
js_SwitchToObjectCompartment(JSContext *cx, JSObject *obj)
{
JSCompartment *c = cx->compartment;
cx->compartment = obj->getCompartment(cx);
return c;
(void) enter(cx, target);
}
JS_PUBLIC_API(void *)
@ -2083,7 +2064,7 @@ JS_PUBLIC_API(void)
JS_CallTracer(JSTracer *trc, void *thing, uint32 kind)
{
JS_ASSERT(thing);
Mark(trc, thing, kind);
MarkKind(trc, thing, kind);
}
#ifdef DEBUG
@ -2579,7 +2560,7 @@ JS_IsAboutToBeFinalized(JSContext *cx, void *thing)
{
JS_ASSERT(thing);
JS_ASSERT(!cx->runtime->gcMarkingTracer);
return js_IsAboutToBeFinalized(thing);
return IsAboutToBeFinalized(thing);
}
JS_PUBLIC_API(void)

Просмотреть файл

@ -717,8 +717,8 @@ JS_SameValue(JSContext *cx, jsval v1, jsval v2);
extern JS_PUBLIC_API(JSRuntime *)
JS_NewRuntime(uint32 maxbytes);
extern JS_PUBLIC_API(void)
JS_CommenceRuntimeShutDown(JSRuntime *rt);
/* Deprecated. */
#define JS_CommenceRuntimeShutDown(rt) ((void) 0)
extern JS_PUBLIC_API(void)
JS_DestroyRuntime(JSRuntime *rt);
@ -957,53 +957,34 @@ JS_WrapObject(JSContext *cx, JSObject **objp);
extern JS_PUBLIC_API(JSBool)
JS_WrapValue(JSContext *cx, jsval *vp);
extern JS_FRIEND_API(JSCompartment *)
js_SwitchToCompartment(JSContext *cx, JSCompartment *compartment);
extern JS_FRIEND_API(JSCompartment *)
js_SwitchToObjectCompartment(JSContext *cx, JSObject *obj);
#ifdef __cplusplus
JS_END_EXTERN_C
class JS_PUBLIC_API(JSAutoCrossCompartmentCall)
class JS_PUBLIC_API(JSAutoEnterCompartment)
{
JSCrossCompartmentCall *call;
public:
JSAutoCrossCompartmentCall() : call(NULL) {}
JSAutoEnterCompartment() : call(NULL) {}
bool enter(JSContext *cx, JSObject *target);
void enterAndIgnoreErrors(JSContext *cx, JSObject *target);
bool entered() const { return call != NULL; }
~JSAutoCrossCompartmentCall() {
~JSAutoEnterCompartment() {
if (call)
JS_LeaveCrossCompartmentCall(call);
}
void swap(JSAutoCrossCompartmentCall &other) {
void swap(JSAutoEnterCompartment &other) {
JSCrossCompartmentCall *tmp = call;
call = other.call;
other.call = tmp;
}
};
class JSAutoEnterCompartment
{
JSContext *cx;
JSCompartment *compartment;
public:
JSAutoEnterCompartment(JSContext *cx, JSCompartment *newCompartment) : cx(cx) {
compartment = js_SwitchToCompartment(cx, newCompartment);
}
JSAutoEnterCompartment(JSContext *cx, JSObject *target) : cx(cx) {
compartment = js_SwitchToObjectCompartment(cx, target);
}
~JSAutoEnterCompartment() {
js_SwitchToCompartment(cx, compartment);
}
};
JS_BEGIN_EXTERN_C
#endif

Просмотреть файл

@ -107,6 +107,7 @@
#include "jscntxtinlines.h"
using namespace js;
using namespace js::gc;
/* 2^32 - 1 as a number and a string */
#define MAXINDEX 4294967295u

Просмотреть файл

@ -65,8 +65,7 @@
#include "jsobjinlines.h"
using namespace js;
using namespace js;
using namespace js::gc;
/*
* ATOM_HASH assumes that JSHashNumber is 32-bit even on 64-bit systems.
@ -444,8 +443,8 @@ js_SweepAtomState(JSContext *cx)
AtomEntryType entry = e.front();
if (AtomEntryFlags(entry) & (ATOM_PINNED | ATOM_INTERNED)) {
/* Pinned or interned key cannot be finalized. */
JS_ASSERT(!js_IsAboutToBeFinalized(AtomEntryToKey(entry)));
} else if (js_IsAboutToBeFinalized(AtomEntryToKey(entry))) {
JS_ASSERT(!IsAboutToBeFinalized(AtomEntryToKey(entry)));
} else if (IsAboutToBeFinalized(AtomEntryToKey(entry))) {
e.removeFront();
}
}
@ -500,7 +499,7 @@ js_AtomizeString(JSContext *cx, JSString *str, uintN flags)
JSAtomState *state = &cx->runtime->atomState;
AtomSet &atoms = state->atoms;
JS_LOCK(cx, &state->lock);
AutoLockDefaultCompartment lock(cx);
AtomSet::AddPtr p = atoms.lookupForAdd(str);
/* Hashing the string should have flattened it if it was a rope. */
@ -521,9 +520,9 @@ js_AtomizeString(JSContext *cx, JSString *str, uintN flags)
key = str;
atoms.add(p, StringToInitialAtomEntry(key));
} else {
JS_UNLOCK(cx, &state->lock);
if (flags & ATOM_TMPSTR) {
SwitchToCompartment sc(cx, cx->runtime->defaultCompartment);
if (flags & ATOM_NOCOPY) {
key = js_NewString(cx, str->flatChars(), str->flatLength());
if (!key)
@ -543,9 +542,7 @@ js_AtomizeString(JSContext *cx, JSString *str, uintN flags)
key = str;
}
JS_LOCK(cx, &state->lock);
if (!atoms.relookupOrAdd(p, key, StringToInitialAtomEntry(key))) {
JS_UNLOCK(cx, &state->lock);
JS_ReportOutOfMemory(cx); /* SystemAllocPolicy does not report */
return NULL;
}
@ -557,7 +554,6 @@ js_AtomizeString(JSContext *cx, JSString *str, uintN flags)
JS_ASSERT(key->isAtomized());
JSAtom *atom = STRING_TO_ATOM(key);
JS_UNLOCK(cx, &state->lock);
return atom;
}

99
js/src/jscell.h Normal file
Просмотреть файл

@ -0,0 +1,99 @@
/* -*- Mode: C; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
*
* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is SpiderMonkey code.
*
* The Initial Developer of the Original Code is
* Mozilla Corporation.
* Portions created by the Initial Developer are Copyright (C) 2010
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
* Gregor Wagner <anygregor@gmail.com>
*
* Alternatively, the contents of this file may be used under the terms of
* either of the GNU General Public License Version 2 or later (the "GPL"),
* or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
#ifndef jscell_h___
#define jscell_h___
struct JSCompartment;
namespace js {
namespace gc {
template <typename T> struct Arena;
struct ArenaBitmap;
struct MarkingDelay;
struct Chunk;
struct FreeCell;
/*
* A GC cell is the base class for GC Things like JSObject, JSShortString,
* JSFunction, JSXML and for an empty cell called FreeCell. It helps avoiding
* casts from an Object to a Cell whenever we call GC related mark functions.
* Cell is not the base Class for JSString because static initialization
* (used for unitStringTables) does not work with inheritance.
*/
struct Cell {
static const size_t CellShift = 3;
static const size_t CellSize = size_t(1) << CellShift;
static const size_t CellMask = CellSize - 1;
inline Arena<Cell> *arena() const;
inline Chunk *chunk() const;
inline ArenaBitmap *bitmap() const;
JS_ALWAYS_INLINE size_t cellIndex() const;
JS_ALWAYS_INLINE void mark(uint32 color) const;
JS_ALWAYS_INLINE bool isMarked(uint32 color) const;
JS_ALWAYS_INLINE bool markIfUnmarked(uint32 color) const;
inline JSCompartment *compartment() const;
/* Needed for compatibility reasons because Cell can't be a base class of JSString */
JS_ALWAYS_INLINE js::gc::Cell *asCell() { return this; }
JS_ALWAYS_INLINE js::gc::FreeCell *asFreeCell() {
return reinterpret_cast<FreeCell *>(this);
}
};
/* FreeCell has always size 8 */
struct FreeCell : Cell {
union {
FreeCell *link;
double data;
};
};
JS_STATIC_ASSERT(sizeof(FreeCell) == 8);
} /* namespace gc */
} /* namespace js */
#endif /* jscell_h___ */

Просмотреть файл

@ -80,6 +80,7 @@
#endif
#include "jscntxtinlines.h"
#include "jscompartment.h"
#include "jsinterpinlines.h"
#include "jsobjinlines.h"
@ -101,6 +102,7 @@
#endif
using namespace js;
using namespace js::gc;
static const size_t ARENA_HEADER_SIZE_HACK = 40;
static const size_t TEMP_POOL_CHUNK_SIZE = 4096 - ARENA_HEADER_SIZE_HACK;
@ -505,8 +507,6 @@ void
JSThreadData::finish()
{
#ifdef DEBUG
/* All GC-related things must be already removed at this point. */
JS_ASSERT(gcFreeLists.isEmpty());
for (size_t i = 0; i != JS_ARRAY_LENGTH(scriptsToGC); ++i)
JS_ASSERT(!scriptsToGC[i]);
#endif
@ -529,16 +529,11 @@ void
JSThreadData::mark(JSTracer *trc)
{
stackSpace.mark(trc);
#ifdef JS_TRACER
traceMonitor.mark(trc);
#endif
}
void
JSThreadData::purge(JSContext *cx)
{
gcFreeLists.purge();
js_PurgeGSNCache(&gsnCache);
/* FIXME: bug 506341. */
@ -714,11 +709,6 @@ js_PurgeThreads(JSContext *cx)
JS_ASSERT(cx->thread != thread);
js_DestroyScriptsToGC(cx, &thread->data);
/*
* The following is potentially suboptimal as it also zeros the
* caches in data, but the code simplicity wins here.
*/
thread->data.gcFreeLists.purge();
DestroyThread(thread);
e.removeFront();
} else {
@ -2210,8 +2200,6 @@ void
JSContext::purge()
{
FreeOldArenas(runtime, &regExpPool);
/* FIXME: bug 586161 */
compartment->purge(this);
}
void

Просмотреть файл

@ -1031,11 +1031,15 @@ struct TraceMonitor {
FragStatsMap* profTab;
#endif
bool ontrace() const {
return !!tracecx;
}
/* Flush the JIT cache. */
void flush();
/* Mark all objects baked into native code in the code cache. */
void mark(JSTracer *trc);
/* Sweep any cache entry pointing to dead GC things. */
void sweep();
bool outOfMemory() const;
};
@ -1048,9 +1052,9 @@ struct TraceMonitor {
* executing. cx must be a context on the current thread.
*/
#ifdef JS_TRACER
# define JS_ON_TRACE(cx) (JS_TRACE_MONITOR(cx).tracecx != NULL)
# define JS_ON_TRACE(cx) (JS_TRACE_MONITOR(cx).ontrace())
#else
# define JS_ON_TRACE(cx) JS_FALSE
# define JS_ON_TRACE(cx) false
#endif
/* Number of potentially reusable scriptsToGC to search for the eval cache. */
@ -1114,8 +1118,6 @@ struct JSThreadData {
*/
volatile int32 interruptFlags;
JSGCFreeLists gcFreeLists;
/* Keeper of the contiguous stack used by all contexts in this thread. */
js::StackSpace stackSpace;
@ -1278,100 +1280,15 @@ typedef struct JSPropertyTreeEntry {
js::Shape *child;
} JSPropertyTreeEntry;
namespace js {
struct GCPtrHasher
{
typedef void *Lookup;
static HashNumber hash(void *key) {
return HashNumber(uintptr_t(key) >> JS_GCTHING_ZEROBITS);
}
static bool match(void *l, void *k) {
return l == k;
}
};
typedef HashMap<void *, uint32, GCPtrHasher, SystemAllocPolicy> GCLocks;
struct RootInfo {
RootInfo() {}
RootInfo(const char *name, JSGCRootType type) : name(name), type(type) {}
const char *name;
JSGCRootType type;
};
typedef js::HashMap<void *,
RootInfo,
js::DefaultHasher<void *>,
js::SystemAllocPolicy> RootedValueMap;
/* If HashNumber grows, need to change WrapperHasher. */
JS_STATIC_ASSERT(sizeof(HashNumber) == 4);
struct WrapperHasher
{
typedef Value Lookup;
static HashNumber hash(Value key) {
uint64 bits = JSVAL_BITS(Jsvalify(key));
return (uint32)bits ^ (uint32)(bits >> 32);
}
static bool match(const Value &l, const Value &k) {
return l == k;
}
};
typedef HashMap<Value, Value, WrapperHasher, SystemAllocPolicy> WrapperMap;
class AutoValueVector;
class AutoIdVector;
} /* namespace js */
struct JSCompartment {
JSRuntime *rt;
JSPrincipals *principals;
void *data;
bool marked;
js::WrapperMap crossCompartmentWrappers;
bool debugMode;
/* List all scripts in this compartment. */
JSCList scripts;
JSCompartment(JSRuntime *cx);
~JSCompartment();
bool init();
bool wrap(JSContext *cx, js::Value *vp);
bool wrap(JSContext *cx, JSString **strp);
bool wrap(JSContext *cx, JSObject **objp);
bool wrapId(JSContext *cx, jsid *idp);
bool wrap(JSContext *cx, js::PropertyOp *op);
bool wrap(JSContext *cx, js::PropertyDescriptor *desc);
bool wrap(JSContext *cx, js::AutoIdVector &props);
bool wrapException(JSContext *cx);
void sweep(JSContext *cx);
#ifdef JS_METHODJIT
bool addScript(JSContext *cx, JSScript *script);
void removeScript(JSScript *script);
#endif
void purge(JSContext *cx);
};
typedef void
(* JSActivityCallback)(void *arg, JSBool active);
struct JSRuntime {
/* Default compartment. */
JSCompartment *defaultCompartment;
#ifdef JS_THREADSAFE
bool defaultCompartmentIsLocked;
#endif
/* List of compartments (protected by the GC lock). */
js::Vector<JSCompartment *, 0, js::SystemAllocPolicy> compartments;
@ -1416,12 +1333,6 @@ struct JSRuntime {
/* Garbage collector state, used by jsgc.c. */
js::GCChunkSet gcChunkSet;
/* GC chunks with at least one free arena. */
js::GCChunkInfoVector gcFreeArenaChunks;
#ifdef DEBUG
JSGCArena *gcEmptyArenaList;
#endif
JSGCArenaList gcArenaList[FINALIZE_LIMIT];
js::RootedValueMap gcRootsHash;
js::GCLocks gcLocksHash;
jsrefcount gcKeepAtoms;
@ -1436,7 +1347,6 @@ struct JSRuntime {
uint32 gcTriggerFactor;
size_t gcTriggerBytes;
volatile JSBool gcIsNeeded;
volatile JSBool gcFlushCodeCaches;
/*
* NB: do not pack another flag here by claiming gcPadding unless the new
@ -1737,8 +1647,8 @@ struct JSRuntime {
#endif
#ifdef JS_GCMETER
JSGCStats gcStats;
JSGCArenaStats gcArenaStats[FINALIZE_LIMIT];
js::gc::JSGCStats gcStats;
js::gc::JSGCArenaStats globalArenaStats[js::gc::FINALIZE_LIMIT];
#endif
#ifdef DEBUG
@ -2867,9 +2777,7 @@ class AutoEnumStateRooter : private AutoGCRooter
Value *addr() { return &stateValue; }
protected:
void trace(JSTracer *trc) {
JS_CALL_OBJECT_TRACER(trc, obj, "js::AutoEnumStateRooter.obj");
}
void trace(JSTracer *trc);
JSObject * const obj;
@ -2911,6 +2819,42 @@ public:
~AutoUnlockGC() { JS_LOCK_GC(rt); }
};
/*
 * RAII: takes the runtime's atom-state lock for the lifetime of the scope
 * and, in threadsafe builds, records that the default compartment is locked
 * so other code can assert it.
 */
class AutoLockDefaultCompartment {
  private:
    JSContext *cx;

  public:
    AutoLockDefaultCompartment(JSContext *cx) : cx(cx) {
        JS_LOCK(cx, &cx->runtime->atomState.lock);
#ifdef JS_THREADSAFE
        cx->runtime->defaultCompartmentIsLocked = true;
#endif
    }
    ~AutoLockDefaultCompartment() {
        JS_UNLOCK(cx, &cx->runtime->atomState.lock);
#ifdef JS_THREADSAFE
        cx->runtime->defaultCompartmentIsLocked = false;
#endif
    }
};
/*
 * RAII inverse of AutoLockDefaultCompartment: RELEASES the atom-state lock
 * on construction and re-acquires it on destruction. Use inside a scope
 * that already holds the lock but must drop it temporarily.
 */
class AutoUnlockDefaultCompartment {
  private:
    JSContext *cx;

  public:
    AutoUnlockDefaultCompartment(JSContext *cx) : cx(cx) {
        JS_UNLOCK(cx, &cx->runtime->atomState.lock);
#ifdef JS_THREADSAFE
        cx->runtime->defaultCompartmentIsLocked = false;
#endif
    }
    ~AutoUnlockDefaultCompartment() {
        JS_LOCK(cx, &cx->runtime->atomState.lock);
#ifdef JS_THREADSAFE
        cx->runtime->defaultCompartmentIsLocked = true;
#endif
    }
};
class AutoKeepAtoms {
JSRuntime *rt;
public:
@ -3157,7 +3101,7 @@ js_ReportAllocationOverflow(JSContext *cx);
JS_BEGIN_MACRO \
int stackDummy_; \
\
if (!JS_CHECK_STACK_SIZE(cx, stackDummy_)) { \
if (!JS_CHECK_STACK_SIZE(cx->stackLimit, &stackDummy_)) { \
js_ReportOverRecursed(cx); \
onerror; \
} \
@ -3207,16 +3151,6 @@ js_ReportValueErrorFlags(JSContext *cx, uintN flags, const uintN errorNumber,
extern JSErrorFormatString js_ErrorFormatString[JSErr_Limit];
/*
* See JS_SetThreadStackLimit in jsapi.c, where we check that the stack
* grows in the expected direction.
*/
#if JS_STACK_GROWTH_DIRECTION > 0
# define JS_CHECK_STACK_SIZE(cx, lval) ((jsuword)&(lval) < (cx)->stackLimit)
#else
# define JS_CHECK_STACK_SIZE(cx, lval) ((jsuword)&(lval) > (cx)->stackLimit)
#endif
#ifdef JS_THREADSAFE
# define JS_ASSERT_REQUEST_DEPTH(cx) (JS_ASSERT((cx)->thread), \
JS_ASSERT((cx)->thread->data.requestDepth >= 1))

Просмотреть файл

@ -474,12 +474,6 @@ FrameRegsIter::operator++()
return *this;
}
void
AutoIdArray::trace(JSTracer *trc) {
JS_ASSERT(tag == IDARRAY);
MarkIdRange(trc, idArray->length, idArray->vector, "JSAutoIdArray.idArray");
}
class AutoNamespaceArray : protected AutoGCRooter {
public:
AutoNamespaceArray(JSContext *cx) : AutoGCRooter(cx, NAMESPACES) {
@ -677,13 +671,17 @@ CallJSNativeConstructor(JSContext *cx, js::Native native, uintN argc, js::Value
* Native constructors must return non-primitive values on success.
* Although it is legal, if a constructor returns the callee, there is a
* 99.9999% chance it is a bug. If any valid code actually wants the
* constructor to return the callee, this can be removed.
* constructor to return the callee, the assertion can be removed or
* (another) conjunct can be added to the antecedent.
*
* Proxies are exceptions to both rules: they can return primitives and
* they allow content to return the callee.
*
* (new Object(Object)) returns the callee.
*/
extern JSBool proxy_Construct(JSContext *, uintN, Value *);
JS_ASSERT_IF(native != proxy_Construct,
JS_ASSERT_IF(native != proxy_Construct &&
(!callee->isFunction() || callee->getFunctionPrivate()->u.n.clasp != &js_ObjectClass),
!vp->isPrimitive() && callee != &vp[0].toObject());
return true;

337
js/src/jscompartment.cpp Normal file
Просмотреть файл

@ -0,0 +1,337 @@
/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=4 sw=4 et tw=99:
*
* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is Mozilla SpiderMonkey JavaScript 1.9 code, released
* May 28, 2008.
*
* The Initial Developer of the Original Code is
* Mozilla Foundation
* Portions created by the Initial Developer are Copyright (C) 2010
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
*
* Alternatively, the contents of this file may be used under the terms of
* either of the GNU General Public License Version 2 or later (the "GPL"),
* or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
#include "jscompartment.h"
#include "jsgc.h"
#include "jscntxt.h"
#include "jsproxy.h"
#include "jsscope.h"
#include "methodjit/PolyIC.h"
#include "methodjit/MonoIC.h"
#include "jsgcinlines.h"
using namespace js;
using namespace js::gc;
/* A new compartment starts unmarked, with no principals and no scripts. */
JSCompartment::JSCompartment(JSRuntime *rt)
  : rt(rt), principals(NULL), data(NULL), marked(false), debugMode(false)
{
    JS_INIT_CLIST(&scripts);
}
/* Arena teardown happens in finishArenaLists(), not here. */
JSCompartment::~JSCompartment()
{
}
/*
 * Initialize per-compartment GC state: reset the current chunk, all arena
 * lists and free lists, and the GC-meter counters. Only the wrapper-table
 * initialization can fail; its result is returned.
 */
bool
JSCompartment::init()
{
    chunk = NULL;
    shortStringArena.init();
    stringArena.init();
    funArena.init();
#if JS_HAS_XML_SUPPORT
    xmlArena.init();
#endif
    objArena.init();
    for (unsigned i = 0; i < JS_EXTERNAL_STRING_LIMIT; i++)
        externalStringArenas[i].init();
    for (unsigned i = 0; i < FINALIZE_LIMIT; i++)
        freeLists.finalizables[i] = NULL;
#ifdef JS_GCMETER
    memset(&compartmentStats, 0, sizeof(JSGCArenaStats) * FINALIZE_LIMIT);
#endif
    return crossCompartmentWrappers.init();
}
/*
 * Report whether every finalizable arena list in this compartment is
 * empty, i.e. the compartment currently owns no GC things of any kind.
 */
bool
JSCompartment::arenaListsAreEmpty()
{
    if (!objArena.isEmpty())
        return false;
    if (!funArena.isEmpty())
        return false;
#if JS_HAS_XML_SUPPORT
    if (!xmlArena.isEmpty())
        return false;
#endif
    if (!shortStringArena.isEmpty())
        return false;
    if (!stringArena.isEmpty())
        return false;
    for (unsigned kind = 0; kind < JS_EXTERNAL_STRING_LIMIT; kind++) {
        if (!externalStringArenas[kind].isEmpty())
            return false;
    }
    return true;
}
/*
 * Rewrite *vp for use inside this compartment: pass non-GC values through,
 * reuse a cached cross-compartment wrapper when one exists, copy strings,
 * or create and cache a fresh wrapper object.
 */
bool
JSCompartment::wrap(JSContext *cx, Value *vp)
{
    JS_ASSERT(cx->compartment == this);

    uintN flags = 0;

    /* Proto wrapping below recurses; guard the C stack. */
    JS_CHECK_RECURSION(cx, return false);

    /* Only GC things have to be wrapped or copied. */
    if (!vp->isMarkable())
        return true;

    /* Static strings do not have to be wrapped. */
    if (vp->isString() && JSString::isStatic(vp->toString()))
        return true;

    /* Unwrap incoming objects. */
    if (vp->isObject()) {
        JSObject *obj = &vp->toObject();

        /* If the object is already in this compartment, we are done. */
        if (obj->getCompartment(cx) == this)
            return true;

        /* Don't unwrap an outer window proxy. */
        if (!obj->getClass()->ext.innerObject) {
            obj = vp->toObject().unwrap(&flags);
            OBJ_TO_OUTER_OBJECT(cx, obj);
            if (!obj)
                return false;
            vp->setObject(*obj);
        }

        /* If the wrapped object is already in this compartment, we are done. */
        if (obj->getCompartment(cx) == this)
            return true;
    }

    /* If we already have a wrapper for this value, use it. */
    if (WrapperMap::Ptr p = crossCompartmentWrappers.lookup(*vp)) {
        *vp = p->value;
        return true;
    }

    /* Strings are copied into this compartment rather than proxied. */
    if (vp->isString()) {
        Value orig = *vp;
        JSString *str = vp->toString();
        JSString *wrapped = js_NewStringCopyN(cx, str->chars(), str->length());
        if (!wrapped)
            return false;
        vp->setString(wrapped);
        return crossCompartmentWrappers.put(orig, *vp);
    }

    JSObject *obj = &vp->toObject();

    /*
     * Recurse to wrap the prototype. Long prototype chains will run out of
     * stack, causing an error in CHECK_RECURSE.
     *
     * Wrapping the proto before creating the new wrapper and adding it to the
     * cache helps avoid leaving a bad entry in the cache on OOM. But note that
     * if we wrapped both proto and parent, we would get infinite recursion
     * here (since Object.prototype->parent->proto leads to Object.prototype
     * itself).
     */
    JSObject *proto = obj->getProto();
    if (!wrap(cx, &proto))
        return false;

    /*
     * We hand in the original wrapped object into the wrap hook to allow
     * the wrap hook to reason over what wrappers are currently applied
     * to the object.
     */
    JSObject *wrapper = cx->runtime->wrapObjectCallback(cx, obj, proto, flags);
    if (!wrapper)
        return false;
    wrapper->setProto(proto);
    vp->setObject(*wrapper);

    /* Cache under the wrapped object so the lookup above will hit next time. */
    if (!crossCompartmentWrappers.put(wrapper->getProxyPrivate(), *vp))
        return false;

    /*
     * Wrappers should really be parented to the wrapped parent of the wrapped
     * object, but in that case a wrapped global object would have a NULL
     * parent without being a proper global object (JSCLASS_IS_GLOBAL). Instead,
     * we parent all wrappers to the global object in their home compartment.
     * This loses us some transparency, and is generally very cheesy.
     */
    JSObject *global;
    if (cx->hasfp()) {
        global = cx->fp()->scopeChain().getGlobal();
    } else {
        global = cx->globalObject;
        OBJ_TO_INNER_OBJECT(cx, global);
        if (!global)
            return false;
    }

    wrapper->setParent(global);
    return true;
}
/* Wrap *strp in place for use inside this compartment. */
bool
JSCompartment::wrap(JSContext *cx, JSString **strp)
{
    AutoValueRooter rooted(cx, StringValue(*strp));
    bool ok = wrap(cx, rooted.addr());
    if (ok)
        *strp = rooted.value().toString();
    return ok;
}
/* Wrap *objp in place; a null object needs no wrapping. */
bool
JSCompartment::wrap(JSContext *cx, JSObject **objp)
{
    JSObject *obj = *objp;
    if (!obj)
        return true;
    AutoValueRooter rooted(cx, ObjectValue(*obj));
    if (!wrap(cx, rooted.addr()))
        return false;
    *objp = &rooted.value().toObject();
    return true;
}
/* Wrap *idp in place. Integer ids reference no GC things and pass through. */
bool
JSCompartment::wrapId(JSContext *cx, jsid *idp)
{
    if (JSID_IS_INT(*idp))
        return true;
    AutoValueRooter rooted(cx, IdToValue(*idp));
    return wrap(cx, rooted.addr()) && ValueToId(cx, rooted.value(), idp);
}
/* Wrap a property op by round-tripping it through its object representation. */
bool
JSCompartment::wrap(JSContext *cx, PropertyOp *propp)
{
    Value asObj = CastAsObjectJsval(*propp);
    if (!wrap(cx, &asObj))
        return false;
    *propp = CastAsPropertyOp(asObj.toObjectOrNull());
    return true;
}
/*
 * Wrap every GC-thing field of a property descriptor in place. Getter and
 * setter are object-valued (and thus wrappable) only when the corresponding
 * attribute bit says so.
 */
bool
JSCompartment::wrap(JSContext *cx, PropertyDescriptor *desc)
{
    if (!wrap(cx, &desc->obj))
        return false;
    if ((desc->attrs & JSPROP_GETTER) && !wrap(cx, &desc->getter))
        return false;
    if ((desc->attrs & JSPROP_SETTER) && !wrap(cx, &desc->setter))
        return false;
    return wrap(cx, &desc->value);
}
/*
 * Wrap each id in |props| in place; fails on the first id that cannot be
 * wrapped.
 *
 * Fix: the original copied props.length() (a size_t) into a signed jsint
 * and converted it back to size_t in the loop condition. Iterate with the
 * natural unsigned type instead.
 */
bool
JSCompartment::wrap(JSContext *cx, AutoIdVector &props)
{
    jsid *vector = props.begin();
    for (size_t n = 0, length = props.length(); n < length; ++n) {
        if (!wrapId(cx, &vector[n]))
            return false;
    }
    return true;
}
/*
 * Wrap the pending exception, if any, into this compartment.
 *
 * Returns false whenever an exception was pending — regardless of whether
 * rewrapping succeeded — so callers can propagate the throw; returns true
 * only when there was nothing to wrap. If wrapping the exception value
 * fails, the exception is dropped (throwing stays cleared).
 */
bool
JSCompartment::wrapException(JSContext *cx)
{
    JS_ASSERT(cx->compartment == this);

    if (cx->throwing) {
        AutoValueRooter tvr(cx, cx->exception);
        cx->throwing = false;
        cx->exception.setNull();
        /* Re-arm the throw only if the value wrapped cleanly. */
        if (wrap(cx, tvr.addr())) {
            cx->throwing = true;
            cx->exception = tvr.value();
        }
        return false;
    }
    return true;
}
/*
 * Per-GC sweep: forget the current allocation chunk, drop wrapper-table
 * entries whose wrapped thing is about to be finalized, and (method JIT +
 * mono ICs) sweep call ICs in every script of this compartment.
 */
void
JSCompartment::sweep(JSContext *cx)
{
    chunk = NULL;
    /* Remove dead wrappers from the table. */
    for (WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront()) {
        if (IsAboutToBeFinalized(e.front().value.toGCThing()))
            e.removeFront();
    }

#if defined JS_METHODJIT && defined JS_MONOIC
    for (JSCList *cursor = scripts.next; cursor != &scripts; cursor = cursor->next) {
        JSScript *script = reinterpret_cast<JSScript *>(cursor);
        if (script->jit)
            mjit::ic::SweepCallICs(script);
    }
#endif
}
/*
 * Release rebuildable caches: the compartment's GC free lists and, under
 * the method JIT, inline-cache state in every script in this compartment.
 */
void
JSCompartment::purge(JSContext *cx)
{
    freeLists.purge();

#ifdef JS_METHODJIT
    /* scripts is a circular JSCList whose entries embed their link field. */
    for (JSScript *script = (JSScript *)scripts.next;
         &script->links != &scripts;
         script = (JSScript *)script->links.next) {
        if (script->jit) {
# if defined JS_POLYIC
            mjit::ic::PurgePICs(cx, script);
# endif
# if defined JS_MONOIC
            /*
             * MICs do not refer to data which can be GC'ed, but are sensitive
             * to shape regeneration.
             */
            if (cx->runtime->gcRegenShapes)
                mjit::ic::PurgeMICs(cx, script);
# endif
        }
    }
#endif
}

135
js/src/jscompartment.h Normal file
Просмотреть файл

@ -0,0 +1,135 @@
/* -*- Mode: C; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
*
* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is SpiderMonkey code.
*
* The Initial Developer of the Original Code is
* Mozilla Corporation.
* Portions created by the Initial Developer are Copyright (C) 2010
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
*
*
* Alternatively, the contents of this file may be used under the terms of
* either of the GNU General Public License Version 2 or later (the "GPL"),
* or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
#ifndef jscompartment_h___
#define jscompartment_h___
#include "jscntxt.h"
#include "jsgc.h"
#include "jsobj.h"
#include "jsfun.h"
#include "jsgcstats.h"
#include "jsclist.h"
#include "jsxml.h"
/*
 * A compartment owns the GC arenas, free lists and cross-compartment
 * wrapper table for one group of related globals.
 */
struct JSCompartment {
    JSRuntime *rt;
    JSPrincipals *principals;
    js::gc::Chunk *chunk;          /* cleared in init() and at every sweep() */

    /* One arena list per finalizable thing kind. */
    js::gc::ArenaList<JSObject> objArena;
    js::gc::ArenaList<JSFunction> funArena;
    js::gc::ArenaList<JSShortString> shortStringArena;
    js::gc::ArenaList<JSString> stringArena;
    js::gc::ArenaList<JSString> externalStringArenas[js::gc::JS_EXTERNAL_STRING_LIMIT];
#if JS_HAS_XML_SUPPORT
    js::gc::ArenaList<JSXML> xmlArena;
#endif
    js::gc::FreeLists freeLists;

#ifdef JS_GCMETER
    js::gc::JSGCArenaStats compartmentStats[js::gc::FINALIZE_LIMIT];
#endif

    void *data;                    /* embedding-private pointer */
    bool marked;
    js::WrapperMap crossCompartmentWrappers;   /* wrapped thing -> wrapper */
    bool debugMode;

    /* List all scripts in this compartment. */
    JSCList scripts;

    JSCompartment(JSRuntime *cx);
    ~JSCompartment();

    bool init();

    /* Rewrap the given value (or derived form) for use in this compartment. */
    bool wrap(JSContext *cx, js::Value *vp);
    bool wrap(JSContext *cx, JSString **strp);
    bool wrap(JSContext *cx, JSObject **objp);
    bool wrapId(JSContext *cx, jsid *idp);
    bool wrap(JSContext *cx, js::PropertyOp *op);
    bool wrap(JSContext *cx, js::PropertyDescriptor *desc);
    bool wrap(JSContext *cx, js::AutoIdVector &props);
    bool wrapException(JSContext *cx);

    void sweep(JSContext *cx);
#ifdef JS_METHODJIT
    bool addScript(JSContext *cx, JSScript *script);
    void removeScript(JSScript *script);
#endif
    void purge(JSContext *cx);
    void finishArenaLists();
    bool arenaListsAreEmpty();
};
namespace js {
/* RAII: records cx->compartment on entry and restores it on scope exit. */
class PreserveCompartment {
  protected:
    JSContext *cx;
  private:
    JSCompartment *oldCompartment;
    JS_DECL_USE_GUARD_OBJECT_NOTIFIER
  public:
    PreserveCompartment(JSContext *cx JS_GUARD_OBJECT_NOTIFIER_PARAM) : cx(cx) {
        JS_GUARD_OBJECT_NOTIFIER_INIT;
        oldCompartment = cx->compartment;
    }

    ~PreserveCompartment() {
        /* Restore whatever compartment was current at construction time. */
        cx->compartment = oldCompartment;
    }
};
/*
 * RAII: switch cx into a given compartment (or the compartment of a given
 * object) for the scope; the base class restores the old one on exit.
 */
class SwitchToCompartment : public PreserveCompartment {
  public:
    SwitchToCompartment(JSContext *cx, JSCompartment *newCompartment) : PreserveCompartment(cx) {
        cx->compartment = newCompartment;
    }

    SwitchToCompartment(JSContext *cx, JSObject *target) : PreserveCompartment(cx) {
        cx->compartment = target->getCompartment(cx);
    }
};
}
#endif /* jscompartment_h___ */

Просмотреть файл

@ -74,6 +74,7 @@
#include "methodjit/Retcon.h"
using namespace js;
using namespace js::gc;
typedef struct JSTrap {
JSCList links;
@ -601,8 +602,8 @@ js_TraceWatchPoints(JSTracer *trc, JSObject *obj)
if (wp->object == obj) {
wp->shape->trace(trc);
if (wp->shape->hasSetterValue() && wp->setter)
JS_CALL_OBJECT_TRACER(trc, CastAsObject(wp->setter), "wp->setter");
JS_CALL_OBJECT_TRACER(trc, wp->closure, "wp->closure");
MarkObject(trc, *CastAsObject(wp->setter), "wp->setter");
MarkObject(trc, *wp->closure, "wp->closure");
}
}
}
@ -620,7 +621,7 @@ js_SweepWatchPoints(JSContext *cx)
&wp->links != &rt->watchPointList;
wp = next) {
next = (JSWatchPoint *)wp->links.next;
if (js_IsAboutToBeFinalized(wp->object)) {
if (IsAboutToBeFinalized(wp->object)) {
sample = rt->debuggerMutations;
/* Ignore failures. */

Просмотреть файл

@ -83,6 +83,7 @@
#define TRYNOTE_SIZE(n) ((n) * sizeof(JSTryNote))
using namespace js;
using namespace js::gc;
static JSBool
NewTryNote(JSContext *cx, JSCodeGenerator *cg, JSTryNoteKind kind,
@ -2123,6 +2124,7 @@ BindNameToSlot(JSContext *cx, JSCodeGenerator *cg, JSParseNode *pn)
*/
if (cg->compileAndGo() &&
cg->compiler()->globalScope->globalObj &&
!pn->isDeoptimized() &&
!(cg->flags & TCF_STRICT_MODE_CODE)) {
switch (op) {
case JSOP_NAME: op = JSOP_GETGNAME; break;

Просмотреть файл

@ -67,6 +67,7 @@
#include "jsobjinlines.h"
using namespace js;
using namespace js::gc;
/* Forward declarations for js_ErrorClass's initializer. */
static JSBool
@ -407,16 +408,14 @@ exn_trace(JSTracer *trc, JSObject *obj)
priv = GetExnPrivate(trc->context, obj);
if (priv) {
if (priv->message)
JS_CALL_STRING_TRACER(trc, priv->message, "exception message");
MarkString(trc, priv->message, "exception message");
if (priv->filename)
JS_CALL_STRING_TRACER(trc, priv->filename, "exception filename");
MarkString(trc, priv->filename, "exception filename");
elem = priv->stackElems;
for (vcount = i = 0; i != priv->stackDepth; ++i, ++elem) {
if (elem->funName) {
JS_CALL_STRING_TRACER(trc, elem->funName,
"stack trace function name");
}
if (elem->funName)
MarkString(trc, elem->funName, "stack trace function name");
if (IS_GC_MARKING_TRACER(trc) && elem->filename)
js_MarkScriptFilename(elem->filename);
vcount += elem->argc;

Просмотреть файл

@ -92,6 +92,7 @@
#include "jsobjinlines.h"
using namespace js;
using namespace js::gc;
inline JSObject *
JSObject::getThrowTypeError() const

Просмотреть файл

@ -303,8 +303,6 @@ struct JSFunction : public JSObject
static const uint32 FIRST_FREE_SLOT = JSSLOT_PRIVATE + CLASS_RESERVED_SLOTS + 1;
};
JS_STATIC_ASSERT(sizeof(JSFunction) % JS_GCTHING_ALIGN == 0);
/*
* Trace-annotated native. This expands to a JSFunctionSpec initializer (like
* JS_FN in jsapi.h). fastcall is a FastNative; trcinfo is a

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Разница между файлами не показана из-за своего большого размера Загрузить разницу

482
js/src/jsgcinlines.h Normal file
Просмотреть файл

@ -0,0 +1,482 @@
/* -*- Mode: C; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
*
* ***** BEGIN LICENSE BLOCK *****
* Version: MPL 1.1/GPL 2.0/LGPL 2.1
*
* The contents of this file are subject to the Mozilla Public License Version
* 1.1 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
* http://www.mozilla.org/MPL/
*
* Software distributed under the License is distributed on an "AS IS" basis,
* WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
* for the specific language governing rights and limitations under the
* License.
*
* The Original Code is SpiderMonkey code.
*
* The Initial Developer of the Original Code is
* Mozilla Corporation.
* Portions created by the Initial Developer are Copyright (C) 2010
* the Initial Developer. All Rights Reserved.
*
* Contributor(s):
*
*
* Alternatively, the contents of this file may be used under the terms of
* either of the GNU General Public License Version 2 or later (the "GPL"),
* or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
* in which case the provisions of the GPL or the LGPL are applicable instead
* of those above. If you wish to allow use of your version of this file only
* under the terms of either the GPL or the LGPL, and not to allow others to
* use your version of this file under the terms of the MPL, indicate your
* decision by deleting the provisions above and replace them with the notice
* and other provisions required by the GPL or the LGPL. If you do not delete
* the provisions above, a recipient may use your version of this file under
* the terms of any one of the MPL, the GPL or the LGPL.
*
* ***** END LICENSE BLOCK ***** */
#ifndef jsgcinlines_h___
#define jsgcinlines_h___
#include "jsgc.h"
#include "jscntxt.h"
#include "jscompartment.h"
#include "jslock.h"
#include "jstl.h"
#ifdef JS_GCMETER
# define METER(x) ((void) (x))
# define METER_IF(condition, x) ((void) ((condition) && (x)))
#else
# define METER(x) ((void) 0)
# define METER_IF(condition, x) ((void) 0)
#endif
/*
* Allocates a new GC thing. After a successful allocation the caller must
* fully initialize the thing before calling any function that can potentially
* trigger GC. This will ensure that GC tracing never sees junk values stored
* in the partially initialized thing.
*/
template <typename T>
JS_ALWAYS_INLINE T *
NewFinalizableGCThing(JSContext *cx, unsigned thingKind)
{
    JS_ASSERT(thingKind < js::gc::FINALIZE_LIMIT);
    METER(cx->compartment->compartmentStats[thingKind].alloc++);
    do {
        /* Fast path: pop a cell off the compartment's free list. */
        js::gc::FreeCell *cell = cx->compartment->freeLists.getNext(thingKind);
        if (cell) {
            CheckGCFreeListLink(cell);
            return (T *)cell;
        }
        /* Slow path: refill the free list; NULL result means allocation failed. */
        if (!RefillFinalizableFreeList<T>(cx, thingKind))
            return NULL;
    } while (true);
}
#undef METER
#undef METER_IF
/* Allocate an uninitialized object cell; caller must initialize before GC. */
inline JSObject *
js_NewGCObject(JSContext *cx)
{
    return NewFinalizableGCThing<JSObject>(cx, js::gc::FINALIZE_OBJECT);
}
/* Allocate an uninitialized string cell; caller must initialize before GC. */
inline JSString *
js_NewGCString(JSContext *cx)
{
    return NewFinalizableGCThing<JSString>(cx, js::gc::FINALIZE_STRING);
}
/*
 * Allocate an uninitialized short-string cell; caller must initialize it
 * before anything can trigger a GC.
 *
 * Fix: dropped the redundant C-style cast — the template instantiation
 * already returns JSShortString *.
 */
inline JSShortString *
js_NewGCShortString(JSContext *cx)
{
    return NewFinalizableGCThing<JSShortString>(cx, js::gc::FINALIZE_SHORT_STRING);
}
/* Allocate a string cell from the external-string arena selected by |type|. */
inline JSString *
js_NewGCExternalString(JSContext *cx, uintN type)
{
    JS_ASSERT(type < js::gc::JS_EXTERNAL_STRING_LIMIT);
    /* External string kinds are numbered from FINALIZE_EXTERNAL_STRING0. */
    type += js::gc::FINALIZE_EXTERNAL_STRING0;
    return NewFinalizableGCThing<JSString>(cx, type);
}
/* Allocate an uninitialized function cell; caller must initialize before GC. */
inline JSFunction*
js_NewGCFunction(JSContext *cx)
{
    return NewFinalizableGCThing<JSFunction>(cx, js::gc::FINALIZE_FUNCTION);
}
#if JS_HAS_XML_SUPPORT
/* Allocate an uninitialized XML cell; caller must initialize before GC. */
inline JSXML *
js_NewGCXML(JSContext *cx)
{
    return NewFinalizableGCThing<JSXML>(cx, js::gc::FINALIZE_XML);
}
#endif
namespace js {
namespace gc {
/*
 * Central trace entry point: dispatch a GC thing either to the generic
 * tracer callback (non-marking tracers such as heap dumpers) or to the
 * type-specific marker, then clear the per-thing debug-print state.
 *
 * Fix: replaced the `goto out;` jump over the marking call with a plain
 * if/else. The goto (and the trailing comment-bearing `return`) existed
 * only to keep a label ahead of the closing brace in non-DEBUG builds;
 * the branch form is equivalent and removes the goto entirely.
 */
template<typename T>
static JS_ALWAYS_INLINE void
Mark(JSTracer *trc, T *thing)
{
    JS_ASSERT(thing);
    JS_ASSERT(JS_IS_VALID_TRACE_KIND(GetGCThingTraceKind(thing)));

    /* Every call site must have set a debug name or printer first. */
    JS_ASSERT(trc->debugPrinter || trc->debugPrintArg);

    if (!IS_GC_MARKING_TRACER(trc)) {
        uint32 kind = GetGCThingTraceKind(thing);
        trc->callback(trc, thing, kind);
    } else {
        TypedMarker(trc, thing);
    }

#ifdef DEBUG
    trc->debugPrinter = NULL;
    trc->debugPrintArg = NULL;
#endif
}
/* Mark a string; static strings are skipped (never marked). */
static inline void
MarkString(JSTracer *trc, JSString *str)
{
    JS_ASSERT(str);
    if (JSString::isStatic(str))
        return;
    JS_ASSERT(GetArena<JSString>((Cell *)str)->assureThingIsAligned((JSString *)str));
    Mark(trc, str);
}
/* As MarkString above, but records |name| for debug trace output first. */
static inline void
MarkString(JSTracer *trc, JSString *str, const char *name)
{
    JS_ASSERT(str);
    JS_SET_TRACING_NAME(trc, name);
    MarkString(trc, str);
}
/* Mark an object (by reference), recording |name| for debug trace output. */
static inline void
MarkObject(JSTracer *trc, JSObject &obj, const char *name)
{
    JS_ASSERT(trc);
    JS_ASSERT(&obj);  /* NOTE(review): address of a reference is never null; vestigial check. */
    JS_SET_TRACING_NAME(trc, name);
    /* The thing must be aligned as either an object-arena or function-arena entry. */
    JS_ASSERT(GetArena<JSObject>((Cell *)&obj)->assureThingIsAligned(&obj) ||
              GetArena<JSFunction>((Cell *)&obj)->assureThingIsAligned((JSFunction *)&obj));
    Mark(trc, &obj);
}
/* Trace everything directly reachable from a live object. */
static inline void
MarkChildren(JSTracer *trc, JSObject *obj)
{
    /* If obj has no map, it must be a newborn. */
    if (!obj->map)
        return;

    /* Trace universal (ops-independent) members. */
    if (JSObject *proto = obj->getProto())
        MarkObject(trc, *proto, "proto");
    if (JSObject *parent = obj->getParent())
        MarkObject(trc, *parent, "parent");

    if (obj->emptyShape)
        obj->emptyShape->trace(trc);

    /* Delegate to ops or the native marking op. */
    TraceOp op = obj->getOps()->trace;
    (op ? op : js_TraceObject)(trc, obj);
}
/*
 * Trace a function's children via its object header; unlike the object
 * overload this does not touch emptyShape.
 */
static inline void
MarkChildren(JSTracer *trc, JSFunction *fun)
{
    JSObject *obj = reinterpret_cast<JSObject *>(fun);
    /* No map means a newborn; nothing reachable yet. */
    if (!obj->map)
        return;

    if (JSObject *proto = obj->getProto())
        MarkObject(trc, *proto, "proto");

    if (JSObject *parent = obj->getParent())
        MarkObject(trc, *parent, "parent");

    TraceOp op = obj->getOps()->trace;
    (op ? op : js_TraceObject)(trc, obj);
}
/*
 * Trace the strings a string depends on: a dependent string's base, or a
 * rope node's parent/left/right links.
 */
static inline void
MarkChildren(JSTracer *trc, JSString *str)
{
    if (str->isDependent())
        MarkString(trc, str->dependentBase(), "base");
    else if (str->isRope()) {
        if (str->isInteriorNode())
            MarkString(trc, str->interiorNodeParent(), "parent");
        MarkString(trc, str->ropeLeft(), "left child");
        MarkString(trc, str->ropeRight(), "right child");
    }
}
#ifdef JS_HAS_XML_SUPPORT
/* XML nodes delegate all child tracing to js_TraceXML. */
static inline void
MarkChildren(JSTracer *trc, JSXML *xml)
{
    js_TraceXML(trc, xml);
}
#endif
#if JS_STACK_GROWTH_DIRECTION > 0
# define JS_CHECK_STACK_SIZE(limit, lval) ((jsuword)(lval) < limit)
#else
# define JS_CHECK_STACK_SIZE(limit, lval) ((jsuword)(lval) > limit)
#endif
/*
 * Report whether marking may recurse further, by probing the remaining C
 * stack against the marker's limit. Callers fall back to delayed marking
 * when this returns true.
 */
static inline bool
RecursionTooDeep(GCMarker *gcmarker) {
#ifdef JS_GC_ASSUME_LOW_C_STACK
    return true;
#else
    int stackDummy;
    return !JS_CHECK_STACK_SIZE(gcmarker->stackLimit, &stackDummy);
#endif
}
static JS_ALWAYS_INLINE void
TypedMarker(JSTracer *trc, JSXML *thing)
{
    /* markIfUnmarked returns false when already marked: nothing more to do. */
    if (!reinterpret_cast<Cell *>(thing)->markIfUnmarked(reinterpret_cast<GCMarker *>(trc)->getMarkColor()))
        return;
    GCMarker *gcmarker = static_cast<GCMarker *>(trc);
    if (RecursionTooDeep(gcmarker)) {
        /* Out of C stack: queue the children instead of recursing. */
        gcmarker->delayMarkingChildren(thing);
    } else {
        MarkChildren(trc, thing);
    }
}
static JS_ALWAYS_INLINE void
TypedMarker(JSTracer *trc, JSObject *thing)
{
    JS_ASSERT(thing);
    JS_ASSERT(JSTRACE_OBJECT == GetFinalizableTraceKind(thing->asCell()->arena()->header()->thingKind));

    GCMarker *gcmarker = static_cast<GCMarker *>(trc);
    /* Already marked in this color: children were or will be handled. */
    if (!thing->markIfUnmarked(gcmarker->getMarkColor()))
        return;

    if (RecursionTooDeep(gcmarker)) {
        /* Out of C stack: queue the children instead of recursing. */
        gcmarker->delayMarkingChildren(thing);
    } else {
        MarkChildren(trc, thing);
    }
}
static JS_ALWAYS_INLINE void
TypedMarker(JSTracer *trc, JSFunction *thing)
{
    JS_ASSERT(thing);
    JS_ASSERT(JSTRACE_OBJECT == GetFinalizableTraceKind(thing->asCell()->arena()->header()->thingKind));

    GCMarker *gcmarker = static_cast<GCMarker *>(trc);
    if (!thing->markIfUnmarked(gcmarker->getMarkColor()))
        return;

    if (RecursionTooDeep(gcmarker)) {
        gcmarker->delayMarkingChildren(thing);
    } else {
        /* Upcast: dispatches to the JSObject MarkChildren overload. */
        MarkChildren(trc, static_cast<JSObject *>(thing));
    }
}
/* Short strings have no children; setting the mark bit is sufficient. */
static JS_ALWAYS_INLINE void
TypedMarker(JSTracer *trc, JSShortString *thing)
{
    thing->asCell()->markIfUnmarked();
}
static JS_ALWAYS_INLINE void
TypedMarker(JSTracer *trc, JSString *thing)
{
    /*
     * Iterate through all nodes and leaves in the rope if this is part of a
     * rope; otherwise, we only iterate once: on the string itself.
     */
    JSRopeNodeIterator iter(thing);
    JSString *str = iter.init();
    do {
        /* For each rope node, also walk its chain of dependent bases. */
        for (;;) {
            /* Static strings are never marked. */
            if (JSString::isStatic(str))
                break;
            JS_ASSERT(JSTRACE_STRING == GetFinalizableTraceKind(str->asCell()->arena()->header()->thingKind));
            /* Already marked: no need to continue down this chain. */
            if (!str->asCell()->markIfUnmarked())
                break;
            if (!str->isDependent())
                break;
            str = str->dependentBase();
        }
        str = iter.next();
    } while (str);
}
/* Mark the strings of all non-null atoms in vec[0..len-1]. */
static inline void
MarkAtomRange(JSTracer *trc, size_t len, JSAtom **vec, const char *name)
{
    for (uint32 idx = 0; idx != len; idx++) {
        JSAtom *atom = vec[idx];
        if (!atom)
            continue;
        JS_SET_TRACING_INDEX(trc, name, idx);
        Mark(trc, ATOM_TO_STRING(atom));
    }
}
/* Mark every non-null object in vec[0..len-1]. */
static inline void
MarkObjectRange(JSTracer *trc, size_t len, JSObject **vec, const char *name)
{
    for (uint32 idx = 0; idx != len; idx++) {
        JSObject *obj = vec[idx];
        if (!obj)
            continue;
        JS_SET_TRACING_INDEX(trc, name, idx);
        Mark(trc, obj);
    }
}
/* Mark the GC thing referenced by id: a string, or (rarely) an object. */
static inline void
MarkId(JSTracer *trc, jsid id)
{
    if (JSID_IS_STRING(id)) {
        Mark(trc, JSID_TO_STRING(id));
        return;
    }
    if (JS_UNLIKELY(JSID_IS_OBJECT(id)))
        Mark(trc, JSID_TO_OBJECT(id));
}
/* As MarkId(trc, id), but sets the tracing name first. */
static inline void
MarkId(JSTracer *trc, jsid id, const char *name)
{
    JS_SET_TRACING_NAME(trc, name);
    MarkId(trc, id);
}
/* Mark every id in the half-open range [beg, end), indexed for tracing. */
static inline void
MarkIdRange(JSTracer *trc, jsid *beg, jsid *end, const char *name)
{
    for (size_t i = 0; beg + i != end; ++i) {
        JS_SET_TRACING_INDEX(trc, name, i);
        MarkId(trc, beg[i]);
    }
}
/* Mark the ids in vec[0..len-1] by delegating to the pointer-range form. */
static inline void
MarkIdRange(JSTracer *trc, size_t len, jsid *vec, const char *name)
{
    MarkIdRange(trc, vec, vec + len, name);
}
/*
 * Mark a GC thing whose trace kind is already known, dispatching to the
 * appropriately typed Mark overload.
 */
static inline void
MarkKind(JSTracer *trc, void *thing, uint32 kind)
{
    JS_ASSERT(thing);
    JS_ASSERT(kind == GetGCThingTraceKind(thing));
    switch (kind) {
      case JSTRACE_OBJECT:
        Mark(trc, reinterpret_cast<JSObject *>(thing));
        break;
      case JSTRACE_STRING:
        /* Static strings are never marked. */
        if (JSString::isStatic((JSString *)thing))
            return;
        Mark(trc, reinterpret_cast<JSString *>(thing));
        break;
#if JS_HAS_XML_SUPPORT
      case JSTRACE_XML:
        Mark(trc, reinterpret_cast<JSXML *>(thing));
        break;
#endif
      default:
        /* Unknown trace kind. */
        JS_ASSERT(false);
    }
}
/* N.B. Assumes JS_SET_TRACING_NAME/INDEX has already been called. */
static inline void
MarkValueRaw(JSTracer *trc, const js::Value &v)
{
    /* Only object/string/XML-bearing values reference GC things. */
    if (!v.isMarkable())
        return;
    JS_ASSERT(v.toGCThing());
    MarkKind(trc, v.toGCThing(), v.gcKind());
}
/* Mark the GC thing held by v, if any, labeling it with name. */
static inline void
MarkValue(JSTracer *trc, const js::Value &v, const char *name)
{
    JS_SET_TRACING_NAME(trc, name);
    MarkValueRaw(trc, v);
}
/* Mark each value in the half-open range [beg, end), indexed for tracing. */
static inline void
MarkValueRange(JSTracer *trc, Value *beg, Value *end, const char *name)
{
    for (size_t i = 0; beg + i < end; ++i) {
        JS_SET_TRACING_INDEX(trc, name, i);
        MarkValueRaw(trc, beg[i]);
    }
}
/* Mark the values in vec[0..len-1] by delegating to the pointer-range form. */
static inline void
MarkValueRange(JSTracer *trc, size_t len, Value *vec, const char *name)
{
    MarkValueRange(trc, vec, vec + len, name);
}
/* N.B. Assumes JS_SET_TRACING_NAME/INDEX has already been called. */
static inline void
MarkGCThing(JSTracer *trc, void *thing, uint32 kind)
{
    /* Null things are silently skipped. */
    if (thing)
        MarkKind(trc, thing, kind);
}
static inline void
MarkGCThing(JSTracer *trc, void *thing)
{
    /* Skip null; otherwise derive the trace kind from the thing itself. */
    if (thing)
        MarkKind(trc, thing, GetGCThingTraceKind(thing));
}
/* Mark thing (may be null), labeling it with name. */
static inline void
MarkGCThing(JSTracer *trc, void *thing, const char *name)
{
    JS_SET_TRACING_NAME(trc, name);
    MarkGCThing(trc, thing);
}
/* Mark thing (may be null), labeling it with name and an element index. */
static inline void
MarkGCThing(JSTracer *trc, void *thing, const char *name, size_t index)
{
    JS_SET_TRACING_INDEX(trc, name, index);
    MarkGCThing(trc, thing);
}
/* Mark a non-null thing of known trace kind, labeling it with name. */
static inline void
Mark(JSTracer *trc, void *thing, uint32 kind, const char *name)
{
    JS_ASSERT(thing);
    JS_SET_TRACING_NAME(trc, name);
    MarkKind(trc, thing, kind);
}
}}
#endif /* jsgcinlines_h___ */

Просмотреть файл

@ -42,8 +42,16 @@
#include "jsgc.h"
#include "jsxml.h"
#include "jsbuiltins.h"
#include "jscompartment.h"
using namespace js;
using namespace js::gc;
#define UL(x) ((unsigned long)(x))
#define PERCENT(x,y) (100.0 * (double) (x) / (double) (y))
namespace js {
namespace gc {
#if defined(JS_DUMP_CONSERVATIVE_GC_ROOTS) || defined(JS_GCMETER)
@ -68,6 +76,209 @@ ConservativeGCStats::dump(FILE *fp)
}
#endif
#ifdef JS_GCMETER
/*
 * JS_GCMETER bookkeeping: fold one GC cycle's arena/thing counts for a single
 * thing kind into the owning compartment's stats and mirror them into the
 * runtime-wide global stats.
 */
void
UpdateCompartmentStats(JSCompartment *comp, unsigned thingKind, uint32 nlivearenas,
                       uint32 nkilledArenas, uint32 nthings)
{
    size_t narenas = 0;
    JSGCArenaStats *compSt = &comp->compartmentStats[thingKind];
    JSGCArenaStats *globSt = &comp->rt->globalArenaStats[thingKind];
    /* Arenas seen this cycle = survivors plus those swept away. */
    narenas = nlivearenas + nkilledArenas;
    JS_ASSERT(narenas >= compSt->livearenas);
    /* Arenas beyond the previous cycle's survivors count as new. */
    compSt->newarenas = narenas - compSt->livearenas;
    compSt->narenas = narenas;
    compSt->livearenas = nlivearenas;
    if (compSt->maxarenas < narenas)
        compSt->maxarenas = narenas;
    compSt->totalarenas += narenas;
    compSt->nthings = nthings;
    if (compSt->maxthings < nthings)
        compSt->maxthings = nthings;
    compSt->totalthings += nthings;
    /* Accumulate this compartment's numbers into the per-runtime totals. */
    globSt->newarenas += compSt->newarenas;
    globSt->narenas += narenas;
    globSt->livearenas += compSt->livearenas;
    globSt->totalarenas += compSt->totalarenas;
    globSt->nthings += compSt->nthings;
    globSt->totalthings += compSt->totalthings;
    if (globSt->maxarenas < compSt->maxarenas)
        globSt->maxarenas = compSt->maxarenas;
    if (globSt->maxthings < compSt->maxthings)
        globSt->maxthings = compSt->maxthings;
}
static const char *const GC_ARENA_NAMES[] = {
"object",
"function",
#if JS_HAS_XML_SUPPORT
"xml",
#endif
"short string",
"string",
"external_string_0",
"external_string_1",
"external_string_2",
"external_string_3",
"external_string_4",
"external_string_5",
"external_string_6",
"external_string_7",
};
JS_STATIC_ASSERT(JS_ARRAY_LENGTH(GC_ARENA_NAMES) == FINALIZE_LIMIT);
/*
 * Report, via the out-parameters, the GC thing size and the number of things
 * that fit in one arena for the given finalize kind.
 */
void GetSizeAndThingsPerArena(int thingKind, size_t &thingSize, size_t &thingsPerArena)
{
    switch (thingKind) {
      case FINALIZE_OBJECT:
        thingSize = sizeof(JSObject);
        thingsPerArena = Arena<JSObject>::ThingsPerArena;
        break;
      /* All external-string kinds share JSString's layout. */
      case FINALIZE_STRING:
      case FINALIZE_EXTERNAL_STRING0:
      case FINALIZE_EXTERNAL_STRING1:
      case FINALIZE_EXTERNAL_STRING2:
      case FINALIZE_EXTERNAL_STRING3:
      case FINALIZE_EXTERNAL_STRING4:
      case FINALIZE_EXTERNAL_STRING5:
      case FINALIZE_EXTERNAL_STRING6:
      case FINALIZE_EXTERNAL_STRING7:
        thingSize = sizeof(JSString);
        thingsPerArena = Arena<JSString>::ThingsPerArena;
        break;
      case FINALIZE_SHORT_STRING:
        thingSize = sizeof(JSShortString);
        thingsPerArena = Arena<JSShortString>::ThingsPerArena;
        break;
      case FINALIZE_FUNCTION:
        thingSize = sizeof(JSFunction);
        thingsPerArena = Arena<JSFunction>::ThingsPerArena;
        break;
#if JS_HAS_XML_SUPPORT
      case FINALIZE_XML:
        thingSize = sizeof(JSXML);
        thingsPerArena = Arena<JSXML>::ThingsPerArena;
        break;
#endif
      default:
        /* Unknown thing kind. */
        JS_ASSERT(false);
    }
}
/*
 * Print one JSGCArenaStats table (per thing kind) to fp, followed by the
 * list of never-used kinds and aggregate totals.
 */
void
DumpArenaStats(JSGCArenaStats *stp, FILE *fp)
{
    /* Running totals across all thing kinds. */
    size_t sumArenas = 0, sumTotalArenas = 0, sumThings =0, sumMaxThings = 0;
    size_t sumThingSize = 0, sumTotalThingSize = 0, sumArenaCapacity = 0;
    size_t sumTotalArenaCapacity = 0, sumAlloc = 0, sumLocalAlloc = 0;
    for (int i = 0; i < (int) FINALIZE_LIMIT; i++) {
        JSGCArenaStats *st = &stp[i];
        /* maxarenas == 0 means this kind was never used; reported below. */
        if (st->maxarenas == 0)
            continue;
        size_t thingSize = 0, thingsPerArena = 0;
        GetSizeAndThingsPerArena(i, thingSize, thingsPerArena);
        fprintf(fp, "%s arenas (thing size %lu, %lu things per arena):\n",
                GC_ARENA_NAMES[i], UL(thingSize), UL(thingsPerArena));
        fprintf(fp, " arenas before GC: %lu\n", UL(st->narenas));
        fprintf(fp, " arenas after GC: %lu (%.1f%%)\n",
                UL(st->livearenas), PERCENT(st->livearenas, st->narenas));
        fprintf(fp, " max arenas: %lu\n", UL(st->maxarenas));
        fprintf(fp, " things: %lu\n", UL(st->nthings));
        fprintf(fp, " GC cell utilization: %.1f%%\n",
                PERCENT(st->nthings, thingsPerArena * st->narenas));
        fprintf(fp, " average cell utilization: %.1f%%\n",
                PERCENT(st->totalthings, thingsPerArena * st->totalarenas));
        fprintf(fp, " max things: %lu\n", UL(st->maxthings));
        fprintf(fp, " alloc attempts: %lu\n", UL(st->alloc));
        fprintf(fp, " alloc without locks: %lu (%.1f%%)\n",
                UL(st->localalloc), PERCENT(st->localalloc, st->alloc));
        /* Accumulate this kind into the totals printed at the end. */
        sumArenas += st->narenas;
        sumTotalArenas += st->totalarenas;
        sumThings += st->nthings;
        sumMaxThings += st->maxthings;
        sumThingSize += thingSize * st->nthings;
        sumTotalThingSize += size_t(thingSize * st->totalthings);
        sumArenaCapacity += thingSize * thingsPerArena * st->narenas;
        sumTotalArenaCapacity += thingSize * thingsPerArena * st->totalarenas;
        sumAlloc += st->alloc;
        sumLocalAlloc += st->localalloc;
        putc('\n', fp);
    }
    /* Second pass: list the kinds with no arenas ever allocated. */
    fputs("Never used arenas:\n", fp);
    for (int i = 0; i < (int) FINALIZE_LIMIT; i++) {
        JSGCArenaStats *st = &stp[i];
        if (st->maxarenas != 0)
            continue;
        fprintf(fp, "%s\n", GC_ARENA_NAMES[i]);
    }
    fprintf(fp, "\nTOTAL STATS:\n");
    fprintf(fp, " total GC arenas: %lu\n", UL(sumArenas));
    fprintf(fp, " total GC things: %lu\n", UL(sumThings));
    fprintf(fp, " max total GC things: %lu\n", UL(sumMaxThings));
    fprintf(fp, " GC cell utilization: %.1f%%\n",
            PERCENT(sumThingSize, sumArenaCapacity));
    fprintf(fp, " average cell utilization: %.1f%%\n",
            PERCENT(sumTotalThingSize, sumTotalArenaCapacity));
    fprintf(fp, " alloc attempts: %lu\n", UL(sumAlloc));
    fprintf(fp, " alloc without locks: %lu (%.1f%%)\n",
            UL(sumLocalAlloc), PERCENT(sumLocalAlloc, sumAlloc));
}
/* Print the per-thing-kind arena stats for a single compartment. */
void
DumpCompartmentStats(JSCompartment *comp, FILE *fp)
{
    /* The runtime's default compartment gets a distinguishing header. */
    if (comp->rt->defaultCompartment == comp)
        fprintf(fp, "\n**** DefaultCompartment Allocation Statistics: %p ****\n\n", (void *) comp);
    else
        fprintf(fp, "\n**** Compartment Allocation Statistics: %p ****\n\n", (void *) comp);
    DumpArenaStats(&comp->compartmentStats[0], fp);
}
#endif
} //gc
} //js
#ifdef JS_GCMETER
/*
 * JS_GCMETER entry point: dump global arena stats, per-compartment stats,
 * and conservative-GC stats, gated on the JS_WANT_* switches, then reset
 * the global arena counters for the next interval.
 */
JS_FRIEND_API(void)
js_DumpGCStats(JSRuntime *rt, FILE *fp)
{
#define ULSTAT(x) UL(rt->gcStats.x)
    if (JS_WANT_GC_METER_PRINT) {
        fprintf(fp, "\n**** Global Arena Allocation Statistics: ****\n");
        DumpArenaStats(&rt->globalArenaStats[0], fp);
        fprintf(fp, " bytes allocated: %lu\n", UL(rt->gcBytes));
        fprintf(fp, " allocation failures: %lu\n", ULSTAT(fail));
        fprintf(fp, "allocation retries after GC: %lu\n", ULSTAT(retry));
        fprintf(fp, " valid lock calls: %lu\n", ULSTAT(lock));
        fprintf(fp, " valid unlock calls: %lu\n", ULSTAT(unlock));
        fprintf(fp, " delayed tracing calls: %lu\n", ULSTAT(unmarked));
#ifdef DEBUG
        fprintf(fp, " max trace later count: %lu\n", ULSTAT(maxunmarked));
#endif
        fprintf(fp, "potentially useful GC calls: %lu\n", ULSTAT(poke));
        fprintf(fp, " thing arenas freed so far: %lu\n\n", ULSTAT(afree));
    }
    if (JS_WANT_GC_PER_COMPARTMENT_PRINT)
        for (JSCompartment **c = rt->compartments.begin(); c != rt->compartments.end(); ++c)
            DumpCompartmentStats(*c, fp);
    /* Global arena stats are per-interval; clear them after dumping. */
    PodZero(&rt->globalArenaStats);
    if (JS_WANT_CONSERVATIVE_GC_PRINT)
        rt->gcStats.conservative.dump(fp);
#undef ULSTAT
}
#endif
namespace js {
#ifdef JS_DUMP_CONSERVATIVE_GC_ROOTS
void
GCMarker::dumpConservativeRoots()
@ -126,160 +337,8 @@ GCMarker::dumpConservativeRoots()
}
#endif /* JS_DUMP_CONSERVATIVE_GC_ROOTS */
#ifdef JS_GCMETER
/*
 * Fold one GC cycle's arena counts (survivors plus swept arenas) for a single
 * thing kind into the cumulative stats record.
 */
void
UpdateArenaStats(JSGCArenaStats *st, uint32 nlivearenas, uint32 nkilledArenas,
                 uint32 nthings)
{
    size_t narenas;
    /* Arenas seen this cycle = survivors plus those swept away. */
    narenas = nlivearenas + nkilledArenas;
    JS_ASSERT(narenas >= st->livearenas);
    /* Arenas beyond the previous cycle's survivors count as new. */
    st->newarenas = narenas - st->livearenas;
    st->narenas = narenas;
    st->livearenas = nlivearenas;
    if (st->maxarenas < narenas)
        st->maxarenas = narenas;
    st->totalarenas += narenas;
    st->nthings = nthings;
    if (st->maxthings < nthings)
        st->maxthings = nthings;
    st->totalthings += nthings;
}
JS_FRIEND_API(void)
js_DumpGCStats(JSRuntime *rt, FILE *fp)
{
static const char *const GC_ARENA_NAMES[] = {
"object",
"function",
#if JS_HAS_XML_SUPPORT
"xml",
#endif
"short string",
"string",
"external_string_0",
"external_string_1",
"external_string_2",
"external_string_3",
"external_string_4",
"external_string_5",
"external_string_6",
"external_string_7",
};
fprintf(fp, "\nGC allocation statistics:\n\n");
#define UL(x) ((unsigned long)(x))
#define ULSTAT(x) UL(rt->gcStats.x)
#define PERCENT(x,y) (100.0 * (double) (x) / (double) (y))
size_t sumArenas = 0;
size_t sumTotalArenas = 0;
size_t sumThings = 0;
size_t sumMaxThings = 0;
size_t sumThingSize = 0;
size_t sumTotalThingSize = 0;
size_t sumArenaCapacity = 0;
size_t sumTotalArenaCapacity = 0;
size_t sumAlloc = 0;
size_t sumLocalAlloc = 0;
size_t sumFail = 0;
size_t sumRetry = 0;
for (int i = 0; i < (int) FINALIZE_LIMIT; i++) {
size_t thingSize, thingsPerArena;
JSGCArenaStats *st;
thingSize = rt->gcArenaList[i].thingSize;
thingsPerArena = ThingsPerArena(thingSize);
st = &rt->gcArenaStats[i];
if (st->maxarenas == 0)
continue;
fprintf(fp,
"%s arenas (thing size %lu, %lu things per arena):",
GC_ARENA_NAMES[i], UL(thingSize), UL(thingsPerArena));
putc('\n', fp);
fprintf(fp, " arenas before GC: %lu\n", UL(st->narenas));
fprintf(fp, " new arenas before GC: %lu (%.1f%%)\n",
UL(st->newarenas), PERCENT(st->newarenas, st->narenas));
fprintf(fp, " arenas after GC: %lu (%.1f%%)\n",
UL(st->livearenas), PERCENT(st->livearenas, st->narenas));
fprintf(fp, " max arenas: %lu\n", UL(st->maxarenas));
fprintf(fp, " things: %lu\n", UL(st->nthings));
fprintf(fp, " GC cell utilization: %.1f%%\n",
PERCENT(st->nthings, thingsPerArena * st->narenas));
fprintf(fp, " average cell utilization: %.1f%%\n",
PERCENT(st->totalthings, thingsPerArena * st->totalarenas));
fprintf(fp, " max things: %lu\n", UL(st->maxthings));
fprintf(fp, " alloc attempts: %lu\n", UL(st->alloc));
fprintf(fp, " alloc without locks: %lu (%.1f%%)\n",
UL(st->localalloc), PERCENT(st->localalloc, st->alloc));
sumArenas += st->narenas;
sumTotalArenas += st->totalarenas;
sumThings += st->nthings;
sumMaxThings += st->maxthings;
sumThingSize += thingSize * st->nthings;
sumTotalThingSize += size_t(thingSize * st->totalthings);
sumArenaCapacity += thingSize * thingsPerArena * st->narenas;
sumTotalArenaCapacity += thingSize * thingsPerArena * st->totalarenas;
sumAlloc += st->alloc;
sumLocalAlloc += st->localalloc;
sumFail += st->fail;
sumRetry += st->retry;
putc('\n', fp);
}
fputs("Never used arenas:\n", fp);
for (int i = 0; i < (int) FINALIZE_LIMIT; i++) {
size_t thingSize, thingsPerArena;
JSGCArenaStats *st;
thingSize = rt->gcArenaList[i].thingSize;
thingsPerArena = ThingsPerArena(thingSize);
st = &rt->gcArenaStats[i];
if (st->maxarenas != 0)
continue;
fprintf(fp,
"%s (thing size %lu, %lu things per arena)\n",
GC_ARENA_NAMES[i], UL(thingSize), UL(thingsPerArena));
}
fprintf(fp, "\nTOTAL STATS:\n");
fprintf(fp, " bytes allocated: %lu\n", UL(rt->gcBytes));
fprintf(fp, " total GC arenas: %lu\n", UL(sumArenas));
fprintf(fp, " max allocated arenas: %lu\n", ULSTAT(maxnallarenas));
fprintf(fp, " max allocated chunks: %lu\n", ULSTAT(maxnchunks));
fprintf(fp, " total GC things: %lu\n", UL(sumThings));
fprintf(fp, " max total GC things: %lu\n", UL(sumMaxThings));
fprintf(fp, " GC cell utilization: %.1f%%\n",
PERCENT(sumThingSize, sumArenaCapacity));
fprintf(fp, " average cell utilization: %.1f%%\n",
PERCENT(sumTotalThingSize, sumTotalArenaCapacity));
fprintf(fp, "allocation retries after GC: %lu\n", UL(sumRetry));
fprintf(fp, " alloc attempts: %lu\n", UL(sumAlloc));
fprintf(fp, " alloc without locks: %lu (%.1f%%)\n",
UL(sumLocalAlloc), PERCENT(sumLocalAlloc, sumAlloc));
fprintf(fp, " allocation failures: %lu\n", UL(sumFail));
fprintf(fp, " valid lock calls: %lu\n", ULSTAT(lock));
fprintf(fp, " valid unlock calls: %lu\n", ULSTAT(unlock));
fprintf(fp, " delayed tracing calls: %lu\n", ULSTAT(unmarked));
#ifdef DEBUG
fprintf(fp, " max trace later count: %lu\n", ULSTAT(maxunmarked));
#endif
fprintf(fp, "potentially useful GC calls: %lu\n", ULSTAT(poke));
fprintf(fp, " thing arenas freed so far: %lu\n", ULSTAT(afree));
rt->gcStats.conservative.dump(fp);
#undef UL
#undef ULSTAT
#undef PERCENT
}
#endif
#ifdef MOZ_GCTIMER
namespace js {
jsrefcount newChunkCount = 0;
jsrefcount destroyChunkCount = 0;
@ -289,13 +348,13 @@ GCTimer::GCTimer() {
enter = rdtsc();
}
uint64
uint64
GCTimer::getFirstEnter() {
static uint64 firstEnter = rdtsc();
return firstEnter;
}
void
void
GCTimer::finish(bool lastGC) {
end = rdtsc();
@ -309,7 +368,7 @@ GCTimer::finish(bool lastGC) {
static FILE *gcFile;
if (!gcFile) {
gcFile = fopen("gcTimer.dat", "w");
gcFile = fopen("gcTimer.dat", "a");
fprintf(gcFile, " AppTime, Total, Mark, Sweep, FinObj,");
fprintf(gcFile, " FinStr, Destroy, newChunks, destoyChunks\n");
@ -337,39 +396,9 @@ GCTimer::finish(bool lastGC) {
destroyChunkCount = 0;
}
#ifdef JS_SCOPE_DEPTH_METER
/*
 * Write the scope-depth histograms to /tmp/scopedepth.stats. The file is
 * opened once and kept open for the life of the process.
 */
void
DumpScopeDepthMeter(JSRuntime *rt)
{
    static FILE *fp;
    if (!fp)
        fp = fopen("/tmp/scopedepth.stats", "w");
    /* If the open failed, silently skip dumping. */
    if (fp) {
        JS_DumpBasicStats(&rt->protoLookupDepthStats, "proto-lookup depth", fp);
        JS_DumpBasicStats(&rt->scopeSearchDepthStats, "scope-search depth", fp);
        JS_DumpBasicStats(&rt->hostenvScopeDepthStats, "hostenv scope depth", fp);
        JS_DumpBasicStats(&rt->lexicalScopeDepthStats, "lexical scope depth", fp);
        putc('\n', fp);
        fflush(fp);
    }
}
#endif
#ifdef JS_DUMP_LOOP_STATS
/*
 * Write loop statistics to /tmp/loopstats. The file is opened once and kept
 * open for the life of the process.
 */
void
DumpLoopStats(JSRuntime *rt)
{
    static FILE *lsfp;
    if (!lsfp)
        lsfp = fopen("/tmp/loopstats", "w");
    /* If the open failed, silently skip dumping. */
    if (lsfp) {
        JS_DumpBasicStats(&rt->loopStats, "loops", lsfp);
        fflush(lsfp);
    }
}
#endif
} //js
} /* namespace js */
#endif
#undef UL
#undef PERCENT

Просмотреть файл

@ -46,16 +46,17 @@
/* Define JS_GCMETER here if wanted */
#if defined JS_GCMETER
const bool JS_WANT_GC_METER_PRINT = true;
const bool JS_WANT_GC_PER_COMPARTMENT_PRINT = true;
const bool JS_WANT_CONSERVATIVE_GC_PRINT = true;
#elif defined DEBUG
# define JS_GCMETER 1
const bool JS_WANT_GC_METER_PRINT = false;
const bool JS_WANT_GC_PER_COMPARTMENT_PRINT = false;
const bool JS_WANT_CONSERVATIVE_GC_PRINT = false;
#endif
#define METER_UPDATE_MAX(maxLval, rval) \
METER_IF((maxLval) < (rval), (maxLval) = (rval))
namespace js {
namespace gc {
/*
* The conservative GC test for a word shows that it is either a valid GC
* thing or is not for one of the following reasons.
@ -72,7 +73,7 @@ enum ConservativeGCTest {
};
struct ConservativeGCStats {
uint32 counter[CGCT_END]; /* ConservativeGCTest classification
uint32 counter[gc::CGCT_END]; /* ConservativeGCTest classification
counters */
void add(const ConservativeGCStats &another) {
@ -83,15 +84,10 @@ struct ConservativeGCStats {
void dump(FILE *fp);
};
} /* namespace js */
#ifdef JS_GCMETER
struct JSGCArenaStats {
uint32 alloc; /* allocation attempts */
uint32 localalloc; /* allocations from local lists */
uint32 retry; /* allocation retries after running the GC */
uint32 fail; /* allocation failures */
uint32 nthings; /* live GC things */
uint32 maxthings; /* maximum of live GC cells */
double totalthings; /* live GC things the GC scanned so far */
@ -102,12 +98,17 @@ struct JSGCArenaStats {
uint32 totalarenas; /* total number of arenas with live things that
GC scanned so far */
};
#endif
#ifdef JS_GCMETER
struct JSGCStats {
uint32 lock; /* valid lock calls */
uint32 unlock; /* valid unlock calls */
uint32 unmarked; /* number of times marking of GC thing's children were
delayed due to a low C stack */
uint32 retry; /* allocation retries after running the GC */
uint32 fail; /* allocation failures */
#ifdef DEBUG
uint32 maxunmarked;/* maximum number of things with children to mark
later */
@ -119,27 +120,23 @@ struct JSGCStats {
uint32 nchunks; /* number of allocated chunks */
uint32 maxnchunks; /* maximum number of allocated chunks */
js::ConservativeGCStats conservative;
ConservativeGCStats conservative;
};
extern JS_FRIEND_API(void)
js_DumpGCStats(JSRuntime *rt, FILE *fp);
extern void
UpdateArenaStats(JSGCArenaStats *st, uint32 nlivearenas, uint32 nkilledArenas,
uint32 nthings);
UpdateCompartmentStats(JSCompartment *comp, unsigned thingKind, uint32 nlivearenas,
uint32 nkilledArenas, uint32 nthings);
#endif /* JS_GCMETER */
namespace js {
} //gc
#ifdef MOZ_GCTIMER
const bool JS_WANT_GC_SUITE_PRINT = false; //false for gnuplot output
extern jsrefcount newChunkCount;
extern jsrefcount destroyChunkCount;
const bool JS_WANT_GC_SUITE_PRINT = false; //false for gnuplot output
struct GCTimer {
uint64 enter;
uint64 startMark;
@ -150,7 +147,9 @@ struct GCTimer {
uint64 end;
GCTimer();
static uint64 getFirstEnter();
uint64 getFirstEnter();
void finish(bool lastGC);
};
@ -167,16 +166,9 @@ struct GCTimer {
# define GCTIMER_END(last) ((void) 0)
#endif
#ifdef JS_SCOPE_DEPTH_METER
extern void
DumpScopeDepthMeter(JSRuntime *rt);
#endif
} //js
#ifdef JS_DUMP_LOOP_STATS
extern void
DumpLoopStats(JSRuntime *rt);
#endif
extern JS_FRIEND_API(void)
js_DumpGCStats(JSRuntime *rt, FILE *fp);
} /* namepsace js */
#endif /* jsgcstats_h___ */
#endif /* jsgcstats_h__ */

Просмотреть файл

@ -322,6 +322,9 @@ class HashTable : AllocPolicy
bool init(uint32 length)
{
/* Make sure that init isn't called twice. */
JS_ASSERT(table == NULL);
/*
* Correct for sMaxAlphaFrac such that the table will not resize
* when adding 'length' entries.

Просмотреть файл

@ -94,6 +94,7 @@
#include "jsautooplen.h"
using namespace js;
using namespace js::gc;
/* jsinvoke_cpp___ indicates inclusion from jsinvoke.cpp. */
#if !JS_LONE_INTERPRET ^ defined jsinvoke_cpp___
@ -6512,7 +6513,7 @@ END_CASE(JSOP_ARRAYPUSH)
JS_ASSERT(cx->regs == &regs);
#ifdef JS_TRACER
if (regs.fp->hasImacropc() && cx->throwing) {
// Handle other exceptions as if they came from the imacro-calling pc.
// Handle exceptions as if they came from the imacro-calling pc.
regs.pc = regs.fp->imacropc();
regs.fp->clearImacropc();
atoms = script->atomMap.vector;

Просмотреть файл

@ -81,6 +81,7 @@
#include "jsstrinlines.h"
using namespace js;
using namespace js::gc;
static void iterator_finalize(JSContext *cx, JSObject *obj);
static void iterator_trace(JSTracer *trc, JSObject *obj);

Просмотреть файл

@ -363,7 +363,8 @@ ValueToUint16(JSContext *cx, const js::Value &v, uint16_t *out)
static inline int32
js_DoubleToECMAInt32(jsdouble d)
{
#if defined(__i386__) || defined(__i386)
#if defined(__i386__) || defined(__i386) || defined(__x86_64__) || \
defined(_M_IX86) || defined(_M_X64)
jsdpun du, duh, two32;
uint32 di_h, u_tmp, expon, shift_amount;
int32 mask32;

Просмотреть файл

@ -103,6 +103,7 @@
#include "jsautooplen.h"
using namespace js;
using namespace js::gc;
JS_FRIEND_DATA(const JSObjectMap) JSObjectMap::sharedNonNative(JSObjectMap::SHAPELESS);
@ -434,8 +435,7 @@ js_LeaveSharpObject(JSContext *cx, JSIdArray **idap)
static intN
gc_sharp_table_entry_marker(JSHashEntry *he, intN i, void *arg)
{
JS_CALL_OBJECT_TRACER((JSTracer *)arg, (JSObject *)he->key,
"sharp table entry");
MarkObject((JSTracer *)arg, *(JSObject *)he->key, "sharp table entry");
return JS_DHASH_NEXT;
}
@ -1389,7 +1389,7 @@ obj_hasOwnProperty(JSContext *cx, uintN argc, Value *vp)
}
JSBool
js_HasOwnPropertyHelper(JSContext *cx, JSLookupPropOp lookup, uintN argc,
js_HasOwnPropertyHelper(JSContext *cx, LookupPropOp lookup, uintN argc,
Value *vp)
{
jsid id;
@ -1420,7 +1420,7 @@ js_HasOwnPropertyHelper(JSContext *cx, JSLookupPropOp lookup, uintN argc,
}
JSBool
js_HasOwnProperty(JSContext *cx, JSLookupPropOp lookup, JSObject *obj, jsid id,
js_HasOwnProperty(JSContext *cx, LookupPropOp lookup, JSObject *obj, jsid id,
JSObject **objp, JSProperty **propp)
{
JSAutoResolveFlags rf(cx, JSRESOLVE_QUALIFIED | JSRESOLVE_DETECTING);
@ -1743,8 +1743,9 @@ js_GetOwnPropertyDescriptor(JSContext *cx, JSObject *obj, jsid id, Value *vp)
roots[1] = shape->setterValue();
}
JS_UNLOCK_OBJ(cx, pobj);
} else if (!pobj->getAttributes(cx, id, &attrs)) {
return false;
} else {
if (!pobj->getAttributes(cx, id, &attrs))
return false;
}
if (doGet && !obj->getProperty(cx, id, &roots[2]))
@ -1829,16 +1830,23 @@ obj_keys(JSContext *cx, uintN argc, Value *vp)
return JS_TRUE;
}
static JSBool
HasProperty(JSContext* cx, JSObject* obj, jsid id, Value* vp, JSBool* answerp)
static bool
HasProperty(JSContext* cx, JSObject* obj, jsid id, Value* vp, bool *foundp)
{
if (!JS_HasPropertyById(cx, obj, id, answerp))
return JS_FALSE;
if (!*answerp) {
if (!obj->hasProperty(cx, id, foundp, JSRESOLVE_QUALIFIED | JSRESOLVE_DETECTING))
return false;
if (!*foundp) {
vp->setUndefined();
return JS_TRUE;
return true;
}
return JS_GetPropertyById(cx, obj, id, Jsvalify(vp));
/*
* We must go through the method read barrier in case id is 'get' or 'set'.
* There is no obvious way to defer cloning a joined function object whose
* identity will be used by DefinePropertyOnObject, e.g., or reflected via
* js_GetOwnPropertyDescriptor, as the getter or setter callable object.
*/
return !!obj->getProperty(cx, id, vp);
}
PropDesc::PropDesc()
@ -1876,51 +1884,47 @@ PropDesc::initialize(JSContext* cx, jsid id, const Value &origval)
/* Start with the proper defaults. */
attrs = JSPROP_PERMANENT | JSPROP_READONLY;
JSBool hasProperty;
bool found;
/* 8.10.5 step 3 */
if (!HasProperty(cx, desc, ATOM_TO_JSID(cx->runtime->atomState.enumerableAtom), &v,
&hasProperty)) {
if (!HasProperty(cx, desc, ATOM_TO_JSID(cx->runtime->atomState.enumerableAtom), &v, &found))
return false;
}
if (hasProperty) {
if (found) {
hasEnumerable = JS_TRUE;
if (js_ValueToBoolean(v))
attrs |= JSPROP_ENUMERATE;
}
/* 8.10.5 step 4 */
if (!HasProperty(cx, desc, ATOM_TO_JSID(cx->runtime->atomState.configurableAtom), &v,
&hasProperty)) {
if (!HasProperty(cx, desc, ATOM_TO_JSID(cx->runtime->atomState.configurableAtom), &v, &found))
return false;
}
if (hasProperty) {
if (found) {
hasConfigurable = JS_TRUE;
if (js_ValueToBoolean(v))
attrs &= ~JSPROP_PERMANENT;
}
/* 8.10.5 step 5 */
if (!HasProperty(cx, desc, ATOM_TO_JSID(cx->runtime->atomState.valueAtom), &v, &hasProperty))
if (!HasProperty(cx, desc, ATOM_TO_JSID(cx->runtime->atomState.valueAtom), &v, &found))
return false;
if (hasProperty) {
if (found) {
hasValue = true;
value = v;
}
/* 8.10.6 step 6 */
if (!HasProperty(cx, desc, ATOM_TO_JSID(cx->runtime->atomState.writableAtom), &v, &hasProperty))
if (!HasProperty(cx, desc, ATOM_TO_JSID(cx->runtime->atomState.writableAtom), &v, &found))
return false;
if (hasProperty) {
if (found) {
hasWritable = JS_TRUE;
if (js_ValueToBoolean(v))
attrs &= ~JSPROP_READONLY;
}
/* 8.10.7 step 7 */
if (!HasProperty(cx, desc, ATOM_TO_JSID(cx->runtime->atomState.getAtom), &v, &hasProperty))
if (!HasProperty(cx, desc, ATOM_TO_JSID(cx->runtime->atomState.getAtom), &v, &found))
return false;
if (hasProperty) {
if (found) {
if ((v.isPrimitive() || !js_IsCallable(v)) && !v.isUndefined()) {
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_BAD_GET_SET_FIELD,
js_getter_str);
@ -1932,9 +1936,9 @@ PropDesc::initialize(JSContext* cx, jsid id, const Value &origval)
}
/* 8.10.7 step 8 */
if (!HasProperty(cx, desc, ATOM_TO_JSID(cx->runtime->atomState.setAtom), &v, &hasProperty))
if (!HasProperty(cx, desc, ATOM_TO_JSID(cx->runtime->atomState.setAtom), &v, &found))
return false;
if (hasProperty) {
if (found) {
if ((v.isPrimitive() || !js_IsCallable(v)) && !v.isUndefined()) {
JS_ReportErrorNumber(cx, js_GetErrorMessage, NULL, JSMSG_BAD_GET_SET_FIELD,
js_setter_str);

Просмотреть файл

@ -55,6 +55,7 @@
#include "jslock.h"
#include "jsvalue.h"
#include "jsvector.h"
#include "jscell.h"
namespace js {
@ -152,9 +153,6 @@ struct PropDesc {
return js::CastAsPropertyOp(setterObject());
}
static void traceDescriptorArray(JSTracer* trc, JSObject* obj);
static void finalizeDescriptorArray(JSContext* cx, JSObject* obj);
js::Value pd;
jsid id;
js::Value value, get, set;
@ -278,7 +276,7 @@ struct JSFunction;
* of Values for reserved and dynamic slots. If dslots is not null, dslots[-1]
* records the number of available slots.
*/
struct JSObject {
struct JSObject : js::gc::Cell {
/*
* TraceRecorder must be a friend because it generates code that
* manipulates JSObjects, which requires peeking under any encapsulation.
@ -979,6 +977,7 @@ struct JSObject {
const js::Value &privateSlotValue, JSContext *cx);
inline void finish(JSContext *cx);
JS_ALWAYS_INLINE void finalize(JSContext *cx, unsigned thindKind);
/*
* Like init, but also initializes map. The catch: proto must be the result
@ -1057,7 +1056,7 @@ struct JSObject {
void clear(JSContext *cx);
JSBool lookupProperty(JSContext *cx, jsid id, JSObject **objp, JSProperty **propp) {
JSLookupPropOp op = getOps()->lookupProperty;
js::LookupPropOp op = getOps()->lookupProperty;
return (op ? op : js_LookupProperty)(cx, this, id, objp, propp);
}
@ -1080,12 +1079,12 @@ struct JSObject {
}
JSBool getAttributes(JSContext *cx, jsid id, uintN *attrsp) {
JSAttributesOp op = getOps()->getAttributes;
js::AttributesOp op = getOps()->getAttributes;
return (op ? op : js_GetAttributes)(cx, this, id, attrsp);
}
JSBool setAttributes(JSContext *cx, jsid id, uintN *attrsp) {
JSAttributesOp op = getOps()->setAttributes;
js::AttributesOp op = getOps()->setAttributes;
return (op ? op : js_SetAttributes)(cx, this, id, attrsp);
}
@ -1100,7 +1099,7 @@ struct JSObject {
}
JSType typeOf(JSContext *cx) {
JSTypeOfOp op = getOps()->typeOf;
js::TypeOfOp op = getOps()->typeOf;
return (op ? op : js_TypeOf)(cx, this);
}
@ -1161,7 +1160,6 @@ struct JSObject {
};
JS_STATIC_ASSERT(offsetof(JSObject, fslots) % sizeof(js::Value) == 0);
JS_STATIC_ASSERT(sizeof(JSObject) % JS_GCTHING_ALIGN == 0);
#define JSSLOT_START(clasp) (((clasp)->flags & JSCLASS_HAS_PRIVATE) \
? JSSLOT_PRIVATE + 1 \
@ -1337,11 +1335,11 @@ extern void
js_TraceSharpMap(JSTracer *trc, JSSharpObjectMap *map);
extern JSBool
js_HasOwnPropertyHelper(JSContext *cx, JSLookupPropOp lookup, uintN argc,
js_HasOwnPropertyHelper(JSContext *cx, js::LookupPropOp lookup, uintN argc,
js::Value *vp);
extern JSBool
js_HasOwnProperty(JSContext *cx, JSLookupPropOp lookup, JSObject *obj, jsid id,
js_HasOwnProperty(JSContext *cx, js::LookupPropOp lookup, JSObject *obj, jsid id,
JSObject **objp, JSProperty **propp);
extern JSBool

Просмотреть файл

@ -60,6 +60,9 @@
#include "jsscopeinlines.h"
#include "jsstr.h"
#include "jsgcinlines.h"
#include "jsprobes.h"
inline void
JSObject::dropProperty(JSContext *cx, JSProperty *prop)
{
@ -115,6 +118,26 @@ JSObject::unbrand(JSContext *cx)
return true;
}
/*
 * GC finalizer for objects and functions: run the class finalize hook, notify
 * probes, then release the object's remaining resources via finish().
 */
inline void
JSObject::finalize(JSContext *cx, unsigned thingKind)
{
    JS_ASSERT(thingKind == js::gc::FINALIZE_OBJECT ||
              thingKind == js::gc::FINALIZE_FUNCTION);

    /* Cope with stillborn objects that have no map. */
    if (!map)
        return;

    /* Finalize obj first, in case it needs map and slots. */
    js::Class *clasp = getClass();
    if (clasp->finalize)
        clasp->finalize(cx, this);

    js::Probes::finalizeObject(this);

    finish(cx);
}
/*
* Property read barrier for deferred cloning of compiler-created function
* objects optimized as typically non-escaping, ad-hoc methods in obj.

Просмотреть файл

@ -66,6 +66,7 @@
#include "jsobjinlines.h"
using namespace js;
using namespace js::gc;
#ifdef _MSC_VER
#pragma warning(push)
@ -574,7 +575,10 @@ static JSBool IsNumChar(jschar c)
return ((c <= '9' && c >= '0') || c == '.' || c == '-' || c == '+' || c == 'e' || c == 'E');
}
static JSBool HandleData(JSContext *cx, JSONParser *jp, JSONDataType type);
static JSBool HandleDataString(JSContext *cx, JSONParser *jp);
static JSBool HandleDataKeyString(JSContext *cx, JSONParser *jp);
static JSBool HandleDataNumber(JSContext *cx, JSONParser *jp);
static JSBool HandleDataKeyword(JSContext *cx, JSONParser *jp);
static JSBool PopState(JSContext *cx, JSONParser *jp);
static bool
@ -712,11 +716,11 @@ js_FinishJSONParse(JSContext *cx, JSONParser *jp, const Value &reviver)
// strings because a closing quote triggers value processing.
if ((jp->statep - jp->stateStack) == 1) {
if (*jp->statep == JSON_PARSE_STATE_KEYWORD) {
early_ok = HandleData(cx, jp, JSON_DATA_KEYWORD);
early_ok = HandleDataKeyword(cx, jp);
if (early_ok)
PopState(cx, jp);
} else if (*jp->statep == JSON_PARSE_STATE_NUMBER) {
early_ok = HandleData(cx, jp, JSON_DATA_NUMBER);
early_ok = HandleDataNumber(cx, jp);
if (early_ok)
PopState(cx, jp);
}
@ -948,29 +952,36 @@ HandleKeyword(JSContext *cx, JSONParser *jp, const jschar *buf, uint32 len)
}
static JSBool
HandleData(JSContext *cx, JSONParser *jp, JSONDataType type)
HandleDataString(JSContext *cx, JSONParser *jp)
{
JSBool ok;
JSBool ok = HandleString(cx, jp, jp->buffer.begin(), jp->buffer.length());
if (ok)
jp->buffer.clear();
return ok;
}
switch (type) {
case JSON_DATA_STRING:
ok = HandleString(cx, jp, jp->buffer.begin(), jp->buffer.length());
break;
/*
 * Move the buffered characters into the pending object key; the shared text
 * buffer is cleared only on success.
 */
static JSBool
HandleDataKeyString(JSContext *cx, JSONParser *jp)
{
    JSBool ok = jp->objectKey.append(jp->buffer.begin(), jp->buffer.end());
    if (ok)
        jp->buffer.clear();
    return ok;
}
case JSON_DATA_KEYSTRING:
ok = jp->objectKey.append(jp->buffer.begin(), jp->buffer.end());
break;
case JSON_DATA_NUMBER:
ok = HandleNumber(cx, jp, jp->buffer.begin(), jp->buffer.length());
break;
default:
JS_ASSERT(type == JSON_DATA_KEYWORD);
ok = HandleKeyword(cx, jp, jp->buffer.begin(), jp->buffer.length());
break;
}
static JSBool
HandleDataNumber(JSContext *cx, JSONParser *jp)
{
JSBool ok = HandleNumber(cx, jp, jp->buffer.begin(), jp->buffer.length());
if (ok)
jp->buffer.clear();
return ok;
}
static JSBool
HandleDataKeyword(JSContext *cx, JSONParser *jp)
{
JSBool ok = HandleKeyword(cx, jp, jp->buffer.begin(), jp->buffer.length());
if (ok)
jp->buffer.clear();
return ok;
@ -1107,14 +1118,13 @@ js_ConsumeJSONText(JSContext *cx, JSONParser *jp, const jschar *data, uint32 len
if (c == '"') {
if (!PopState(cx, jp))
return JS_FALSE;
JSONDataType jdt;
if (*jp->statep == JSON_PARSE_STATE_OBJECT_IN_PAIR) {
jdt = JSON_DATA_KEYSTRING;
if (!HandleDataKeyString(cx, jp))
return JS_FALSE;
} else {
jdt = JSON_DATA_STRING;
if (!HandleDataString(cx, jp))
return JS_FALSE;
}
if (!HandleData(cx, jp, jdt))
return JS_FALSE;
} else if (c == '\\') {
*jp->statep = JSON_PARSE_STATE_STRING_ESCAPE;
} else if (c <= 0x1F) {
@ -1184,7 +1194,7 @@ js_ConsumeJSONText(JSContext *cx, JSONParser *jp, const jschar *data, uint32 len
if (!PopState(cx, jp))
return JS_FALSE;
if (!HandleData(cx, jp, JSON_DATA_KEYWORD))
if (!HandleDataKeyword(cx, jp))
return JS_FALSE;
}
break;
@ -1198,7 +1208,7 @@ js_ConsumeJSONText(JSContext *cx, JSONParser *jp, const jschar *data, uint32 len
i--;
if (!PopState(cx, jp))
return JS_FALSE;
if (!HandleData(cx, jp, JSON_DATA_NUMBER))
if (!HandleDataNumber(cx, jp))
return JS_FALSE;
}
break;

Просмотреть файл

@ -37,9 +37,7 @@
#ifndef json_h___
#define json_h___
/*
* JS JSON functions.
*/
#include "jsprvtd.h"
#include "jspubtd.h"
#include "jsvalue.h"
@ -107,13 +105,6 @@ enum JSONParserState {
JSON_PARSE_STATE_KEYWORD
};
enum JSONDataType {
JSON_DATA_STRING,
JSON_DATA_KEYSTRING,
JSON_DATA_NUMBER,
JSON_DATA_KEYWORD
};
struct JSONParser;
extern JSONParser *

Просмотреть файл

@ -82,6 +82,7 @@
#include "jsautooplen.h"
using namespace js;
using namespace js::gc;
/*
* Index limit must stay within 32 bits.

Просмотреть файл

@ -99,6 +99,7 @@
#endif
using namespace js;
using namespace js::gc;
/*
* Asserts to verify assumptions behind pn_ macros.
@ -314,7 +315,7 @@ Parser::trace(JSTracer *trc)
{
JSObjectBox *objbox = traceListHead;
while (objbox) {
JS_CALL_OBJECT_TRACER(trc, objbox->object, "parser.object");
MarkObject(trc, *objbox->object, "parser.object");
objbox = objbox->traceLink;
}
}
@ -3600,6 +3601,7 @@ BindVarOrConst(JSContext *cx, BindData *data, JSAtom *atom, JSTreeContext *tc)
if (stmt && stmt->type == STMT_WITH) {
data->fresh = false;
pn->pn_dflags |= PND_DEOPTIMIZED;
return true;
}
@ -8502,78 +8504,79 @@ Parser::primaryExpr(TokenKind tt, JSBool afterDot)
#endif
) && !(tc->flags & TCF_DECL_DESTRUCTURING)) {
JSStmtInfo *stmt = js_LexicalLookup(tc, pn->pn_atom, NULL);
if (!stmt || stmt->type != STMT_WITH) {
JSDefinition *dn;
JSAtomListElement *ale = tc->decls.lookup(pn->pn_atom);
if (ale) {
dn = ALE_DEFN(ale);
JSDefinition *dn;
JSAtomListElement *ale = tc->decls.lookup(pn->pn_atom);
if (ale) {
dn = ALE_DEFN(ale);
#if JS_HAS_BLOCK_SCOPE
/*
* Skip out-of-scope let bindings along an ALE list or hash
* chain. These can happen due to |let (x = x) x| block and
* expression bindings, where the x on the right of = comes
* from an outer scope. See bug 496532.
*/
while (dn->isLet() && !BlockIdInScope(dn->pn_blockid, tc)) {
do {
ale = ALE_NEXT(ale);
} while (ale && ALE_ATOM(ale) != pn->pn_atom);
if (!ale)
break;
dn = ALE_DEFN(ale);
}
#endif
/*
* Skip out-of-scope let bindings along an ALE list or hash
* chain. These can happen due to |let (x = x) x| block and
* expression bindings, where the x on the right of = comes
* from an outer scope. See bug 496532.
*/
while (dn->isLet() && !BlockIdInScope(dn->pn_blockid, tc)) {
do {
ale = ALE_NEXT(ale);
} while (ale && ALE_ATOM(ale) != pn->pn_atom);
if (!ale)
break;
dn = ALE_DEFN(ale);
}
#endif
}
if (ale) {
dn = ALE_DEFN(ale);
} else {
ale = tc->lexdeps.lookup(pn->pn_atom);
if (ale) {
dn = ALE_DEFN(ale);
} else {
ale = tc->lexdeps.lookup(pn->pn_atom);
if (ale) {
dn = ALE_DEFN(ale);
} else {
/*
* No definition before this use in any lexical scope.
* Add a mapping in tc->lexdeps from pn->pn_atom to a
* new node for the forward-referenced definition. This
* placeholder definition node will be adopted when we
* parse the real defining declaration form, or left as
* a free variable definition if we never see the real
* definition.
*/
ale = MakePlaceholder(pn, tc);
if (!ale)
return NULL;
dn = ALE_DEFN(ale);
/*
* No definition before this use in any lexical scope.
* Add a mapping in tc->lexdeps from pn->pn_atom to a
* new node for the forward-referenced definition. This
* placeholder definition node will be adopted when we
* parse the real defining declaration form, or left as
* a free variable definition if we never see the real
* definition.
*/
ale = MakePlaceholder(pn, tc);
if (!ale)
return NULL;
dn = ALE_DEFN(ale);
/*
* In case this is a forward reference to a function,
* we pessimistically set PND_FUNARG if the next token
* is not a left parenthesis.
*
* If the definition eventually parsed into dn is not a
* function, this flag won't hurt, and if we do parse a
* function with pn's name, then the PND_FUNARG flag is
* necessary for safe context->display-based optimiza-
* tion of the closure's static link.
*/
JS_ASSERT(PN_TYPE(dn) == TOK_NAME);
JS_ASSERT(dn->pn_op == JSOP_NOP);
if (tokenStream.peekToken() != TOK_LP)
dn->pn_dflags |= PND_FUNARG;
}
/*
* In case this is a forward reference to a function,
* we pessimistically set PND_FUNARG if the next token
* is not a left parenthesis.
*
* If the definition eventually parsed into dn is not a
* function, this flag won't hurt, and if we do parse a
* function with pn's name, then the PND_FUNARG flag is
* necessary for safe context->display-based optimiza-
* tion of the closure's static link.
*/
JS_ASSERT(PN_TYPE(dn) == TOK_NAME);
JS_ASSERT(dn->pn_op == JSOP_NOP);
if (tokenStream.peekToken() != TOK_LP)
dn->pn_dflags |= PND_FUNARG;
}
JS_ASSERT(dn->pn_defn);
LinkUseToDef(pn, dn, tc);
/* Here we handle the backward function reference case. */
if (tokenStream.peekToken() != TOK_LP)
dn->pn_dflags |= PND_FUNARG;
pn->pn_dflags |= (dn->pn_dflags & PND_FUNARG);
}
JS_ASSERT(dn->pn_defn);
LinkUseToDef(pn, dn, tc);
/* Here we handle the backward function reference case. */
if (tokenStream.peekToken() != TOK_LP)
dn->pn_dflags |= PND_FUNARG;
pn->pn_dflags |= (dn->pn_dflags & PND_FUNARG);
if (stmt && stmt->type == STMT_WITH)
pn->pn_dflags |= PND_DEOPTIMIZED;
}
#if JS_HAS_XML_SUPPORT

Просмотреть файл

@ -119,7 +119,7 @@ jsprobes_jsvaltovoid(JSContext *cx, const js::Value &argval)
//return (void *)argval.toDouble();
}
return argval.asGCThing();
return argval.toGCThing();
}
#endif

Просмотреть файл

@ -51,6 +51,7 @@
#include "jsobjinlines.h"
using namespace js;
using namespace js::gc;
namespace js {
@ -220,7 +221,7 @@ JSProxyHandler::iterate(JSContext *cx, JSObject *proxy, uintN flags, Value *vp)
return false;
return EnumeratedIdVectorToIterator(cx, proxy, flags, props, vp);
}
JSString *
JSProxyHandler::obj_toString(JSContext *cx, JSObject *proxy)
{
@ -323,7 +324,7 @@ DerivedTrap(JSContext *cx, JSObject *handler, JSAtom *atom, Value *fvalp)
atom == ATOM(set) ||
atom == ATOM(enumerateOwn) ||
atom == ATOM(iterate));
return GetTrap(cx, handler, atom, fvalp);
}
@ -1098,8 +1099,8 @@ proxy_create(JSContext *cx, uintN argc, Value *vp)
"create", "0", "s");
return false;
}
JSObject *handler;
if (!(handler = NonNullObject(cx, vp[2])))
JSObject *handler = NonNullObject(cx, vp[2]);
if (!handler)
return false;
JSObject *proto, *parent = NULL;
if (argc > 1 && vp[3].isObject()) {
@ -1128,8 +1129,8 @@ proxy_createFunction(JSContext *cx, uintN argc, Value *vp)
"createFunction", "1", "");
return false;
}
JSObject *handler;
if (!(handler = NonNullObject(cx, vp[2])))
JSObject *handler = NonNullObject(cx, vp[2]);
if (!handler)
return false;
JSObject *proto, *parent;
parent = vp[0].toObject().getParent();
@ -1167,8 +1168,8 @@ proxy_isTrapping(JSContext *cx, uintN argc, Value *vp)
"isTrapping", "0", "s");
return false;
}
JSObject *obj;
if (!(obj = NonNullObject(cx, vp[2])))
JSObject *obj = NonNullObject(cx, vp[2]);
if (!obj)
return false;
vp->setBoolean(obj->isProxy());
return true;
@ -1182,8 +1183,8 @@ proxy_fix(JSContext *cx, uintN argc, Value *vp)
"fix", "0", "s");
return false;
}
JSObject *obj;
if (!(obj = NonNullObject(cx, vp[2])))
JSObject *obj = NonNullObject(cx, vp[2]);
if (!obj)
return false;
if (obj->isProxy()) {
JSBool flag;
@ -1313,8 +1314,8 @@ FixProxy(JSContext *cx, JSObject *proxy, JSBool *bp)
return false;
}
JSObject *props;
if (!(props = NonNullObject(cx, tvr.value())))
JSObject *props = NonNullObject(cx, tvr.value());
if (!props)
return false;
JSObject *proto = proxy->getProto();
@ -1338,7 +1339,7 @@ FixProxy(JSContext *cx, JSObject *proxy, JSBool *bp)
return false;
}
/* Trade spaces between the newborn object and the proxy. */
/* Trade contents between the newborn object and the proxy. */
proxy->swap(newborn);
/* The GC will dispose of the proxy object. */

Просмотреть файл

@ -65,6 +65,7 @@ using namespace nanojit;
#endif
using namespace js;
using namespace js::gc;
/*
* RegExpStatics allocates memory -- in order to keep the statics stored
@ -522,7 +523,7 @@ regexp_trace(JSTracer *trc, JSObject *obj)
{
RegExp *re = RegExp::extractFrom(obj);
if (re && re->getSource())
JS_CALL_STRING_TRACER(trc, re->getSource(), "source");
MarkString(trc, re->getSource(), "source");
}
static JSBool

Просмотреть файл

@ -67,6 +67,7 @@
#include "jsscopeinlines.h"
using namespace js;
using namespace js::gc;
uint32
js_GenerateShape(JSContext *cx, bool gcLocked)
@ -1361,16 +1362,16 @@ Shape::trace(JSTracer *trc) const
if (attrs & (JSPROP_GETTER | JSPROP_SETTER)) {
if ((attrs & JSPROP_GETTER) && rawGetter) {
JS_SET_TRACING_DETAILS(trc, PrintPropertyGetterOrSetter, this, 0);
Mark(trc, getterObject(), JSTRACE_OBJECT);
Mark(trc, getterObject());
}
if ((attrs & JSPROP_SETTER) && rawSetter) {
JS_SET_TRACING_DETAILS(trc, PrintPropertyGetterOrSetter, this, 1);
Mark(trc, setterObject(), JSTRACE_OBJECT);
Mark(trc, setterObject());
}
}
if (isMethod()) {
JS_SET_TRACING_DETAILS(trc, PrintPropertyMethod, this, 0);
Mark(trc, &methodObject(), JSTRACE_OBJECT);
Mark(trc, &methodObject());
}
}

Просмотреть файл

@ -291,6 +291,7 @@ struct Shape : public JSObjectMap
friend struct ::JSObject;
friend struct ::JSFunction;
friend class js::PropertyTree;
friend bool HasUnreachableGCThings(TreeFragment *f);
protected:
mutable js::PropertyTable *table;

Просмотреть файл

@ -71,6 +71,7 @@
#include "jsscriptinlines.h"
using namespace js;
using namespace js::gc;
static const jsbytecode emptyScriptCode[] = {JSOP_STOP, SRC_NULL};
@ -1286,7 +1287,7 @@ js_TraceScript(JSTracer *trc, JSScript *script)
--i;
if (objarray->vector[i]) {
JS_SET_TRACING_INDEX(trc, "objects", i);
Mark(trc, objarray->vector[i], JSTRACE_OBJECT);
Mark(trc, objarray->vector[i]);
}
} while (i != 0);
}
@ -1298,7 +1299,7 @@ js_TraceScript(JSTracer *trc, JSScript *script)
--i;
if (objarray->vector[i]) {
JS_SET_TRACING_INDEX(trc, "regexps", i);
Mark(trc, objarray->vector[i], JSTRACE_OBJECT);
Mark(trc, objarray->vector[i]);
}
} while (i != 0);
}
@ -1310,7 +1311,7 @@ js_TraceScript(JSTracer *trc, JSScript *script)
if (script->u.object) {
JS_SET_TRACING_NAME(trc, "object");
Mark(trc, script->u.object, JSTRACE_OBJECT);
Mark(trc, script->u.object);
}
if (IS_GC_MARKING_TRACER(trc) && script->filename)

Просмотреть файл

@ -83,10 +83,16 @@
#include "jscntxtinlines.h"
using namespace js;
using namespace js::gc;
JS_STATIC_ASSERT(size_t(JSString::MAX_LENGTH) <= size_t(JSVAL_INT_MAX));
JS_STATIC_ASSERT(JSString::MAX_LENGTH <= JSVAL_INT_MAX);
JS_STATIC_ASSERT(JS_EXTERNAL_STRING_LIMIT == 8);
JSStringFinalizeOp str_finalizers[JS_EXTERNAL_STRING_LIMIT] = {
NULL, NULL, NULL, NULL, NULL, NULL, NULL, NULL
};
const jschar *
js_GetStringChars(JSContext *cx, JSString *str)
{
@ -3120,7 +3126,7 @@ static JSFunctionSpec string_methods[] = {
#pragma pack(push, 8)
#endif
JSString JSString::unitStringTable[]
const JSString JSString::unitStringTable[]
#ifdef __GNUC__
__attribute__ ((aligned (8)))
#endif
@ -3146,7 +3152,7 @@ __attribute__ ((aligned (8)))
#define R TO_SMALL_CHAR
JSString::SmallChar JSString::toSmallChar[] = { R7(0) };
const JSString::SmallChar JSString::toSmallChar[] = { R7(0) };
#undef R
@ -3159,7 +3165,7 @@ JSString::SmallChar JSString::toSmallChar[] = { R7(0) };
'A' - 36))
#define R FROM_SMALL_CHAR
jschar JSString::fromSmallChar[] = { R6(0) };
const jschar JSString::fromSmallChar[] = { R6(0) };
#undef R
@ -3180,7 +3186,7 @@ jschar JSString::fromSmallChar[] = { R6(0) };
#pragma pack(push, 8)
#endif
JSString JSString::length2StringTable[]
const JSString JSString::length2StringTable[]
#ifdef __GNUC__
__attribute__ ((aligned (8)))
#endif
@ -3222,7 +3228,7 @@ JS_STATIC_ASSERT(100 + (1 << 7) + (1 << 4) + (1 << 3) + (1 << 2) == 256);
#pragma pack(push, 8)
#endif
JSString JSString::hundredStringTable[]
const JSString JSString::hundredStringTable[]
#ifdef __GNUC__
__attribute__ ((aligned (8)))
#endif
@ -3240,7 +3246,7 @@ __attribute__ ((aligned (8)))
TO_SMALL_CHAR(((c) % 10) + '0') : \
JSString::hundredStringTable + ((c) - 100))
JSString *JSString::intStringTable[] = { R8(0) };
const JSString *const JSString::intStringTable[] = { R8(0) };
#undef R
@ -3485,7 +3491,7 @@ js_NewDependentString(JSContext *cx, JSString *base, size_t start,
jschar *chars = base->chars() + start;
if (length == 1 && *chars < UNIT_STRING_LIMIT)
return &JSString::unitStringTable[*chars];
return const_cast<JSString *>(&JSString::unitStringTable[*chars]);
/* Try to avoid long chains of dependent strings. */
while (base->isDependent())
@ -4212,7 +4218,7 @@ DeflatedStringCache::sweep(JSContext *cx)
for (Map::Enum e(map); !e.empty(); e.popFront()) {
JSString *str = e.front().key;
if (js_IsAboutToBeFinalized(str)) {
if (IsAboutToBeFinalized(str)) {
char *bytes = e.front().value;
e.removeFront();
@ -4351,7 +4357,7 @@ js_GetStringBytes(JSContext *cx, JSString *str)
rt = cx->runtime;
} else {
/* JS_GetStringBytes calls us with null cx. */
rt = js_GetGCThingRuntime(str);
rt = GetGCThingRuntime(str);
}
return rt->deflatedStringCache->getBytes(cx, str);

Просмотреть файл

@ -55,6 +55,7 @@
#include "jslock.h"
#include "jsobj.h"
#include "jsvalue.h"
#include "jscell.h"
#define JSSTRING_BIT(n) ((size_t)1 << (n))
#define JSSTRING_BITMASK(n) (JSSTRING_BIT(n) - 1)
@ -67,6 +68,8 @@ enum {
NUM_HUNDRED_STRINGS = 156U
};
extern JSStringFinalizeOp str_finalizers[8];
extern jschar *
js_GetDependentStringChars(JSString *str);
@ -128,7 +131,7 @@ struct JSString {
friend JSAtom *
js_AtomizeString(JSContext *cx, JSString *str, uintN flags);
public:
/*
* Not private because we want to be able to use static
* initializers for them. Don't use these directly!
@ -196,7 +199,14 @@ struct JSString {
return (mLengthAndFlags & flag) != 0;
}
public:
inline js::gc::Cell *asCell() {
return reinterpret_cast<js::gc::Cell *>(this);
}
inline js::gc::FreeCell *asFreeCell() {
return reinterpret_cast<js::gc::FreeCell *>(this);
}
/*
* Generous but sane length bound; the "-1" is there for comptibility with
* OOM tests.
@ -266,6 +276,7 @@ struct JSString {
/* Specific flat string initializer and accessor methods. */
JS_ALWAYS_INLINE void initFlat(jschar *chars, size_t length) {
JS_ASSERT(length <= MAX_LENGTH);
JS_ASSERT(!isStatic(this));
e.mBase = NULL;
e.mCapacity = 0;
mLengthAndFlags = (length << FLAGS_LENGTH_SHIFT) | FLAT;
@ -274,6 +285,7 @@ struct JSString {
JS_ALWAYS_INLINE void initFlatMutable(jschar *chars, size_t length, size_t cap) {
JS_ASSERT(length <= MAX_LENGTH);
JS_ASSERT(!isStatic(this));
e.mBase = NULL;
e.mCapacity = cap;
mLengthAndFlags = (length << FLAGS_LENGTH_SHIFT) | FLAT | MUTABLE;
@ -324,6 +336,7 @@ struct JSString {
*/
inline void flatSetAtomized() {
JS_ASSERT(isFlat());
JS_ASSERT(!isStatic(this));
JS_ATOMIC_SET_MASK((jsword *)&mLengthAndFlags, ATOMIZED);
}
@ -335,7 +348,13 @@ struct JSString {
inline void flatClearMutable() {
JS_ASSERT(isFlat());
mLengthAndFlags &= ~MUTABLE;
/*
* We cannot eliminate the flag check before writing to mLengthAndFlags as
* static strings may reside in write-protected memory. See bug 599481.
*/
if (mLengthAndFlags & MUTABLE)
mLengthAndFlags &= ~MUTABLE;
}
/*
@ -344,6 +363,7 @@ struct JSString {
*/
inline void initDependent(JSString *bstr, jschar *chars, size_t len) {
JS_ASSERT(len <= MAX_LENGTH);
JS_ASSERT(!isStatic(this));
e.mParent = NULL;
mChars = chars;
mLengthAndFlags = DEPENDENT | (len << FLAGS_LENGTH_SHIFT);
@ -368,6 +388,7 @@ struct JSString {
inline void initTopNode(JSString *left, JSString *right, size_t len,
JSRopeBufferInfo *buf) {
JS_ASSERT(left->length() + right->length() <= MAX_LENGTH);
JS_ASSERT(!isStatic(this));
mLengthAndFlags = TOP_NODE | (len << FLAGS_LENGTH_SHIFT);
mLeft = left;
e.mRight = right;
@ -507,16 +528,16 @@ struct JSString {
static const SmallChar INVALID_SMALL_CHAR = -1;
static jschar fromSmallChar[];
static SmallChar toSmallChar[];
static JSString unitStringTable[];
static JSString length2StringTable[];
static JSString hundredStringTable[];
static const jschar fromSmallChar[];
static const SmallChar toSmallChar[];
static const JSString unitStringTable[];
static const JSString length2StringTable[];
static const JSString hundredStringTable[];
/*
* Since int strings can be unit strings, length-2 strings, or hundred
* strings, we keep a table to map from integer to the correct string.
*/
static JSString *intStringTable[];
static const JSString *const intStringTable[];
static const char deflatedIntStringTable[];
static const char deflatedUnitStringTable[];
static const char deflatedLength2StringTable[];
@ -525,6 +546,8 @@ struct JSString {
static JSString *getUnitString(JSContext *cx, JSString *str, size_t index);
static JSString *length2String(jschar c1, jschar c2);
static JSString *intString(jsint i);
JS_ALWAYS_INLINE void finalize(JSContext *cx, unsigned thingKind);
};
/*
@ -532,7 +555,7 @@ struct JSString {
* mallocing the string buffer for a small string. We keep 2 string headers'
* worth of space in short strings so that more strings can be stored this way.
*/
struct JSShortString {
struct JSShortString : js::gc::Cell {
JSString mHeader;
JSString mDummy;
@ -561,6 +584,8 @@ struct JSShortString {
static inline bool fitsIntoShortString(size_t length) {
return length <= MAX_SHORT_STRING_LENGTH;
}
JS_ALWAYS_INLINE void finalize(JSContext *cx, unsigned thingKind);
};
/*
@ -707,8 +732,6 @@ JS_STATIC_ASSERT(JSString::TOP_NODE & JSString::ROPE_BIT);
JS_STATIC_ASSERT(((JSString::MAX_LENGTH << JSString::FLAGS_LENGTH_SHIFT) >>
JSString::FLAGS_LENGTH_SHIFT) == JSString::MAX_LENGTH);
JS_STATIC_ASSERT(sizeof(JSString) % JS_GCTHING_ALIGN == 0);
extern const jschar *
js_GetStringChars(JSContext *cx, JSString *str);

Просмотреть файл

@ -46,7 +46,7 @@ inline JSString *
JSString::unitString(jschar c)
{
JS_ASSERT(c < UNIT_STRING_LIMIT);
return &unitStringTable[c];
return const_cast<JSString *>(&unitStringTable[c]);
}
inline JSString *
@ -64,7 +64,8 @@ JSString::length2String(jschar c1, jschar c2)
{
JS_ASSERT(fitsInSmallChar(c1));
JS_ASSERT(fitsInSmallChar(c2));
return &length2StringTable[(((size_t)toSmallChar[c1]) << 6) + toSmallChar[c2]];
return const_cast<JSString *>
(&length2StringTable[(((size_t)toSmallChar[c1]) << 6) + toSmallChar[c2]]);
}
inline JSString *
@ -72,7 +73,50 @@ JSString::intString(jsint i)
{
jsuint u = jsuint(i);
JS_ASSERT(u < INT_STRING_LIMIT);
return JSString::intStringTable[u];
return const_cast<JSString *>(JSString::intStringTable[u]);
}
inline void
JSString::finalize(JSContext *cx, unsigned thingKind) {
if (JS_LIKELY(thingKind == js::gc::FINALIZE_STRING)) {
JS_ASSERT(!JSString::isStatic(this));
JS_RUNTIME_UNMETER(cx->runtime, liveStrings);
if (isDependent()) {
JS_ASSERT(dependentBase());
JS_RUNTIME_UNMETER(cx->runtime, liveDependentStrings);
} else if (isFlat()) {
/*
* flatChars for stillborn string is null, but cx->free checks
* for a null pointer on its own.
*/
cx->free(flatChars());
} else if (isTopNode()) {
cx->free(topNodeBuffer());
}
} else {
unsigned type = thingKind - js::gc::FINALIZE_EXTERNAL_STRING0;
JS_ASSERT(type < JS_ARRAY_LENGTH(str_finalizers));
JS_ASSERT(!isStatic(this));
JS_ASSERT(isFlat());
JS_RUNTIME_UNMETER(cx->runtime, liveStrings);
/* A stillborn string has null chars. */
jschar *chars = flatChars();
if (!chars)
return;
JSStringFinalizeOp finalizer = str_finalizers[type];
if (finalizer)
finalizer(cx, this);
}
}
inline void
JSShortString::finalize(JSContext *cx, unsigned thingKind)
{
JS_ASSERT(js::gc::FINALIZE_SHORT_STRING == thingKind);
JS_ASSERT(!JSString::isStatic(header()));
JS_ASSERT(header()->isFlat());
JS_RUNTIME_UNMETER(cx->runtime, liveStrings);
}
inline

Просмотреть файл

@ -103,6 +103,7 @@
namespace nanojit {
using namespace js;
using namespace js::gc;
/* Implement embedder-specific nanojit members. */
@ -2297,7 +2298,7 @@ FlushJITCache(JSContext *cx)
}
static void
TrashTree(JSContext* cx, TreeFragment* f);
TrashTree(TreeFragment* f);
template <class T>
static T&
@ -2536,10 +2537,10 @@ TraceRecorder::~TraceRecorder()
JS_ASSERT(traceMonitor->recorder != this);
if (trashSelf)
TrashTree(cx, fragment->root);
TrashTree(fragment->root);
for (unsigned int i = 0; i < whichTreesToTrash.length(); i++)
TrashTree(cx, whichTreesToTrash[i]);
TrashTree(whichTreesToTrash[i]);
/* Purge the tempAlloc used during recording. */
tempAlloc().reset();
@ -2614,7 +2615,7 @@ TraceRecorder::finishAbort(const char* reason)
* Otherwise, we may be throwing away another recorder's valid side exits.
*/
if (fragment->root == fragment) {
TrashTree(cx, fragment->toTreeFragment());
TrashTree(fragment->toTreeFragment());
} else {
JS_ASSERT(numSideExitsBefore <= fragment->root->sideExits.length());
fragment->root->sideExits.setLength(numSideExitsBefore);
@ -2920,46 +2921,67 @@ TraceMonitor::flush()
needFlush = JS_FALSE;
}
static inline void
MarkTree(JSTracer* trc, TreeFragment *f)
inline bool
HasUnreachableGCThings(TreeFragment *f)
{
/*
* We do not check here for dead scripts as JSScript is not a GC thing.
* Instead PurgeScriptFragments is used to remove dead script fragments.
* See bug 584860.
*/
if (IsAboutToBeFinalized(f->globalObj))
return true;
Value* vp = f->gcthings.data();
unsigned len = f->gcthings.length();
while (len--) {
for (unsigned len = f->gcthings.length(); len; --len) {
Value &v = *vp++;
JS_SET_TRACING_NAME(trc, "jitgcthing");
JS_ASSERT(v.isMarkable());
Mark(trc, v.asGCThing(), v.gcKind());
if (IsAboutToBeFinalized(v.toGCThing()))
return true;
}
const Shape** shapep = f->shapes.data();
len = f->shapes.length();
while (len--) {
for (unsigned len = f->shapes.length(); len; --len) {
const Shape* shape = *shapep++;
shape->trace(trc);
if (!shape->marked())
return true;
}
return false;
}
void
TraceMonitor::mark(JSTracer* trc)
TraceMonitor::sweep()
{
if (!trc->context->runtime->gcFlushCodeCaches) {
for (size_t i = 0; i < FRAGMENT_TABLE_SIZE; ++i) {
TreeFragment* f = vmfragments[i];
while (f) {
if (f->code())
MarkTree(trc, f);
TreeFragment* peer = f->peer;
while (peer) {
if (peer->code())
MarkTree(trc, peer);
peer = peer->peer;
}
f = f->next;
JS_ASSERT(!ontrace());
debug_only_print0(LC_TMTracer, "Purging fragments with dead things");
for (size_t i = 0; i < FRAGMENT_TABLE_SIZE; ++i) {
TreeFragment** fragp = &vmfragments[i];
while (TreeFragment* frag = *fragp) {
TreeFragment* peer = frag;
do {
if (HasUnreachableGCThings(peer))
break;
peer = peer->peer;
} while (peer);
if (peer) {
debug_only_printf(LC_TMTracer,
"TreeFragment peer %p has dead gc thing."
"Disconnecting tree %p with ip %p\n",
(void *) peer, (void *) frag, frag->ip);
JS_ASSERT(frag->root == frag);
*fragp = frag->next;
do {
verbose_only( FragProfiling_FragFinalizer(frag, this); )
TrashTree(frag);
frag = frag->peer;
} while (frag);
} else {
fragp = &frag->next;
}
}
if (recorder)
MarkTree(trc, recorder->getTree());
}
if (recorder && HasUnreachableGCThings(recorder->getTree()))
recorder->finishAbort("dead GC things");
}
/*
@ -5671,7 +5693,7 @@ TraceRecorder::startRecorder(JSContext* cx, VMSideExit* anchor, VMFragment* f,
}
static void
TrashTree(JSContext* cx, TreeFragment* f)
TrashTree(TreeFragment* f)
{
JS_ASSERT(f == f->root);
debug_only_printf(LC_TMTreeVis, "TREEVIS TRASH FRAG=%p\n", (void*)f);
@ -5684,11 +5706,11 @@ TrashTree(JSContext* cx, TreeFragment* f)
TreeFragment** data = f->dependentTrees.data();
unsigned length = f->dependentTrees.length();
for (unsigned n = 0; n < length; ++n)
TrashTree(cx, data[n]);
TrashTree(data[n]);
data = f->linkedTrees.data();
length = f->linkedTrees.length();
for (unsigned n = 0; n < length; ++n)
TrashTree(cx, data[n]);
TrashTree(data[n]);
}
static void
@ -5900,7 +5922,7 @@ AttemptToStabilizeTree(JSContext* cx, JSObject* globalObj, VMSideExit* exit, jsb
return false;
} else if (consensus == TypeConsensus_Undemotes) {
/* The original tree is unconnectable, so trash it. */
TrashTree(cx, peer);
TrashTree(peer);
return false;
}
@ -7812,6 +7834,11 @@ PurgeScriptFragments(JSContext* cx, JSScript* script)
"Purging fragments for JSScript %p.\n", (void*)script);
TraceMonitor* tm = &JS_TRACE_MONITOR(cx);
/* A recorder script is being evaluated and can not be destroyed or GC-ed. */
JS_ASSERT_IF(tm->recorder,
JS_UPTRDIFF(tm->recorder->getTree()->ip, script->code) >= script->length);
for (size_t i = 0; i < FRAGMENT_TABLE_SIZE; ++i) {
TreeFragment** fragp = &tm->vmfragments[i];
while (TreeFragment* frag = *fragp) {
@ -7827,7 +7854,7 @@ PurgeScriptFragments(JSContext* cx, JSScript* script)
*fragp = frag->next;
do {
verbose_only( FragProfiling_FragFinalizer(frag, tm); )
TrashTree(cx, frag);
TrashTree(frag);
} while ((frag = frag->peer) != NULL);
continue;
}
@ -10374,7 +10401,8 @@ functionProbe(JSContext *cx, JSFunction *fun, JSBool enter)
return true;
}
JS_DEFINE_CALLINFO_3(static, BOOL, functionProbe, CONTEXT, FUNCTION, BOOL, 0, 0)
JS_DEFINE_CALLINFO_3(static, BOOL, functionProbe, CONTEXT, FUNCTION, BOOL,
0, ACCSET_STORE_ANY)
#endif
JS_REQUIRES_STACK AbortableRecordingStatus

Просмотреть файл

@ -1424,8 +1424,9 @@ class TraceRecorder
bool &blacklist);
friend void AbortRecording(JSContext*, const char*);
friend class BoxArg;
friend void TraceMonitor::sweep();
public:
public:
static bool JS_REQUIRES_STACK
startRecorder(JSContext*, VMSideExit*, VMFragment*,
unsigned stackSlots, unsigned ngslots, JSValueType* typeMap,

Просмотреть файл

@ -64,6 +64,7 @@
#include "jsobjinlines.h"
using namespace js;
using namespace js::gc;
/*
* ArrayBuffer
@ -306,7 +307,7 @@ TypedArray::obj_trace(JSTracer *trc, JSObject *obj)
{
TypedArray *tarray = fromJSObject(obj);
JS_ASSERT(tarray);
JS_CALL_OBJECT_TRACER(trc, tarray->bufferJS, "typedarray.buffer");
MarkObject(trc, *tarray->bufferJS, "typedarray.buffer");
}
JSBool

Просмотреть файл

@ -600,7 +600,7 @@ class Value
}
JS_ALWAYS_INLINE
void *asGCThing() const {
void *toGCThing() const {
JS_ASSERT(isGCThing());
return JSVAL_TO_GCTHING_IMPL(data);
}
@ -881,6 +881,19 @@ typedef JSBool
(* StrictPropertyIdOp)(JSContext *cx, JSObject *obj, jsid id, Value *vp, JSBool strict);
typedef JSBool
(* CallOp)(JSContext *cx, uintN argc, Value *vp);
typedef JSBool
(* LookupPropOp)(JSContext *cx, JSObject *obj, jsid id, JSObject **objp,
JSProperty **propp);
typedef JSBool
(* AttributesOp)(JSContext *cx, JSObject *obj, jsid id, uintN *attrsp);
typedef JSType
(* TypeOfOp)(JSContext *cx, JSObject *obj);
typedef void
(* TraceOp)(JSTracer *trc, JSObject *obj);
typedef JSObject *
(* ObjectOp)(JSContext *cx, JSObject *obj);
typedef void
(* FinalizeOp)(JSContext *cx, JSObject *obj);
class AutoIdVector;
@ -959,19 +972,19 @@ struct ClassExtension {
#define JS_NULL_CLASS_EXT {NULL,NULL,NULL,NULL,NULL}
struct ObjectOps {
JSLookupPropOp lookupProperty;
js::LookupPropOp lookupProperty;
js::DefinePropOp defineProperty;
js::PropertyIdOp getProperty;
js::StrictPropertyIdOp setProperty;
JSAttributesOp getAttributes;
JSAttributesOp setAttributes;
js::AttributesOp getAttributes;
js::AttributesOp setAttributes;
js::StrictPropertyIdOp deleteProperty;
js::NewEnumerateOp enumerate;
JSTypeOfOp typeOf;
JSTraceOp trace;
js::TypeOfOp typeOf;
js::TraceOp trace;
js::FixOp fix;
JSObjectOp thisObject;
JSFinalizeOp clear;
js::ObjectOp thisObject;
js::FinalizeOp clear;
};
#define JS_NULL_OBJECT_OPS {NULL,NULL,NULL,NULL,NULL,NULL, NULL,NULL,NULL,NULL,NULL,NULL}

Просмотреть файл

@ -50,10 +50,12 @@
#ifdef JS_METHODJIT
# include "assembler/jit/ExecutableAllocator.h"
#endif
#include "jscompartment.h"
#include "jsobjinlines.h"
using namespace js;
using namespace js::gc;
static int sWrapperFamily = 0;
@ -256,7 +258,7 @@ JSWrapper::fun_toString(JSContext *cx, JSObject *wrapper, uintN indent)
void
JSWrapper::trace(JSTracer *trc, JSObject *wrapper)
{
JS_CALL_OBJECT_TRACER(trc, wrappedObject(wrapper), "wrappedObject");
MarkObject(trc, *wrappedObject(wrapper), "wrappedObject");
}
bool
@ -294,244 +296,6 @@ TransparentObjectWrapper(JSContext *cx, JSObject *obj, JSObject *wrappedProto, u
}
JSCompartment::JSCompartment(JSRuntime *rt)
: rt(rt), principals(NULL), data(NULL), marked(false), debugMode(false)
{
JS_INIT_CLIST(&scripts);
}
JSCompartment::~JSCompartment()
{
}
bool
JSCompartment::init()
{
return crossCompartmentWrappers.init();
}
bool
JSCompartment::wrap(JSContext *cx, Value *vp)
{
JS_ASSERT(cx->compartment == this);
uintN flags = 0;
JS_CHECK_RECURSION(cx, return false);
/* Only GC things have to be wrapped or copied. */
if (!vp->isMarkable())
return true;
/* Static strings do not have to be wrapped. */
if (vp->isString() && JSString::isStatic(vp->toString()))
return true;
/* Unwrap incoming objects. */
if (vp->isObject()) {
JSObject *obj = &vp->toObject();
/* If the object is already in this compartment, we are done. */
if (obj->getCompartment(cx) == this)
return true;
/* Don't unwrap an outer window proxy. */
if (!obj->getClass()->ext.innerObject) {
obj = vp->toObject().unwrap(&flags);
OBJ_TO_OUTER_OBJECT(cx, obj);
if (!obj)
return false;
vp->setObject(*obj);
}
/* If the wrapped object is already in this compartment, we are done. */
if (obj->getCompartment(cx) == this)
return true;
}
/* If we already have a wrapper for this value, use it. */
if (WrapperMap::Ptr p = crossCompartmentWrappers.lookup(*vp)) {
*vp = p->value;
return true;
}
if (vp->isString()) {
Value orig = *vp;
JSString *str = vp->toString();
JSString *wrapped = js_NewStringCopyN(cx, str->chars(), str->length());
if (!wrapped)
return false;
vp->setString(wrapped);
return crossCompartmentWrappers.put(orig, *vp);
}
JSObject *obj = &vp->toObject();
/*
* Recurse to wrap the prototype. Long prototype chains will run out of
* stack, causing an error in CHECK_RECURSE.
*
* Wrapping the proto before creating the new wrapper and adding it to the
* cache helps avoid leaving a bad entry in the cache on OOM. But note that
* if we wrapped both proto and parent, we would get infinite recursion
* here (since Object.prototype->parent->proto leads to Object.prototype
* itself).
*/
JSObject *proto = obj->getProto();
if (!wrap(cx, &proto))
return false;
/*
* We hand in the original wrapped object into the wrap hook to allow
* the wrap hook to reason over what wrappers are currently applied
* to the object.
*/
JSObject *wrapper = cx->runtime->wrapObjectCallback(cx, obj, proto, flags);
if (!wrapper)
return false;
wrapper->setProto(proto);
vp->setObject(*wrapper);
if (!crossCompartmentWrappers.put(wrapper->getProxyPrivate(), *vp))
return false;
/*
* Wrappers should really be parented to the wrapped parent of the wrapped
* object, but in that case a wrapped global object would have a NULL
* parent without being a proper global object (JSCLASS_IS_GLOBAL). Instead,
* we parent all wrappers to the global object in their home compartment.
* This loses us some transparency, and is generally very cheesy.
*/
JSObject *global;
if (cx->hasfp()) {
global = cx->fp()->scopeChain().getGlobal();
} else {
global = cx->globalObject;
OBJ_TO_INNER_OBJECT(cx, global);
if (!global)
return false;
}
wrapper->setParent(global);
return true;
}
bool
JSCompartment::wrap(JSContext *cx, JSString **strp)
{
AutoValueRooter tvr(cx, StringValue(*strp));
if (!wrap(cx, tvr.addr()))
return false;
*strp = tvr.value().toString();
return true;
}
bool
JSCompartment::wrap(JSContext *cx, JSObject **objp)
{
if (!*objp)
return true;
AutoValueRooter tvr(cx, ObjectValue(**objp));
if (!wrap(cx, tvr.addr()))
return false;
*objp = &tvr.value().toObject();
return true;
}
bool
JSCompartment::wrapId(JSContext *cx, jsid *idp)
{
if (JSID_IS_INT(*idp))
return true;
AutoValueRooter tvr(cx, IdToValue(*idp));
if (!wrap(cx, tvr.addr()))
return false;
return ValueToId(cx, tvr.value(), idp);
}
bool
JSCompartment::wrap(JSContext *cx, PropertyOp *propp)
{
Value v = CastAsObjectJsval(*propp);
if (!wrap(cx, &v))
return false;
*propp = CastAsPropertyOp(v.toObjectOrNull());
return true;
}
bool
JSCompartment::wrap(JSContext *cx, PropertyDescriptor *desc)
{
return wrap(cx, &desc->obj) &&
(!(desc->attrs & JSPROP_GETTER) || wrap(cx, &desc->getter)) &&
(!(desc->attrs & JSPROP_SETTER) || wrap(cx, &desc->setter)) &&
wrap(cx, &desc->value);
}
bool
JSCompartment::wrap(JSContext *cx, AutoIdVector &props)
{
jsid *vector = props.begin();
jsint length = props.length();
for (size_t n = 0; n < size_t(length); ++n) {
if (!wrapId(cx, &vector[n]))
return false;
}
return true;
}
bool
JSCompartment::wrapException(JSContext *cx)
{
JS_ASSERT(cx->compartment == this);
if (cx->throwing) {
AutoValueRooter tvr(cx, cx->exception);
cx->throwing = false;
cx->exception.setNull();
if (wrap(cx, tvr.addr())) {
cx->throwing = true;
cx->exception = tvr.value();
}
return false;
}
return true;
}
void
JSCompartment::sweep(JSContext *cx)
{
/* Remove dead wrappers from the table. */
for (WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront()) {
if (js_IsAboutToBeFinalized(e.front().value.asGCThing()))
e.removeFront();
}
}
/*
 * Purge method-JIT inline-cache state for every script in this compartment.
 * A no-op in builds without JS_METHODJIT.
 */
void
JSCompartment::purge(JSContext *cx)
{
#ifdef JS_METHODJIT
    /* Walk the compartment's circular list of scripts. */
    for (JSScript *script = (JSScript *)scripts.next;
         &script->links != &scripts;
         script = (JSScript *)script->links.next) {
        if (script->jit) {
# if defined JS_POLYIC
            /* Polymorphic ICs may cache GC-able data; always purge. */
            mjit::ic::PurgePICs(cx, script);
# endif
# if defined JS_MONOIC
            /*
             * MICs do not refer to data which can be GC'ed, but are sensitive
             * to shape regeneration.
             */
            if (cx->runtime->gcRegenShapes)
                mjit::ic::PurgeMICs(cx, script);
# endif
        }
    }
#endif
}
AutoCompartment::AutoCompartment(JSContext *cx, JSObject *target)
: context(cx),
origin(cx->compartment),

Просмотреть файл

@ -45,6 +45,8 @@
#include "jsapi.h"
#include "jsproxy.h"
JS_BEGIN_EXTERN_C
/* No-op wrapper handler base class. */
class JSWrapper : public js::JSProxyHandler {
uintN mFlags;
@ -175,4 +177,6 @@ TransparentObjectWrapper(JSContext *cx, JSObject *obj, JSObject *wrappedProto, u
}
JS_END_EXTERN_C
#endif

Просмотреть файл

@ -80,6 +80,7 @@
#endif
using namespace js;
using namespace js::gc;
/*
* NOTES
@ -873,6 +874,15 @@ attr_identity(const void *a, const void *b)
return qname_identity(xmla->name, xmlb->name);
}
/*
 * Mark the root held by this cursor and by every cursor linked after it,
 * keeping those XML nodes alive across a GC.
 *
 * Fix: |index| was declared only under #ifdef DEBUG yet evaluated
 * unconditionally in the MarkGCThing call, which cannot compile in
 * non-DEBUG builds. Declare it unconditionally; the tracer uses the index
 * for naming, and maintaining it in all builds is harmless.
 */
void
JSXMLArrayCursor::trace(JSTracer *trc) {
    size_t index = 0;
    for (JSXMLArrayCursor *cursor = this; cursor; cursor = cursor->next)
        js::gc::MarkGCThing(trc, cursor->root, "cursor_root", index++);
}
static void
XMLArrayCursorTrace(JSTracer *trc, JSXMLArrayCursor *cursor)
{
@ -1282,7 +1292,7 @@ ParseNodeToXML(Parser *parser, JSParseNode *pn,
JSXMLClass xml_class;
int stackDummy;
if (!JS_CHECK_STACK_SIZE(cx, stackDummy)) {
if (!JS_CHECK_STACK_SIZE(cx->stackLimit, &stackDummy)) {
ReportCompileErrorNumber(cx, &parser->tokenStream, pn, JSREPORT_ERROR,
JSMSG_OVER_RECURSED);
return NULL;
@ -4635,7 +4645,7 @@ xml_trace_vector(JSTracer *trc, JSXML **vec, uint32 len)
xml = vec[i];
if (xml) {
JS_SET_TRACING_INDEX(trc, "xml_vector", i);
Mark(trc, xml, JSTRACE_XML);
Mark(trc, xml);
}
}
}
@ -6943,15 +6953,15 @@ void
js_TraceXML(JSTracer *trc, JSXML *xml)
{
if (xml->object)
JS_CALL_OBJECT_TRACER(trc, xml->object, "object");
MarkObject(trc, *xml->object, "object");
if (xml->name)
JS_CALL_OBJECT_TRACER(trc, xml->name, "name");
MarkObject(trc, *xml->name, "name");
if (xml->parent)
JS_CALL_TRACER(trc, xml->parent, JSTRACE_XML, "xml_parent");
if (JSXML_HAS_VALUE(xml)) {
if (xml->xml_value)
JS_CALL_STRING_TRACER(trc, xml->xml_value, "value");
MarkString(trc, xml->xml_value, "value");
return;
}
@ -6966,7 +6976,7 @@ js_TraceXML(JSTracer *trc, JSXML *xml)
if (xml->xml_target)
JS_CALL_TRACER(trc, xml->xml_target, JSTRACE_XML, "target");
if (xml->xml_targetprop)
JS_CALL_OBJECT_TRACER(trc, xml->xml_targetprop, "targetprop");
MarkObject(trc, *xml->xml_targetprop, "targetprop");
} else {
MarkObjectRange(trc, xml->xml_namespaces.length,
(JSObject **) xml->xml_namespaces.vector,
@ -6984,22 +6994,6 @@ js_TraceXML(JSTracer *trc, JSXML *xml)
}
}
/*
 * Finalizer for a JSXML node: release the child/namespace/attribute arrays
 * it owns. Only kid-bearing kinds hold arrays; element kinds additionally
 * own namespace and attribute arrays.
 */
void
js_FinalizeXML(JSContext *cx, JSXML *xml)
{
    if (JSXML_HAS_KIDS(xml)) {
        xml->xml_kids.finish(cx);
        if (xml->xml_class == JSXML_CLASS_ELEMENT) {
            xml->xml_namespaces.finish(cx);
            xml->xml_attrs.finish(cx);
        }
    }
#ifdef DEBUG_notme
    /* Unlink from the debug-only list of live JSXML nodes. */
    JS_REMOVE_LINK(&xml->links);
#endif
}
JSObject *
js_NewXMLObject(JSContext *cx, JSXMLClass xml_class)
{

Просмотреть файл

@ -41,6 +41,7 @@
#include "jspubtd.h"
#include "jsobj.h"
#include "jscell.h"
extern const char js_AnyName_str[];
extern const char js_AttributeName_str[];
@ -113,13 +114,7 @@ struct JSXMLArrayCursor
return root = array->vector[index];
}
void trace(JSTracer *trc) {
#ifdef DEBUG
size_t index = 0;
#endif
for (JSXMLArrayCursor *cursor = this; cursor; cursor = cursor->next)
js::MarkGCThing(trc, cursor->root, "cursor_root", index++);
}
void trace(JSTracer *trc);
};
#define JSXML_PRESET_CAPACITY JS_BIT(31)
@ -162,7 +157,23 @@ typedef struct JSXMLElemVar {
JSXMLArray attrs;
} JSXMLElemVar;
struct JSXML {
/* union member shorthands */
#define xml_kids u.list.kids
#define xml_target u.list.target
#define xml_targetprop u.list.targetprop
#define xml_namespaces u.elem.namespaces
#define xml_attrs u.elem.attrs
#define xml_value u.value
/* xml_class-testing macros */
#define JSXML_HAS_KIDS(xml) JSXML_CLASS_HAS_KIDS((xml)->xml_class)
#define JSXML_HAS_VALUE(xml) JSXML_CLASS_HAS_VALUE((xml)->xml_class)
#define JSXML_HAS_NAME(xml) JSXML_CLASS_HAS_NAME((xml)->xml_class)
#define JSXML_LENGTH(xml) (JSXML_CLASS_HAS_KIDS((xml)->xml_class) \
? (xml)->xml_kids.length \
: 0)
struct JSXML : js::gc::Cell {
#ifdef DEBUG_notme
JSCList links;
uint32 serial;
@ -178,38 +189,30 @@ struct JSXML {
JSXMLElemVar elem;
JSString *value;
} u;
void finalize(JSContext *cx, unsigned thingKind) {
if (JSXML_HAS_KIDS(this)) {
xml_kids.finish(cx);
if (xml_class == JSXML_CLASS_ELEMENT) {
xml_namespaces.finish(cx);
xml_attrs.finish(cx);
}
}
#ifdef DEBUG_notme
JS_REMOVE_LINK(&links);
#endif
}
};
JS_STATIC_ASSERT(sizeof(JSXML) % JS_GCTHING_ALIGN == 0);
/* union member shorthands */
#define xml_kids u.list.kids
#define xml_target u.list.target
#define xml_targetprop u.list.targetprop
#define xml_namespaces u.elem.namespaces
#define xml_attrs u.elem.attrs
#define xml_value u.value
/* xml_flags values */
#define XMLF_WHITESPACE_TEXT 0x1
/* xml_class-testing macros */
#define JSXML_HAS_KIDS(xml) JSXML_CLASS_HAS_KIDS((xml)->xml_class)
#define JSXML_HAS_VALUE(xml) JSXML_CLASS_HAS_VALUE((xml)->xml_class)
#define JSXML_HAS_NAME(xml) JSXML_CLASS_HAS_NAME((xml)->xml_class)
#define JSXML_LENGTH(xml) (JSXML_CLASS_HAS_KIDS((xml)->xml_class) \
? (xml)->xml_kids.length \
: 0)
extern JSXML *
js_NewXML(JSContext *cx, JSXMLClass xml_class);
extern void
js_TraceXML(JSTracer *trc, JSXML *xml);
extern void
js_FinalizeXML(JSContext *cx, JSXML *xml);
extern JSObject *
js_NewXMLObject(JSContext *cx, JSXMLClass xml_class);

Просмотреть файл

@ -379,13 +379,39 @@ double calld1(double x, double i, double y, double l, double x1, double i1, doub
return x + i * y - l + x1 / i1 - y1 * l1;
}
// The calling tests with mixed argument types are sensible for all platforms, but they highlight
// the differences between the supported ABIs on ARM.
// ABI test callee: mixed int/double arguments in an interleaved order.
// Returns (x + y + z) / (i + j + k).
double callid1(int i, double x, double y, int j, int k, double z) {
    double numerator = x + y + z;
    double denominator = double(i + j + k);
    return numerator / denominator;
}
// ABI test callee: all ints before the single double argument.
// Returns x / (i + j + k).
double callid2(int i, int j, int k, double x) {
    double denominator = double(i + j + k);
    return x / denominator;
}
// ABI test callee: another int/double interleaving.
// Returns (x + y + z) / (i + j + k).
double callid3(int i, int j, double x, int k, double y, double z) {
    double total = x + y + z;
    return total / double(i + j + k);
}
// Simple print function for testing void calls: echoes x (plus a newline)
// to stdout and flushes.
void printi(int x) {
    cout << x << endl;
}
Function functions[] = {
FN(puts, CallInfo::typeSig1(ARGTYPE_I, ARGTYPE_P)),
FN(sin, CallInfo::typeSig1(ARGTYPE_D, ARGTYPE_D)),
FN(malloc, CallInfo::typeSig1(ARGTYPE_P, ARGTYPE_P)),
FN(free, CallInfo::typeSig1(ARGTYPE_V, ARGTYPE_P)),
FN(calld1, CallInfo::typeSig8(ARGTYPE_D, ARGTYPE_D, ARGTYPE_D, ARGTYPE_D,
ARGTYPE_D, ARGTYPE_D, ARGTYPE_D, ARGTYPE_D, ARGTYPE_D)),
FN(puts, CallInfo::typeSig1(ARGTYPE_I, ARGTYPE_P)),
FN(sin, CallInfo::typeSig1(ARGTYPE_D, ARGTYPE_D)),
FN(malloc, CallInfo::typeSig1(ARGTYPE_P, ARGTYPE_P)),
FN(free, CallInfo::typeSig1(ARGTYPE_V, ARGTYPE_P)),
FN(calld1, CallInfo::typeSig8(ARGTYPE_D, ARGTYPE_D, ARGTYPE_D, ARGTYPE_D,
ARGTYPE_D, ARGTYPE_D, ARGTYPE_D, ARGTYPE_D, ARGTYPE_D)),
FN(callid1, CallInfo::typeSig6(ARGTYPE_D, ARGTYPE_I, ARGTYPE_D, ARGTYPE_D,
ARGTYPE_I, ARGTYPE_I, ARGTYPE_D)),
FN(callid2, CallInfo::typeSig4(ARGTYPE_D, ARGTYPE_I, ARGTYPE_I, ARGTYPE_I, ARGTYPE_D)),
FN(callid3, CallInfo::typeSig6(ARGTYPE_D, ARGTYPE_I, ARGTYPE_I, ARGTYPE_D,
ARGTYPE_I, ARGTYPE_D, ARGTYPE_D)),
FN(printi, CallInfo::typeSig1(ARGTYPE_V, ARGTYPE_I)),
};
template<typename out, typename in> out
@ -719,12 +745,13 @@ FragmentAssembler::assemble_call(const string &op)
}
// Select return type from opcode.
ArgType retType = ARGTYPE_V;
if (mOpcode == LIR_calli) retType = ARGTYPE_I;
else if (mOpcode == LIR_calld) retType = ARGTYPE_D;
ArgType retType = ARGTYPE_P;
if (mOpcode == LIR_callv) retType = ARGTYPE_V;
else if (mOpcode == LIR_calli) retType = ARGTYPE_I;
#ifdef NANOJIT_64BIT
else if (mOpcode == LIR_callq) retType = ARGTYPE_Q;
#endif
else if (mOpcode == LIR_calld) retType = ARGTYPE_D;
else nyi("callh");
ci->_typesig = CallInfo::typeSigN(retType, argc, argTypes);
}
@ -1171,10 +1198,11 @@ FragmentAssembler::assembleFragment(LirTokenStream &in, bool implicitBegin, cons
ins = assemble_jump_jov();
break;
case LIR_callv:
case LIR_calli:
CASESF(LIR_hcalli:)
case LIR_calld:
CASE64(LIR_callq:)
case LIR_calld:
ins = assemble_call(op);
break;

Просмотреть файл

@ -0,0 +1,10 @@
i = immi -1;
j = immi 3;
k = immi 6;
x = immd 1.1;
y = immd 3.3;
z = immd 4.4;
res = calld callid1 cdecl i x y j k z
retd res

Просмотреть файл

@ -0,0 +1 @@
Output is: 1.1

Просмотреть файл

@ -0,0 +1,8 @@
i = immi -1;
j = immi 3;
k = immi 6;
x = immd 8.8;
res = calld callid2 cdecl i j k x
retd res

Просмотреть файл

@ -0,0 +1 @@
Output is: 1.1

Просмотреть файл

@ -0,0 +1,10 @@
i = immi -1;
j = immi 3;
k = immi 6;
x = immd 1.1;
y = immd 3.3;
z = immd 4.4;
res = calld callid3 cdecl i j x k y z
retd res

Просмотреть файл

@ -0,0 +1 @@
Output is: 1.1

Просмотреть файл

@ -0,0 +1,5 @@
; test call to void function
forty_two = immi 42
callv printi cdecl forty_two
reti forty_two

Просмотреть файл

@ -0,0 +1,2 @@
42
Output is: 42

Просмотреть файл

@ -145,6 +145,14 @@ static const JSC::MacroAssembler::RegisterID JSReturnReg_Data = JSC::ARMRegiste
static const JSC::MacroAssembler::RegisterID JSParamReg_Argc = JSC::ARMRegisters::r1;
#endif
    /* True if the base-register address reads |reg|. */
    bool addressUsesRegister(Address address, RegisterID reg) {
        return address.base == reg;
    }
    /* True if the base+index address reads |reg| as either component. */
    bool addressUsesRegister(BaseIndex address, RegisterID reg) {
        return (address.base == reg) || (address.index == reg);
    }
    /* Byte offset of label |l| from this assembler's start label. */
    size_t distanceOf(Label l) {
        return differenceBetween(startLabel, l);
    }
@ -157,6 +165,11 @@ static const JSC::MacroAssembler::RegisterID JSParamReg_Argc = JSC::ARMRegiste
load32(Address(obj, offsetof(JSObject, objShape)), shape);
}
    /* Jump taken when |obj|'s objShape differs from the constant |shape|. */
    Jump guardShape(RegisterID obj, uint32 shape) {
        return branch32(NotEqual, Address(obj, offsetof(JSObject, objShape)),
                        Imm32(shape));
    }
Jump testFunction(Condition cond, RegisterID fun) {
return branchPtr(cond, Address(fun, offsetof(JSObject, clasp)),
ImmPtr(&js_FunctionClass));

Просмотреть файл

@ -52,6 +52,7 @@
#include "FrameState-inl.h"
#include "jsscriptinlines.h"
#include "InlineFrameAssembler.h"
#include "jscompartment.h"
#include "jsautooplen.h"
@ -602,7 +603,7 @@ mjit::Compiler::generateMethod()
OpcodeStatus &opinfo = analysis[PC];
frame.setInTryBlock(opinfo.inTryBlock);
if (opinfo.nincoming || opinfo.trap) {
frame.forgetEverything(opinfo.stackDepth);
frame.syncAndForgetEverything(opinfo.stackDepth);
opinfo.safePoint = true;
}
jumpMap[uint32(PC - script->code)] = masm.label();
@ -683,7 +684,7 @@ mjit::Compiler::generateMethod()
BEGIN_CASE(JSOP_GOTO)
{
/* :XXX: this isn't really necessary if we follow the branch. */
frame.forgetEverything();
frame.syncAndForgetEverything();
Jump j = masm.jump();
jumpAndTrace(j, PC + GET_JUMP_OFFSET(PC));
}
@ -786,7 +787,7 @@ mjit::Compiler::generateMethod()
/* Branch is never taken, don't bother doing anything. */
if (result) {
frame.forgetEverything();
frame.syncAndForgetEverything();
Jump j = masm.jump();
jumpAndTrace(j, target);
}
@ -1105,10 +1106,10 @@ mjit::Compiler::generateMethod()
END_CASE(JSOP_AND)
BEGIN_CASE(JSOP_TABLESWITCH)
frame.forgetEverything();
frame.syncAndForgetEverything();
masm.move(ImmPtr(PC), Registers::ArgReg1);
/* prepareStubCall() is not needed due to forgetEverything() */
/* prepareStubCall() is not needed due to syncAndForgetEverything() */
stubCall(stubs::TableSwitch);
frame.pop();
@ -1118,10 +1119,10 @@ mjit::Compiler::generateMethod()
END_CASE(JSOP_TABLESWITCH)
BEGIN_CASE(JSOP_LOOKUPSWITCH)
frame.forgetEverything();
frame.syncAndForgetEverything();
masm.move(ImmPtr(PC), Registers::ArgReg1);
/* prepareStubCall() is not needed due to forgetEverything() */
/* prepareStubCall() is not needed due to syncAndForgetEverything() */
stubCall(stubs::LookupSwitch);
frame.pop();
@ -1213,12 +1214,23 @@ mjit::Compiler::generateMethod()
END_CASE(JSOP_GETLOCAL)
BEGIN_CASE(JSOP_SETLOCAL)
BEGIN_CASE(JSOP_SETLOCALPOP)
frame.storeLocal(GET_SLOTNO(PC));
if (op == JSOP_SETLOCALPOP)
{
jsbytecode *next = &PC[JSOP_SETLOCAL_LENGTH];
bool pop = JSOp(*next) == JSOP_POP && !analysis[next].nincoming;
frame.storeLocal(GET_SLOTNO(PC), pop);
if (pop) {
frame.pop();
PC += JSOP_SETLOCAL_LENGTH + JSOP_POP_LENGTH;
break;
}
}
END_CASE(JSOP_SETLOCAL)
BEGIN_CASE(JSOP_SETLOCALPOP)
frame.storeLocal(GET_SLOTNO(PC), true);
frame.pop();
END_CASE(JSOP_SETLOCALPOP)
BEGIN_CASE(JSOP_UINT16)
frame.push(Value(Int32Value((int32_t) GET_UINT16(PC))));
END_CASE(JSOP_UINT16)
@ -1361,7 +1373,7 @@ mjit::Compiler::generateMethod()
if (fun) {
JSLocalKind localKind = fun->lookupLocal(cx, inner->atom, NULL);
if (localKind != JSLOCAL_NONE)
frame.forgetEverything();
frame.syncAndForgetEverything();
}
prepareStubCall(Uses(0));
@ -1428,6 +1440,7 @@ mjit::Compiler::generateMethod()
END_CASE(JSOP_LAMBDA)
BEGIN_CASE(JSOP_TRY)
frame.syncAndForgetEverything();
END_CASE(JSOP_TRY)
BEGIN_CASE(JSOP_GETFCSLOT)
@ -1552,7 +1565,7 @@ mjit::Compiler::generateMethod()
/* For now, don't bother doing anything for this opcode. */
JSObject *obj = script->getObject(fullAtomIndex(PC));
frame.forgetEverything();
frame.syncAndForgetEverything();
masm.move(ImmPtr(obj), Registers::ArgReg1);
uint32 n = js_GetEnterBlockStackDefs(cx, script, PC);
stubCall(stubs::EnterBlock);
@ -1821,14 +1834,14 @@ mjit::Compiler::emitReturn()
/* There will always be a call object. */
prepareStubCall(Uses(0));
stubCall(stubs::PutCallObject);
frame.throwaway();
frame.discardFrame();
} else {
/* if (hasCallObj() || hasArgsObj()) stubs::PutActivationObjects() */
Jump putObjs = masm.branchTest32(Assembler::NonZero,
Address(JSFrameReg, JSStackFrame::offsetOfFlags()),
Imm32(JSFRAME_HAS_CALL_OBJ | JSFRAME_HAS_ARGS_OBJ));
stubcc.linkExit(putObjs, Uses(frame.frameDepth()));
frame.throwaway();
frame.discardFrame();
stubcc.leave();
stubcc.call(stubs::PutActivationObjects);
@ -1914,6 +1927,8 @@ mjit::Compiler::interruptCheckHelper()
Jump noInterrupt = stubcc.masm.branchTest32(Assembler::Zero, flag);
#endif
frame.freeReg(reg);
frame.sync(stubcc.masm, Uses(0));
stubcc.masm.move(ImmPtr(PC), Registers::ArgReg1);
stubcc.call(stubs::Interrupt);
@ -1923,8 +1938,6 @@ mjit::Compiler::interruptCheckHelper()
#ifdef JS_THREADSAFE
stubcc.linkRejoin(noInterrupt);
#endif
frame.freeReg(reg);
}
void
@ -2016,7 +2029,9 @@ mjit::Compiler::inlineCallHelper(uint32 argc, bool callingNew)
* registers we've preserved.
*/
frame.syncAndKill(Registers(Registers::AvailRegs), Uses(argc + 2));
frame.resetRegState();
frame.unpinKilledReg(dataReg);
if (typeReg.isSet())
frame.unpinKilledReg(typeReg.reg());
Registers tempRegs;
@ -2280,7 +2295,7 @@ mjit::Compiler::emitStubCmpOp(BoolStub stub, jsbytecode *target, JSOp fused)
} else {
JS_ASSERT(fused == JSOP_IFEQ || fused == JSOP_IFNE);
frame.forgetEverything();
frame.syncAndForgetEverything();
Assembler::Condition cond = (fused == JSOP_IFEQ)
? Assembler::Zero
: Assembler::NonZero;
@ -3654,7 +3669,7 @@ mjit::Compiler::iterMore()
/* Get props_cursor, test */
RegisterID T2 = frame.allocReg();
frame.forgetEverything();
frame.syncAndForgetEverything();
masm.loadPtr(Address(T1, offsetof(NativeIterator, props_cursor)), T2);
masm.loadPtr(Address(T1, offsetof(NativeIterator, props_end)), T1);
Jump jFast = masm.branchPtr(Assembler::LessThan, T2, T1);
@ -3891,8 +3906,7 @@ mjit::Compiler::jsop_setgname(uint32 index)
mic.shape);
masm.move(ImmPtr(obj), objReg);
} else {
objReg = frame.tempRegForData(objFe);
frame.pinReg(objReg);
objReg = frame.copyDataIntoReg(objFe);
RegisterID reg = frame.allocReg();
masm.loadShape(objReg, reg);
@ -3972,8 +3986,7 @@ mjit::Compiler::jsop_setgname(uint32 index)
JS_ASSERT(mic.patchValueOffset == masm.differenceBetween(mic.load, masm.label()));
#endif
if (objFe->isConstant())
frame.freeReg(objReg);
frame.freeReg(objReg);
frame.popn(2);
if (mic.u.name.dataConst) {
frame.push(v);

Просмотреть файл

@ -1000,7 +1000,7 @@ mjit::Compiler::jsop_equality_int_string(JSOp op, BoolStub stub, jsbytecode *tar
frame.pop();
frame.pop();
frame.throwaway();
frame.discardFrame();
/* Start of the slow path for equality stub call. */
Label stubCall = stubcc.masm.label();
@ -1286,7 +1286,7 @@ mjit::Compiler::jsop_relational_double(JSOp op, BoolStub stub, jsbytecode *targe
stubcc.call(stub);
frame.popn(2);
frame.forgetEverything();
frame.syncAndForgetEverything();
Jump j = masm.branchDouble(dblCond, fpLeft, fpRight);
@ -1453,7 +1453,12 @@ mjit::Compiler::jsop_relational_full(JSOp op, BoolStub stub, jsbytecode *target,
frame.pinReg(reg.reg());
frame.popn(2);
frame.forgetEverything();
frame.syncAndKillEverything();
frame.unpinKilledReg(cmpReg);
if (reg.isSet())
frame.unpinKilledReg(reg.reg());
frame.syncAndForgetEverything();
/* Operands could have been reordered, so use cmpOp. */
Assembler::Condition i32Cond;

Просмотреть файл

@ -206,10 +206,12 @@ mjit::Compiler::jsop_rsh_unknown_any(FrameEntry *lhs, FrameEntry *rhs)
frame.pinReg(rhsType.reg());
}
RegisterID lhsType = frame.tempRegForType(lhs);
frame.pinReg(lhsType);
RegisterID lhsData = frame.copyDataIntoReg(lhs);
frame.unpinReg(lhsType);
MaybeRegisterID lhsType;
if (rhsType.isSet() && frame.haveSameBacking(lhs, rhs))
lhsType = rhsType;
else
lhsType = frame.tempRegForType(lhs);
/* Non-integer rhs jumps to stub. */
MaybeJump rhsIntGuard;
@ -219,11 +221,11 @@ mjit::Compiler::jsop_rsh_unknown_any(FrameEntry *lhs, FrameEntry *rhs)
}
/* Non-integer lhs jumps to double guard. */
Jump lhsIntGuard = masm.testInt32(Assembler::NotEqual, lhsType);
Jump lhsIntGuard = masm.testInt32(Assembler::NotEqual, lhsType.reg());
stubcc.linkExitDirect(lhsIntGuard, stubcc.masm.label());
/* Attempt to convert lhs double to int32. */
Jump lhsDoubleGuard = stubcc.masm.testDouble(Assembler::NotEqual, lhsType);
Jump lhsDoubleGuard = stubcc.masm.testDouble(Assembler::NotEqual, lhsType.reg());
frame.loadDouble(lhs, FPRegisters::First, stubcc.masm);
Jump lhsTruncateGuard = stubcc.masm.branchTruncateDoubleToInt32(FPRegisters::First, lhsData);
stubcc.crossJump(stubcc.masm.jump(), masm.label());
@ -664,7 +666,7 @@ mjit::Compiler::jsop_equality(JSOp op, BoolStub stub, jsbytecode *target, JSOp f
*/
if (target) {
frame.forgetEverything();
frame.syncAndForgetEverything();
if ((op == JSOP_EQ && fused == JSOP_IFNE) ||
(op == JSOP_NE && fused == JSOP_IFEQ)) {
@ -722,6 +724,8 @@ mjit::Compiler::jsop_relational(JSOp op, BoolStub stub, jsbytecode *target, JSOp
emitStubCmpOp(stub, target, fused);
} else if (!target && (lhs->isType(JSVAL_TYPE_STRING) || rhs->isType(JSVAL_TYPE_STRING))) {
emitStubCmpOp(stub, target, fused);
} else if (frame.haveSameBacking(lhs, rhs)) {
emitStubCmpOp(stub, target, fused);
} else {
jsop_equality_int_string(op, stub, target, fused);
}
@ -911,8 +915,7 @@ mjit::Compiler::booleanJumpScript(JSOp op, jsbytecode *target)
type.setReg(frame.copyTypeIntoReg(fe));
data.setReg(frame.copyDataIntoReg(fe));
/* :FIXME: Can something more lightweight be used? */
frame.forgetEverything();
frame.syncAndForgetEverything();
Assembler::Condition cond = (op == JSOP_IFNE || op == JSOP_OR)
? Assembler::NonZero
@ -999,7 +1002,7 @@ mjit::Compiler::jsop_ifneq(JSOp op, jsbytecode *target)
if (op == JSOP_IFEQ)
b = !b;
if (b) {
frame.forgetEverything();
frame.syncAndForgetEverything();
jumpAndTrace(masm.jump(), target);
}
return;
@ -1019,7 +1022,7 @@ mjit::Compiler::jsop_andor(JSOp op, jsbytecode *target)
/* Short-circuit. */
if ((op == JSOP_OR && b == JS_TRUE) ||
(op == JSOP_AND && b == JS_FALSE)) {
frame.forgetEverything();
frame.syncAndForgetEverything();
jumpAndTrace(masm.jump(), target);
}

Просмотреть файл

@ -128,6 +128,7 @@ class FrameEntry
    /* Reset this entry and begin tracking it at tracker position |index|. */
    void track(uint32 index) {
        clear();
        index_ = index;
        tracked = true;
    }
void clear() {
@ -210,6 +211,14 @@ class FrameEntry
copy = fe;
}
    /* Whether this entry is currently registered in the tracker. */
    inline bool isTracked() const {
        return tracked;
    }
    /* Drop this entry from tracking without touching its other state. */
    inline void untrack() {
        tracked = false;
    }
private:
JSValueType knownType;
jsval_layout v_;
@ -219,7 +228,8 @@ class FrameEntry
FrameEntry *copy;
bool copied;
bool isNumber;
char padding[2];
bool tracked;
char padding[1];
};
} /* namespace mjit */

Просмотреть файл

@ -43,16 +43,13 @@
namespace js {
namespace mjit {
inline FrameEntry *
FrameState::addToTracker(uint32 index)
inline void
FrameState::addToTracker(FrameEntry *fe)
{
JS_ASSERT(!base[index]);
FrameEntry *fe = &entries[index];
base[index] = fe;
JS_ASSERT(!fe->isTracked());
fe->track(tracker.nentries);
tracker.add(fe);
JS_ASSERT(tracker.nentries <= script->nslots);
return fe;
}
inline FrameEntry *
@ -60,9 +57,9 @@ FrameState::peek(int32 depth)
{
JS_ASSERT(depth < 0);
JS_ASSERT(sp + depth >= spBase);
FrameEntry *fe = sp[depth];
if (!fe) {
fe = addToTracker(indexOf(depth));
FrameEntry *fe = &sp[depth];
if (!fe->isTracked()) {
addToTracker(fe);
fe->resetSynced();
}
return fe;
@ -89,11 +86,13 @@ inline JSC::MacroAssembler::RegisterID
FrameState::allocReg()
{
RegisterID reg;
if (!freeRegs.empty())
if (!freeRegs.empty()) {
reg = freeRegs.takeAnyReg();
else
} else {
reg = evictSomeReg();
regstate[reg].fe = NULL;
regstate[reg].forget();
}
return reg;
}
@ -101,11 +100,13 @@ inline JSC::MacroAssembler::RegisterID
FrameState::allocReg(uint32 mask)
{
RegisterID reg;
if (freeRegs.hasRegInMask(mask))
if (freeRegs.hasRegInMask(mask)) {
reg = freeRegs.takeRegInMask(mask);
else
} else {
reg = evictSomeReg(mask);
regstate[reg].fe = NULL;
regstate[reg].forget();
}
return reg;
}
@ -113,11 +114,15 @@ inline JSC::MacroAssembler::RegisterID
FrameState::allocReg(FrameEntry *fe, RematInfo::RematType type)
{
RegisterID reg;
if (!freeRegs.empty())
if (!freeRegs.empty()) {
reg = freeRegs.takeAnyReg();
else
} else {
reg = evictSomeReg();
regstate[reg] = RegisterState(fe, type);
regstate[reg].forget();
}
regstate[reg].associate(fe, type);
return reg;
}
@ -162,8 +167,8 @@ FrameState::pop()
{
JS_ASSERT(sp > spBase);
FrameEntry *fe = *--sp;
if (!fe)
FrameEntry *fe = --sp;
if (!fe->isTracked())
return;
forgetAllRegs(fe);
@ -172,7 +177,8 @@ FrameState::pop()
inline void
FrameState::freeReg(RegisterID reg)
{
JS_ASSERT(regstate[reg].fe == NULL);
JS_ASSERT(!regstate[reg].usedBy());
freeRegs.putReg(reg);
}
@ -183,28 +189,30 @@ FrameState::forgetReg(RegisterID reg)
* Important: Do not touch the fe here. We can peephole optimize away
* loads and stores by re-using the contents of old FEs.
*/
JS_ASSERT_IF(regstate[reg].fe, !regstate[reg].fe->isCopy());
freeRegs.putReg(reg);
JS_ASSERT_IF(regstate[reg].fe(), !regstate[reg].fe()->isCopy());
if (!regstate[reg].isPinned()) {
regstate[reg].forget();
freeRegs.putReg(reg);
}
}
inline void
FrameState::forgetEverything(uint32 newStackDepth)
FrameState::syncAndForgetEverything(uint32 newStackDepth)
{
forgetEverything();
syncAndForgetEverything();
sp = spBase + newStackDepth;
}
inline FrameEntry *
FrameState::rawPush()
{
JS_ASSERT(unsigned(sp - base) < nargs + script->nslots);
JS_ASSERT(unsigned(sp - entries) < nargs + script->nslots);
sp++;
if (!sp->isTracked())
addToTracker(sp);
if (FrameEntry *fe = sp[-1])
return fe;
return addToTracker(&sp[-1] - base);
return sp++;
}
inline void
@ -217,10 +225,9 @@ FrameState::push(const Value &v)
inline void
FrameState::pushSynced()
{
if (sp->isTracked())
sp->resetSynced();
sp++;
if (FrameEntry *fe = sp[-1])
fe->resetSynced();
}
inline void
@ -242,7 +249,7 @@ FrameState::pushSynced(JSValueType type, RegisterID reg)
fe->data.sync();
fe->setType(type);
fe->data.setRegister(reg);
regstate[reg] = RegisterState(fe, RematInfo::DATA);
regstate[reg].associate(fe, RematInfo::DATA);
}
inline void
@ -281,8 +288,8 @@ FrameState::pushRegs(RegisterID type, RegisterID data)
fe->resetUnsynced();
fe->type.setRegister(type);
fe->data.setRegister(data);
regstate[type] = RegisterState(fe, RematInfo::TYPE);
regstate[data] = RegisterState(fe, RematInfo::DATA);
regstate[type].associate(fe, RematInfo::TYPE);
regstate[data].associate(fe, RematInfo::DATA);
}
inline void
@ -295,7 +302,7 @@ FrameState::pushTypedPayload(JSValueType type, RegisterID payload)
fe->resetUnsynced();
fe->setType(type);
fe->data.setRegister(payload);
regstate[payload] = RegisterState(fe, RematInfo::DATA);
regstate[payload].associate(fe, RematInfo::DATA);
}
inline void
@ -320,7 +327,7 @@ FrameState::pushNumber(MaybeRegisterID payload, bool asInt32)
if (payload.isSet()) {
fe->data.unsync();
fe->data.setRegister(payload.reg());
regstate[payload.reg()] = RegisterState(fe, RematInfo::DATA);
regstate[payload.reg()].associate(fe, RematInfo::DATA);
} else {
fe->data.setMemory();
}
@ -339,7 +346,7 @@ FrameState::pushInt32(RegisterID payload)
fe->isNumber = true;
fe->data.unsync();
fe->data.setRegister(payload);
regstate[payload] = RegisterState(fe, RematInfo::DATA);
regstate[payload].associate(fe, RematInfo::DATA);
}
inline void
@ -362,13 +369,13 @@ FrameState::pushUntypedPayload(JSValueType type, RegisterID payload)
fe->setNotCopied();
fe->setCopyOf(NULL);
fe->data.setRegister(payload);
regstate[payload] = RegisterState(fe, RematInfo::DATA);
regstate[payload].associate(fe, RematInfo::DATA);
}
inline JSC::MacroAssembler::RegisterID
FrameState::tempRegForType(FrameEntry *fe, RegisterID fallback)
{
JS_ASSERT(regstate[fallback].fe == NULL);
JS_ASSERT(!regstate[fallback].fe());
if (fe->isCopy())
fe = fe->copyOf();
@ -435,7 +442,7 @@ FrameState::tempRegInMaskForData(FrameEntry *fe, uint32 mask)
return old;
/* Keep the old register pinned. */
regstate[old].fe = NULL;
regstate[old].forget();
reg = allocReg(mask);
masm.move(old, reg);
freeReg(old);
@ -443,7 +450,7 @@ FrameState::tempRegInMaskForData(FrameEntry *fe, uint32 mask)
reg = allocReg(mask);
masm.loadPayload(addressOf(fe), reg);
}
regstate[reg] = RegisterState(fe, RematInfo::DATA);
regstate[reg].associate(fe, RematInfo::DATA);
fe->data.setRegister(reg);
return reg;
}
@ -625,28 +632,31 @@ inline FrameEntry *
FrameState::getLocal(uint32 slot)
{
uint32 index = nargs + slot;
if (FrameEntry *fe = base[index])
return fe;
FrameEntry *fe = addToTracker(index);
fe->resetSynced();
FrameEntry *fe = &entries[index];
if (!fe->isTracked()) {
addToTracker(fe);
fe->resetSynced();
}
return fe;
}
inline void
FrameState::pinReg(RegisterID reg)
{
JS_ASSERT(!freeRegs.hasReg(reg));
JS_ASSERT(regstate[reg].fe);
regstate[reg].save = regstate[reg].fe;
regstate[reg].fe = NULL;
regstate[reg].pin();
}
inline void
FrameState::unpinReg(RegisterID reg)
{
JS_ASSERT(!freeRegs.hasReg(reg));
JS_ASSERT(!regstate[reg].fe);
regstate[reg].fe = regstate[reg].save;
regstate[reg].unpin();
}
inline void
FrameState::unpinKilledReg(RegisterID reg)
{
regstate[reg].unpinUnsafe();
freeRegs.putReg(reg);
}
inline void
@ -658,12 +668,6 @@ FrameState::forgetAllRegs(FrameEntry *fe)
forgetReg(fe->data.reg());
}
inline FrameEntry *
FrameState::tosFe() const
{
return &entries[uint32(sp - base)];
}
inline void
FrameState::swapInTracker(FrameEntry *lhs, FrameEntry *rhs)
{
@ -718,8 +722,8 @@ FrameState::pushLocal(uint32 n)
* SETLOCAL equivocation of stack slots, and let expressions, just
* weakly assert on the fixed local vars.
*/
FrameEntry *fe = base[localIndex(n)];
if (fe && n < script->nfixed) {
FrameEntry *fe = &locals[n];
if (fe->isTracked() && n < script->nfixed) {
JS_ASSERT(fe->type.inMemory());
JS_ASSERT(fe->data.inMemory());
}

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -45,6 +45,7 @@
#include "methodjit/FrameEntry.h"
#include "CodeGenIncludes.h"
#include "ImmutableSync.h"
#include "jscompartment.h"
namespace js {
namespace mjit {
@ -175,22 +176,103 @@ class FrameState
uint32 nentries;
};
/*
* Some RegisterState invariants.
*
* If |fe| is non-NULL, |save| is NULL.
* If |save| is non-NULL, |fe| is NULL.
* That is, both |fe| and |save| cannot be non-NULL.
*
* If either |fe| or |save| is non-NULL, the register is not in freeRegs.
* If both |fe| and |save| are NULL, the register is either in freeRegs,
* or owned by the compiler.
*/
struct RegisterState {
RegisterState()
RegisterState() : fe_(NULL), save_(NULL)
{ }
RegisterState(FrameEntry *fe, RematInfo::RematType type)
: fe(fe), type(type)
{ }
: fe_(fe), save_(NULL), type_(type)
{
JS_ASSERT(!save_);
}
bool isPinned() const {
assertConsistency();
return !!save_;
}
void assertConsistency() const {
JS_ASSERT_IF(fe_, !save_);
JS_ASSERT_IF(save_, !fe_);
}
FrameEntry *fe() const {
assertConsistency();
return fe_;
}
RematInfo::RematType type() const {
assertConsistency();
return type_;
}
        /* FrameEntry occupying this register, pinned or not; NULL if free. */
        FrameEntry *usedBy() const {
            if (fe_)
                return fe_;
            return save_;
        }
void associate(FrameEntry *fe, RematInfo::RematType type) {
JS_ASSERT(!fe_);
JS_ASSERT(!save_);
fe_ = fe;
type_ = type;
JS_ASSERT(!save_);
}
/* Change ownership. */
void reassociate(FrameEntry *fe) {
assertConsistency();
JS_ASSERT(fe);
fe_ = fe;
}
/* Unassociate this register from the FE. */
void forget() {
JS_ASSERT(fe_);
fe_ = NULL;
JS_ASSERT(!save_);
}
        /* Pin: stash the owner in save_ so the allocator cannot spill it. */
        void pin() {
            assertConsistency();
            save_ = fe_;
            fe_ = NULL;
        }
        /* Undo pin(): restore the owning FrameEntry. */
        void unpin() {
            assertConsistency();
            fe_ = save_;
            save_ = NULL;
        }
void unpinUnsafe() {
assertConsistency();
save_ = NULL;
}
private:
/* FrameEntry owning this register, or NULL if not owned by a frame. */
FrameEntry *fe;
FrameEntry *fe_;
/* Hack - simplifies register allocation for pairs. */
FrameEntry *save;
FrameEntry *save_;
/* Part of the FrameEntry that owns the FE. */
RematInfo::RematType type;
RematInfo::RematType type_;
};
public:
@ -505,9 +587,10 @@ class FrameState
void storeTo(FrameEntry *fe, Address address, bool popHint = false);
/*
* Stores the top stack slot back to a local variable.
* Stores the top stack slot back to a slot.
*/
void storeLocal(uint32 n, bool popGuaranteed = false, bool typeChange = true);
void storeTop(FrameEntry *target, bool popGuaranteed = false, bool typeChange = true);
/*
* Restores state from a slow path.
@ -526,28 +609,34 @@ class FrameState
void syncAndKill(Registers kill, Uses uses, Uses ignored);
void syncAndKill(Registers kill, Uses uses) { syncAndKill(kill, uses, Uses(0)); }
/*
* Reset the register state.
*/
void resetRegState();
/* Syncs and kills everything. */
void syncAndKillEverything() {
syncAndKill(Registers(Registers::AvailRegs), Uses(frameDepth()));
}
/*
* Clear all tracker entries, syncing all outstanding stores in the process.
* The stack depth is in case some merge points' edges did not immediately
* precede the current instruction.
*/
inline void forgetEverything(uint32 newStackDepth);
inline void syncAndForgetEverything(uint32 newStackDepth);
/*
* Same as above, except the stack depth is not changed. This is used for
* branching opcodes.
*/
void forgetEverything();
void syncAndForgetEverything();
/*
* Throw away the entire frame state, without syncing anything.
* This can only be called after a syncAndKill() against all registers.
*/
void throwaway();
void forgetEverything();
/*
* Discard the entire framestate forcefully.
*/
void discardFrame();
/*
* Mark an existing slot with a type.
@ -603,8 +692,9 @@ class FrameState
/*
* Marks a register such that it cannot be spilled by the register
* allocator. Any pinned registers must be unpinned at the end of the op.
* Note: This function should only be used on registers tied to FEs.
* allocator. Any pinned registers must be unpinned at the end of the op,
* no matter what. In addition, pinReg() can only be used on registers
* which are associated with FrameEntries.
*/
inline void pinReg(RegisterID reg);
@ -613,6 +703,11 @@ class FrameState
*/
inline void unpinReg(RegisterID reg);
/*
* Same as unpinReg(), but does not restore the FrameEntry.
*/
inline void unpinKilledReg(RegisterID reg);
/*
* Dups the top item on the stack.
*/
@ -639,7 +734,6 @@ class FrameState
*/
uint32 stackDepth() const { return sp - spBase; }
uint32 frameDepth() const { return stackDepth() + script->nfixed; }
inline FrameEntry *tosFe() const;
#ifdef DEBUG
void assertValidRegisterState() const;
@ -687,7 +781,7 @@ class FrameState
RegisterID evictSomeReg(uint32 mask);
void evictReg(RegisterID reg);
inline FrameEntry *rawPush();
inline FrameEntry *addToTracker(uint32 index);
inline void addToTracker(FrameEntry *fe);
inline void syncType(const FrameEntry *fe, Address to, Assembler &masm) const;
inline void syncData(const FrameEntry *fe, Address to, Assembler &masm) const;
inline FrameEntry *getLocal(uint32 slot);
@ -695,9 +789,10 @@ class FrameState
inline void swapInTracker(FrameEntry *lhs, FrameEntry *rhs);
inline uint32 localIndex(uint32 n);
void pushCopyOf(uint32 index);
void syncFancy(Assembler &masm, Registers avail, uint32 resumeAt,
void syncFancy(Assembler &masm, Registers avail, FrameEntry *resumeAt,
FrameEntry *bottom) const;
inline bool tryFastDoubleLoad(FrameEntry *fe, FPRegisterID fpReg, Assembler &masm) const;
void resetInternalState();
/*
* "Uncopies" the backing store of a FrameEntry that has been copied. The
@ -708,25 +803,29 @@ class FrameState
* Later addition: uncopy() returns the first copy found.
*/
FrameEntry *uncopy(FrameEntry *original);
FrameEntry *walkTrackerForUncopy(FrameEntry *original);
FrameEntry *walkFrameForUncopy(FrameEntry *original);
/*
* All registers in the FE are forgotten. If it is copied, it is uncopied
* beforehand.
*/
void forgetEntry(FrameEntry *fe);
FrameEntry *entryFor(uint32 index) const {
JS_ASSERT(base[index]);
JS_ASSERT(entries[index].isTracked());
return &entries[index];
}
void moveOwnership(RegisterID reg, FrameEntry *newFe) {
regstate[reg].fe = newFe;
}
RegisterID evictSomeReg() {
return evictSomeReg(Registers::AvailRegs);
}
uint32 indexOf(int32 depth) {
return uint32((sp + depth) - base);
return uint32((sp + depth) - entries);
}
uint32 indexOfFe(FrameEntry *fe) {
uint32 indexOfFe(FrameEntry *fe) const {
return uint32(fe - entries);
}
@ -742,20 +841,17 @@ class FrameState
/* Cache of FrameEntry objects. */
FrameEntry *entries;
/* Base pointer of the FrameEntry vector. */
FrameEntry **base;
/* Base pointer for arguments. */
FrameEntry **args;
FrameEntry *args;
/* Base pointer for local variables. */
FrameEntry **locals;
FrameEntry *locals;
/* Base pointer for the stack. */
FrameEntry **spBase;
FrameEntry *spBase;
/* Dynamic stack pointer. */
FrameEntry **sp;
FrameEntry *sp;
/* Vector of tracked slot indexes. */
Tracker tracker;

Просмотреть файл

@ -45,7 +45,7 @@ using namespace js;
using namespace js::mjit;
ImmutableSync::ImmutableSync(JSContext *cx, const FrameState &frame)
: cx(cx), entries(NULL), frame(frame)
: cx(cx), entries(NULL), frame(frame), generation(0)
{
}
@ -57,19 +57,18 @@ ImmutableSync::~ImmutableSync()
bool
ImmutableSync::init(uint32 nentries)
{
entries = (SyncEntry *)cx->malloc(sizeof(SyncEntry) * nentries);
entries = (SyncEntry *)cx->calloc(sizeof(SyncEntry) * nentries);
return !!entries;
}
void
ImmutableSync::reset(Assembler *masm, Registers avail, uint32 n,
FrameEntry *bottom)
ImmutableSync::reset(Assembler *masm, Registers avail, FrameEntry *top, FrameEntry *bottom)
{
this->avail = avail;
this->nentries = n;
this->masm = masm;
this->top = top;
this->bottom = bottom;
memset(entries, 0, sizeof(SyncEntry) * nentries);
this->generation++;
memset(regs, 0, sizeof(regs));
}
@ -92,17 +91,10 @@ ImmutableSync::allocReg()
if (!regs[i]) {
/* If the frame does not own this register, take it! */
FrameEntry *fe = frame.regstate[i].fe;
FrameEntry *fe = frame.regstate[i].fe();
if (!fe)
return reg;
/*
* The Reifier does not own this register, but the frame does.
* This must mean that we've not yet processed this entry, and
* that it's data has not been clobbered.
*/
JS_ASSERT(fe->trackerIndex() < nentries);
evictFromFrame = i;
/*
@ -115,18 +107,14 @@ ImmutableSync::allocReg()
}
if (evictFromFrame != FrameState::InvalidIndex) {
FrameEntry *fe = frame.regstate[evictFromFrame].fe;
FrameEntry *fe = frame.regstate[evictFromFrame].fe();
SyncEntry &e = entryFor(fe);
if (frame.regstate[evictFromFrame].type == RematInfo::TYPE) {
if (frame.regstate[evictFromFrame].type() == RematInfo::TYPE) {
JS_ASSERT(!e.typeClobbered);
e.typeSynced = true;
e.typeClobbered = true;
masm->storeTypeTag(fe->type.reg(), frame.addressOf(fe));
} else {
JS_ASSERT(!e.dataClobbered);
e.dataSynced = true;
e.dataClobbered = true;
masm->storePayload(fe->data.reg(), frame.addressOf(fe));
}
return RegisterID(evictFromFrame);
}
@ -150,39 +138,38 @@ ImmutableSync::allocReg()
inline ImmutableSync::SyncEntry &
ImmutableSync::entryFor(FrameEntry *fe)
{
JS_ASSERT(fe->trackerIndex() < nentries);
return entries[fe->trackerIndex()];
JS_ASSERT(fe <= top);
SyncEntry &e = entries[frame.indexOfFe(fe)];
if (e.generation != generation)
e.reset(generation);
return e;
}
void
ImmutableSync::sync(FrameEntry *fe)
{
JS_ASSERT(nentries);
#ifdef DEBUG
top = fe;
#endif
if (fe->isCopy())
syncCopy(fe);
else
syncNormal(fe);
nentries--;
}
bool
ImmutableSync::shouldSyncType(FrameEntry *fe, SyncEntry &e)
{
if (fe->type.inRegister() && !e.typeClobbered)
return true;
if (e.hasTypeReg)
return true;
return frame.inTryBlock || fe >= bottom;
/* Registers are synced up-front. */
return !fe->type.synced() && !fe->type.inRegister();
}
bool
ImmutableSync::shouldSyncData(FrameEntry *fe, SyncEntry &e)
{
if (fe->data.inRegister() && !e.dataClobbered)
return true;
if (e.hasDataReg)
return true;
return frame.inTryBlock || fe >= bottom;
/* Registers are synced up-front. */
return !fe->data.synced() && !fe->data.inRegister();
}
JSC::MacroAssembler::RegisterID
@ -216,8 +203,7 @@ ImmutableSync::ensureDataReg(FrameEntry *fe, SyncEntry &e)
void
ImmutableSync::syncCopy(FrameEntry *fe)
{
if (!frame.inTryBlock && fe < bottom)
return;
JS_ASSERT(fe >= bottom);
FrameEntry *backing = fe->copyOf();
SyncEntry &e = entryFor(backing);
@ -254,7 +240,7 @@ ImmutableSync::syncNormal(FrameEntry *fe)
e.type = fe->getKnownType();
}
if (!fe->data.synced() && !e.dataSynced && shouldSyncData(fe, e)) {
if (shouldSyncData(fe, e)) {
if (fe->isConstant()) {
masm->storeValue(fe->getValue(), addr);
return;
@ -262,7 +248,7 @@ ImmutableSync::syncNormal(FrameEntry *fe)
masm->storePayload(ensureDataReg(fe, e), addr);
}
if (!fe->type.synced() && !e.typeSynced && shouldSyncType(fe, e)) {
if (shouldSyncType(fe, e)) {
if (e.learnedType)
masm->storeTypeTag(ImmType(e.type), addr);
else
@ -272,14 +258,14 @@ ImmutableSync::syncNormal(FrameEntry *fe)
if (e.hasDataReg) {
avail.putReg(e.dataReg);
regs[e.dataReg] = NULL;
} else if (!e.dataClobbered && fe->data.inRegister() && frame.regstate[fe->data.reg()].fe) {
} else if (!e.dataClobbered && fe->data.inRegister() && frame.regstate[fe->data.reg()].fe()) {
avail.putReg(fe->data.reg());
}
if (e.hasTypeReg) {
avail.putReg(e.typeReg);
regs[e.typeReg] = NULL;
} else if (!e.typeClobbered && fe->type.inRegister() && frame.regstate[fe->type.reg()].fe) {
} else if (!e.typeClobbered && fe->type.inRegister() && frame.regstate[fe->type.reg()].fe()) {
avail.putReg(fe->type.reg());
}
}

Просмотреть файл

@ -70,16 +70,24 @@ class ImmutableSync
*
* They are separated for readability.
*/
bool dataSynced;
bool typeSynced;
uint32 generation;
bool dataClobbered;
bool typeClobbered;
RegisterID dataReg;
RegisterID typeReg;
bool hasDataReg;
bool hasTypeReg;
bool learnedType;
RegisterID dataReg;
RegisterID typeReg;
JSValueType type;
void reset(uint32 gen) {
dataClobbered = false;
typeClobbered = false;
hasDataReg = false;
hasTypeReg = false;
learnedType = false;
generation = gen;
}
};
public:
@ -87,8 +95,7 @@ class ImmutableSync
~ImmutableSync();
bool init(uint32 nentries);
void reset(Assembler *masm, Registers avail, uint32 n,
FrameEntry *bottom);
void reset(Assembler *masm, Registers avail, FrameEntry *top, FrameEntry *bottom);
void sync(FrameEntry *fe);
private:
@ -111,7 +118,9 @@ class ImmutableSync
Registers avail;
Assembler *masm;
SyncEntry *regs[Assembler::TotalRegisters];
FrameEntry *top;
FrameEntry *bottom;
uint32 generation;
};
} /* namespace mjit */

Просмотреть файл

@ -86,9 +86,6 @@ using namespace JSC;
return v; \
} while (0)
static bool
InlineReturn(VMFrame &f, JSBool ok);
static jsbytecode *
FindExceptionHandler(JSContext *cx)
{
@ -642,30 +639,40 @@ AdvanceReturnPC(JSContext *cx)
#ifdef JS_TRACER
static inline bool
SwallowErrors(VMFrame &f, JSStackFrame *stopFp)
HandleErrorInExcessFrames(VMFrame &f, JSStackFrame *stopFp)
{
JSContext *cx = f.cx;
/* Remove the bottom frame. */
bool ok = false;
for (;;) {
JSStackFrame *fp = cx->fp();
/*
* Callers of this called either Interpret() or JaegerShot(), which would
* have searched for exception handlers already. If we see stopFp, just
* return false. Otherwise, pop the frame, since it's guaranteed useless.
*/
JSStackFrame *fp = cx->fp();
if (fp == stopFp)
return false;
/* Look for an imacro with hard-coded exception handlers. */
if (fp->hasImacropc() && cx->throwing) {
bool returnOK = InlineReturn(f, false);
/* Remove the bottom frame. */
for (;;) {
fp = cx->fp();
/* Clear imacros. */
if (fp->hasImacropc()) {
cx->regs->pc = fp->imacropc();
fp->clearImacropc();
if (ok)
break;
}
JS_ASSERT(!fp->hasImacropc());
/* If there's an exception and a handler, set the pc and leave. */
jsbytecode *pc = FindExceptionHandler(cx);
if (pc) {
cx->regs->pc = pc;
ok = true;
break;
if (cx->throwing) {
jsbytecode *pc = FindExceptionHandler(cx);
if (pc) {
cx->regs->pc = pc;
returnOK = true;
break;
}
}
/* Don't unwind if this was the entry frame. */
@ -673,15 +680,14 @@ SwallowErrors(VMFrame &f, JSStackFrame *stopFp)
break;
/* Unwind and return. */
ok &= bool(js_UnwindScope(cx, 0, cx->throwing));
InlineReturn(f, ok);
returnOK &= bool(js_UnwindScope(cx, 0, returnOK || cx->throwing));
returnOK = InlineReturn(f, returnOK);
}
/* Update the VMFrame before leaving. */
JS_ASSERT(&f.regs == cx->regs);
JS_ASSERT_IF(!returnOK, cx->fp() == stopFp);
JS_ASSERT_IF(!ok, cx->fp() == stopFp);
return ok;
return returnOK;
}
static inline bool
@ -728,7 +734,7 @@ FrameIsFinished(JSContext *cx)
}
static bool
RemoveExcessFrames(VMFrame &f, JSStackFrame *entryFrame)
FinishExcessFrames(VMFrame &f, JSStackFrame *entryFrame)
{
JSContext *cx = f.cx;
while (cx->fp() != entryFrame || entryFrame->hasImacropc()) {
@ -737,7 +743,7 @@ RemoveExcessFrames(VMFrame &f, JSStackFrame *entryFrame)
if (AtSafePoint(cx)) {
JSScript *script = fp->script();
if (!JaegerShotAtSafePoint(cx, script->nmap[cx->regs->pc - script->code])) {
if (!SwallowErrors(f, entryFrame))
if (!HandleErrorInExcessFrames(f, entryFrame))
return false;
/* Could be anywhere - restart outer loop. */
@ -747,7 +753,7 @@ RemoveExcessFrames(VMFrame &f, JSStackFrame *entryFrame)
AdvanceReturnPC(cx);
} else {
if (!PartialInterpret(f)) {
if (!SwallowErrors(f, entryFrame))
if (!HandleErrorInExcessFrames(f, entryFrame))
return false;
} else if (cx->fp() != entryFrame) {
/*
@ -838,7 +844,7 @@ RunTracer(VMFrame &f)
return NULL;
case TPA_Error:
if (!SwallowErrors(f, entryFrame))
if (!HandleErrorInExcessFrames(f, entryFrame))
THROWV(NULL);
JS_ASSERT(!cx->fp()->hasImacropc());
break;
@ -871,8 +877,8 @@ RunTracer(VMFrame &f)
*/
restart:
/* Step 1. Initial removal of excess frames. */
if (!RemoveExcessFrames(f, entryFrame))
/* Step 1. Finish frames created after the entry frame. */
if (!FinishExcessFrames(f, entryFrame))
THROWV(NULL);
/* IMacros are guaranteed to have been removed by now. */
@ -903,7 +909,7 @@ RunTracer(VMFrame &f)
/* Step 4. Do a partial interp, then restart the whole process. */
if (!PartialInterpret(f)) {
if (!SwallowErrors(f, entryFrame))
if (!HandleErrorInExcessFrames(f, entryFrame))
THROWV(NULL);
}

Просмотреть файл

@ -190,11 +190,16 @@ struct Registers {
return !(freeMask & mask);
}
RegisterID takeAnyReg() {
RegisterID peekReg() {
JS_ASSERT(!empty());
int ireg;
JS_FLOOR_LOG2(ireg, freeMask);
RegisterID reg = (RegisterID)ireg;
return reg;
}
RegisterID takeAnyReg() {
RegisterID reg = peekReg();
takeReg(reg);
return reg;
}

Просмотреть файл

@ -45,6 +45,10 @@
#include "PolyIC.h"
#include "TrampolineCompiler.h"
#include "jscntxtinlines.h"
#include "jscompartment.h"
#include "jsscope.h"
#include "jsgcinlines.h"
using namespace js;
using namespace js::mjit;
@ -866,23 +870,6 @@ mjit::ReleaseScriptCode(JSContext *cx, JSScript *script)
}
}
void
mjit::SweepCallICs(JSContext *cx)
{
#ifdef JS_MONOIC
JSRuntime *rt = cx->runtime;
for (size_t i = 0; i < rt->compartments.length(); i++) {
JSCompartment *compartment = rt->compartments[i];
for (JSScript *script = (JSScript *)compartment->scripts.next;
&script->links != &compartment->scripts;
script = (JSScript *)script->links.next) {
if (script->jit)
ic::SweepCallICs(cx, script);
}
}
#endif
}
#ifdef JS_METHODJIT_PROFILE_STUBS
void JS_FASTCALL
mjit::ProfileStubCall(VMFrame &f)

Просмотреть файл

@ -204,9 +204,6 @@ TryCompile(JSContext *cx, JSScript *script, JSFunction *fun, JSObject *scopeChai
void
ReleaseScriptCode(JSContext *cx, JSScript *script);
void
SweepCallICs(JSContext *cx);
static inline CompileStatus
CanMethodJIT(JSContext *cx, JSScript *script, JSFunction *fun, JSObject *scopeChain)
{

Просмотреть файл

@ -732,7 +732,7 @@ ic::PurgeMICs(JSContext *cx, JSScript *script)
}
void
ic::SweepCallICs(JSContext *cx, JSScript *script)
ic::SweepCallICs(JSScript *script)
{
for (uint32 i = 0; i < script->jit->nCallICs; i++) {
ic::CallICInfo &ic = script->callICs[i];
@ -742,8 +742,8 @@ ic::SweepCallICs(JSContext *cx, JSScript *script)
* executing a stub generated by a guard on that object. This lets us
* precisely GC call ICs while keeping the identity guard safe.
*/
bool fastFunDead = ic.fastGuardedObject && js_IsAboutToBeFinalized(ic.fastGuardedObject);
bool nativeDead = ic.fastGuardedNative && js_IsAboutToBeFinalized(ic.fastGuardedNative);
bool fastFunDead = ic.fastGuardedObject && IsAboutToBeFinalized(ic.fastGuardedObject);
bool nativeDead = ic.fastGuardedNative && IsAboutToBeFinalized(ic.fastGuardedNative);
if (!fastFunDead && !nativeDead)
continue;

Просмотреть файл

@ -193,7 +193,7 @@ void JS_FASTCALL NativeNew(VMFrame &f, uint32 index);
void JS_FASTCALL NativeCall(VMFrame &f, uint32 index);
void PurgeMICs(JSContext *cx, JSScript *script);
void SweepCallICs(JSContext *cx, JSScript *script);
void SweepCallICs(JSScript *script);
} /* namespace ic */
} /* namespace mjit */

Просмотреть файл

@ -74,11 +74,8 @@ class Assembler : public BaseAssembler
public:
static const JSC::MacroAssembler::Scale JSVAL_SCALE = JSC::MacroAssembler::TimesEight;
Address payloadOf(Address address) {
return address;
}
BaseIndex payloadOf(BaseIndex address) {
template <typename T>
T payloadOf(T address) {
return address;
}
@ -108,62 +105,40 @@ class Assembler : public BaseAssembler
}
}
void loadTypeTag(Address address, RegisterID reg) {
template <typename T>
void loadTypeTag(T address, RegisterID reg) {
load32(tagOf(address), reg);
}
void loadTypeTag(BaseIndex address, RegisterID reg) {
load32(tagOf(address), reg);
}
void storeTypeTag(ImmType imm, Address address) {
template <typename T>
void storeTypeTag(ImmType imm, T address) {
store32(imm, tagOf(address));
}
void storeTypeTag(ImmType imm, BaseIndex address) {
store32(imm, tagOf(address));
}
void storeTypeTag(RegisterID reg, Address address) {
template <typename T>
void storeTypeTag(RegisterID reg, T address) {
store32(reg, tagOf(address));
}
void storeTypeTag(RegisterID reg, BaseIndex address) {
store32(reg, tagOf(address));
}
void loadPayload(Address address, RegisterID reg) {
template <typename T>
void loadPayload(T address, RegisterID reg) {
load32(payloadOf(address), reg);
}
void loadPayload(BaseIndex address, RegisterID reg) {
load32(payloadOf(address), reg);
}
void storePayload(RegisterID reg, Address address) {
template <typename T>
void storePayload(RegisterID reg, T address) {
store32(reg, payloadOf(address));
}
void storePayload(RegisterID reg, BaseIndex address) {
store32(reg, payloadOf(address));
}
void storePayload(Imm32 imm, Address address) {
template <typename T>
void storePayload(Imm32 imm, T address) {
store32(imm, payloadOf(address));
}
/* Loads type first, then payload, returning label after type load. */
Label loadValueAsComponents(Address address, RegisterID type, RegisterID payload) {
JS_ASSERT(address.base != type);
loadTypeTag(address, type);
Label l = label();
loadPayload(address, payload);
return l;
}
Label loadValueAsComponents(BaseIndex address, RegisterID type, RegisterID payload) {
JS_ASSERT(address.base != type);
JS_ASSERT(address.index != type);
template <typename T>
Label loadValueAsComponents(T address, RegisterID type, RegisterID payload) {
JS_ASSERT(!addressUsesRegister(address, type));
loadTypeTag(address, type);
Label l = label();
loadPayload(address, payload);
@ -173,7 +148,8 @@ class Assembler : public BaseAssembler
/*
* Stores type first, then payload.
*/
Label storeValue(const Value &v, Address address) {
template <typename T>
Label storeValue(const Value &v, T address) {
jsval_layout jv;
jv.asBits = JSVAL_BITS(Jsvalify(v));
@ -183,37 +159,20 @@ class Assembler : public BaseAssembler
return l;
}
Label storeValue(const Value &v, BaseIndex address) {
jsval_layout jv;
jv.asBits = JSVAL_BITS(Jsvalify(v));
store32(ImmTag(jv.s.tag), tagOf(address));
Label l = label();
store32(Imm32(jv.s.payload.u32), payloadOf(address));
return l;
}
void storeValueFromComponents(RegisterID type, RegisterID payload, Address address) {
template <typename T>
void storeValueFromComponents(RegisterID type, RegisterID payload, T address) {
storeTypeTag(type, address);
storePayload(payload, address);
}
void storeValueFromComponents(RegisterID type, RegisterID payload, BaseIndex address) {
template <typename T>
void storeValueFromComponents(ImmType type, RegisterID payload, T address) {
storeTypeTag(type, address);
storePayload(payload, address);
}
void storeValueFromComponents(ImmType type, RegisterID payload, Address address) {
storeTypeTag(type, address);
storePayload(payload, address);
}
void storeValueFromComponents(ImmType type, RegisterID payload, BaseIndex address) {
storeTypeTag(type, address);
storePayload(payload, address);
}
Label storeValue(const ValueRemat &vr, Address address) {
template <typename T>
Label storeValue(const ValueRemat &vr, T address) {
if (vr.isConstant) {
return storeValue(Valueify(vr.u.v), address);
} else {

Просмотреть файл

@ -145,6 +145,7 @@ class SetPropCompiler : public PICStubCompiler
JSObject *obj;
JSAtom *atom;
VoidStubUInt32 stub;
int lastStubSecondShapeGuard;
static int32 dslotsLoadOffset(ic::PICInfo &pic) {
#if defined JS_NUNBOX32
@ -209,7 +210,8 @@ class SetPropCompiler : public PICStubCompiler
public:
SetPropCompiler(VMFrame &f, JSScript *script, JSObject *obj, ic::PICInfo &pic, JSAtom *atom,
VoidStubUInt32 stub)
: PICStubCompiler("setprop", f, script, pic), obj(obj), atom(atom), stub(stub)
: PICStubCompiler("setprop", f, script, pic), obj(obj), atom(atom), stub(stub),
lastStubSecondShapeGuard(pic.secondShapeGuard)
{ }
bool disable(const char *reason)
@ -291,12 +293,15 @@ class SetPropCompiler : public PICStubCompiler
else
shapeGuardJumpOffset = pic.shapeGuard + inlineShapeJump();
repatcher.relink(shapeGuardJumpOffset, cs);
if (lastStubSecondShapeGuard)
repatcher.relink(lastStubSecondShapeGuard, cs);
}
bool generateStub(uint32 initialShape, const Shape *shape, bool adding)
{
/* Exits to the slow path. */
Vector<Jump, 8> slowExits(f.cx);
Vector<Jump, 8> otherGuards(f.cx);
Assembler masm;
@ -309,8 +314,6 @@ class SetPropCompiler : public PICStubCompiler
Label start = masm.label();
Jump shapeGuard = masm.branch32_force32(Assembler::NotEqual, pic.shapeReg,
Imm32(initialShape));
if (!slowExits.append(shapeGuard))
return false;
#if defined JS_NUNBOX32
DBGLABEL(dbgStubShapeJump);
@ -321,8 +324,7 @@ class SetPropCompiler : public PICStubCompiler
JS_ASSERT_IF(!shape->hasDefaultSetter(), obj->getClass() == &js_CallClass);
Jump rebrand;
Jump skipOver;
MaybeJump skipOver;
if (adding) {
JS_ASSERT(shape->hasSlot());
@ -339,21 +341,16 @@ class SetPropCompiler : public PICStubCompiler
#endif
/* Emit shape guards for the object's prototype chain. */
size_t chainLength = 0;
JSObject *proto = obj->getProto();
RegisterID lastReg = pic.objReg;
while (proto) {
masm.loadPtr(Address(pic.objReg, offsetof(JSObject, proto)), pic.shapeReg);
for (size_t i = 0; i < chainLength; i++)
masm.loadPtr(Address(pic.shapeReg, offsetof(JSObject, proto)), pic.shapeReg);
masm.loadShape(pic.shapeReg, pic.shapeReg);
Jump protoGuard = masm.branch32(Assembler::NotEqual, pic.shapeReg,
Imm32(proto->shape()));
if (!slowExits.append(protoGuard))
masm.loadPtr(Address(lastReg, offsetof(JSObject, proto)), pic.shapeReg);
Jump protoGuard = masm.guardShape(pic.shapeReg, proto->shape());
if (!otherGuards.append(protoGuard))
return false;
proto = proto->getProto();
chainLength++;
lastReg = pic.shapeReg;
}
if (pic.kind == ic::PICInfo::SETMETHOD) {
@ -427,7 +424,9 @@ class SetPropCompiler : public PICStubCompiler
masm.loadTypeTag(address, pic.shapeReg);
Jump skip = masm.testObject(Assembler::NotEqual, pic.shapeReg);
masm.loadPayload(address, pic.shapeReg);
rebrand = masm.testFunction(Assembler::Equal, pic.shapeReg);
Jump rebrand = masm.testFunction(Assembler::Equal, pic.shapeReg);
if (!slowExits.append(rebrand))
return false;
skip.linkTo(masm.label(), &masm);
pic.shapeRegHasBaseShape = false;
}
@ -470,6 +469,17 @@ class SetPropCompiler : public PICStubCompiler
}
Jump done = masm.jump();
// Common all secondary guards into one big exit.
MaybeJump slowExit;
if (otherGuards.length()) {
for (Jump *pj = otherGuards.begin(); pj != otherGuards.end(); ++pj)
pj->linkTo(masm.label(), &masm);
slowExit = masm.jump();
pic.secondShapeGuard = masm.distanceOf(masm.label()) - masm.distanceOf(start);
} else {
pic.secondShapeGuard = 0;
}
JSC::ExecutablePool *ep = getExecPool(masm.size());
if (!ep || !pic.execPools.append(ep)) {
if (ep)
@ -479,13 +489,14 @@ class SetPropCompiler : public PICStubCompiler
}
JSC::LinkBuffer buffer(&masm, ep);
buffer.link(shapeGuard, pic.slowPathStart);
if (slowExit.isSet())
buffer.link(slowExit.get(), pic.slowPathStart);
for (Jump *pj = slowExits.begin(); pj != slowExits.end(); ++pj)
buffer.link(*pj, pic.slowPathStart);
buffer.link(done, pic.storeBack);
if (!adding && shape->hasDefaultSetter() && (obj->brandedOrHasMethodBarrier()))
buffer.link(rebrand, pic.slowPathStart);
if (!shape->hasDefaultSetter())
buffer.link(skipOver, pic.storeBack);
if (skipOver.isSet())
buffer.link(skipOver.get(), pic.storeBack);
CodeLocationLabel cs = buffer.finalizeCodeAddendum();
JaegerSpew(JSpew_PICs, "generate setprop stub %p %d %d at %p\n",
(void*)&pic,
@ -721,14 +732,14 @@ class GetPropCompiler : public PICStubCompiler
VoidStub stub)
: PICStubCompiler("getprop", f, script, pic), obj(obj), atom(atom),
stub(JS_FUNC_TO_DATA_PTR(void *, stub)),
lastStubSecondShapeGuard(pic.u.get.secondShapeGuard)
lastStubSecondShapeGuard(pic.secondShapeGuard)
{ }
GetPropCompiler(VMFrame &f, JSScript *script, JSObject *obj, ic::PICInfo &pic, JSAtom *atom,
VoidStubUInt32 stub)
: PICStubCompiler("callprop", f, script, pic), obj(obj), atom(atom),
stub(JS_FUNC_TO_DATA_PTR(void *, stub)),
lastStubSecondShapeGuard(pic.u.get.secondShapeGuard)
lastStubSecondShapeGuard(pic.secondShapeGuard)
{ }
static void reset(ic::PICInfo &pic)
@ -1102,10 +1113,10 @@ class GetPropCompiler : public PICStubCompiler
Imm32(holder->shape()));
if (!shapeMismatches.append(j))
return false;
pic.u.get.secondShapeGuard = masm.distanceOf(masm.label()) - masm.distanceOf(start);
pic.secondShapeGuard = masm.distanceOf(masm.label()) - masm.distanceOf(start);
} else {
JS_ASSERT(holder->isNative()); /* Precondition: already checked. */
pic.u.get.secondShapeGuard = 0;
pic.secondShapeGuard = 0;
}
/* Load the value out of the object. */
@ -1289,7 +1300,7 @@ class GetElemCompiler : public PICStubCompiler
VoidStub stub)
: PICStubCompiler("getelem", f, script, pic), obj(obj), id(id),
stub(JS_FUNC_TO_DATA_PTR(void *, stub)),
lastStubSecondShapeGuard(pic.u.get.secondShapeGuard)
lastStubSecondShapeGuard(pic.secondShapeGuard)
{}
static void reset(ic::PICInfo &pic)
@ -1458,10 +1469,10 @@ class GetElemCompiler : public PICStubCompiler
Imm32(holder->shape()));
if (!shapeMismatches.append(j))
return false;
pic.u.get.secondShapeGuard = masm.distanceOf(masm.label()) - masm.distanceOf(start);
pic.secondShapeGuard = masm.distanceOf(masm.label()) - masm.distanceOf(start);
} else {
JS_ASSERT(holder->isNative()); /* Precondition: already checked. */
pic.u.get.secondShapeGuard = 0;
pic.secondShapeGuard = 0;
}
/* Load the value out of the object. */

Просмотреть файл

@ -220,15 +220,15 @@ struct PICInfo {
RegisterID idReg : 5; // only used in GETELEM PICs.
uint32 idRemat : 20;
bool idNeedsRemat : 1;
// Offset from start of stub to jump target of second shape guard as Nitro
// asm data location. This is 0 if there is only one shape guard in the
// last stub.
int secondShapeGuard;
} get;
ValueRemat vr;
} u;
// Offset from start of stub to jump target of second shape guard as Nitro
// asm data location. This is 0 if there is only one shape guard in the
// last stub.
int secondShapeGuard : 11;
Kind kind : 3;
// True if register R holds the base object shape along exits from the
@ -340,9 +340,9 @@ struct PICInfo {
hit = false;
inlinePathPatched = false;
if (kind == GET || kind == CALL || kind == GETELEM) {
u.get.secondShapeGuard = 0;
u.get.objNeedsRemat = false;
}
secondShapeGuard = 0;
shapeRegHasBaseShape = true;
stubsGenerated = 0;
releasePools();

Просмотреть файл

@ -78,19 +78,13 @@ class Assembler : public BaseAssembler
public:
static const JSC::MacroAssembler::Scale JSVAL_SCALE = JSC::MacroAssembler::TimesEight;
Address payloadOf(Address address) {
template <typename T>
T payloadOf(T address) {
return address;
}
BaseIndex payloadOf(BaseIndex address) {
return address;
}
Address valueOf(Address address) {
return address;
}
BaseIndex valueOf(BaseIndex address) {
template <typename T>
T valueOf(T address) {
return address;
}
@ -105,11 +99,8 @@ class Assembler : public BaseAssembler
loadValueAsComponents(address, type, data);
}
void loadValue(Address address, RegisterID dst) {
loadPtr(address, dst);
}
void loadValue(BaseIndex address, RegisterID dst) {
template <typename T>
void loadValue(T address, RegisterID dst) {
loadPtr(address, dst);
}
@ -122,7 +113,8 @@ class Assembler : public BaseAssembler
}
/* Returns a label after the one Value load. */
Label loadValueAsComponents(Address address, RegisterID type, RegisterID payload) {
template <typename T>
Label loadValueAsComponents(T address, RegisterID type, RegisterID payload) {
loadValue(address, type);
Label l = label();
@ -133,66 +125,36 @@ class Assembler : public BaseAssembler
return l;
}
Label loadValueAsComponents(BaseIndex address, RegisterID type, RegisterID payload) {
loadValue(address, type);
Label l = label();
move(Registers::PayloadMaskReg, payload);
andPtr(type, payload);
xorPtr(payload, type);
return l;
}
void storeValueFromComponents(RegisterID type, RegisterID payload, Address address) {
template <typename T>
void storeValueFromComponents(RegisterID type, RegisterID payload, T address) {
move(type, Registers::ValueReg);
orPtr(payload, Registers::ValueReg);
storeValue(Registers::ValueReg, address);
}
void storeValueFromComponents(RegisterID type, RegisterID payload, BaseIndex address) {
template <typename T>
void storeValueFromComponents(ImmShiftedTag type, RegisterID payload, T address) {
move(type, Registers::ValueReg);
orPtr(payload, Registers::ValueReg);
storeValue(Registers::ValueReg, address);
}
void storeValueFromComponents(ImmShiftedTag type, RegisterID payload, Address address) {
move(type, Registers::ValueReg);
orPtr(payload, Registers::ValueReg);
storeValue(Registers::ValueReg, address);
}
void storeValueFromComponents(ImmShiftedTag type, RegisterID payload, BaseIndex address) {
move(type, Registers::ValueReg);
orPtr(payload, Registers::ValueReg);
storeValue(Registers::ValueReg, address);
}
void loadTypeTag(Address address, RegisterID reg) {
template <typename T>
void loadTypeTag(T address, RegisterID reg) {
loadValue(address, reg);
convertValueToType(reg);
}
void loadTypeTag(BaseIndex address, RegisterID reg) {
loadValue(address, reg);
convertValueToType(reg);
}
void storeTypeTag(ImmShiftedTag imm, Address address) {
template <typename T>
void storeTypeTag(ImmShiftedTag imm, T address) {
loadValue(address, Registers::ValueReg);
convertValueToPayload(Registers::ValueReg);
orPtr(imm, Registers::ValueReg);
storePtr(Registers::ValueReg, valueOf(address));
}
void storeTypeTag(ImmShiftedTag imm, BaseIndex address) {
loadValue(address, Registers::ValueReg);
convertValueToPayload(Registers::ValueReg);
orPtr(imm, Registers::ValueReg);
storePtr(Registers::ValueReg, valueOf(address));
}
void storeTypeTag(RegisterID reg, Address address) {
template <typename T>
void storeTypeTag(RegisterID reg, T address) {
/* The type tag must be stored in shifted format. */
loadValue(address, Registers::ValueReg);
convertValueToPayload(Registers::ValueReg);
@ -200,33 +162,14 @@ class Assembler : public BaseAssembler
storePtr(Registers::ValueReg, valueOf(address));
}
void storeTypeTag(RegisterID reg, BaseIndex address) {
/* The type tag must be stored in shifted format. */
loadValue(address, Registers::ValueReg);
convertValueToPayload(Registers::ValueReg);
orPtr(reg, Registers::ValueReg);
storePtr(Registers::ValueReg, valueOf(address));
}
void loadPayload(Address address, RegisterID reg) {
template <typename T>
void loadPayload(T address, RegisterID reg) {
loadValue(address, reg);
convertValueToPayload(reg);
}
void loadPayload(BaseIndex address, RegisterID reg) {
loadValue(address, reg);
convertValueToPayload(reg);
}
void storePayload(RegisterID reg, Address address) {
/* Not for doubles. */
loadValue(address, Registers::ValueReg);
convertValueToType(Registers::ValueReg);
orPtr(reg, Registers::ValueReg);
storePtr(Registers::ValueReg, valueOf(address));
}
void storePayload(RegisterID reg, BaseIndex address) {
template <typename T>
void storePayload(RegisterID reg, T address) {
/* Not for doubles. */
loadValue(address, Registers::ValueReg);
convertValueToType(Registers::ValueReg);
@ -234,34 +177,27 @@ class Assembler : public BaseAssembler
storePtr(Registers::ValueReg, valueOf(address));
}
void storePayload(Imm64 imm, Address address) {
template <typename T>
void storePayload(Imm64 imm, T address) {
/* Not for doubles. */
storePtr(imm, valueOf(address));
}
void storeValue(RegisterID reg, Address address) {
template <typename T>
void storeValue(RegisterID reg, T address) {
storePtr(reg, valueOf(address));
}
void storeValue(RegisterID reg, BaseIndex address) {
storePtr(reg, valueOf(address));
}
void storeValue(const Value &v, Address address) {
template <typename T>
void storeValue(const Value &v, T address) {
jsval_layout jv;
jv.asBits = JSVAL_BITS(Jsvalify(v));
storePtr(Imm64(jv.asBits), valueOf(address));
}
void storeValue(const Value &v, BaseIndex address) {
jsval_layout jv;
jv.asBits = JSVAL_BITS(Jsvalify(v));
storePtr(Imm64(jv.asBits), valueOf(address));
}
void storeValue(const ValueRemat &vr, Address address) {
template <typename T>
void storeValue(const ValueRemat &vr, T address) {
if (vr.isConstant)
storeValue(Valueify(vr.u.v), address);
else if (vr.u.s.isTypeKnown)
@ -376,8 +312,8 @@ class Assembler : public BaseAssembler
}
};
} /* namespace js */
} /* namespace mjit */
} /* namespace js */
#endif

Просмотреть файл

@ -152,7 +152,7 @@ Recompiler::recompile()
/* If we get this far, the script is live, and we better be safe to re-jit. */
JS_ASSERT(cx->compartment->debugMode);
Compiler c(cx, script, firstFrame->fun(), &firstFrame->scopeChain());
Compiler c(cx, script, firstFrame->maybeFun(), &firstFrame->scopeChain());
if (c.Compile() != Compile_Okay)
return false;

Просмотреть файл

@ -1 +1 @@
3cdd8540979dda01630e0676b273250311e26284
2c8245cb9048cc79ca561ec7a7b014249788247c

Просмотреть файл

@ -1956,6 +1956,7 @@ namespace nanojit
}
break;
case LIR_callv:
case LIR_calli:
CASE64(LIR_callq:)
case LIR_calld:

Просмотреть файл

@ -1483,9 +1483,10 @@ namespace nanojit
live.add(ins->oprnd3(), 0);
break;
case LIR_callv:
case LIR_calli:
case LIR_calld:
CASE64(LIR_callq:)
case LIR_calld:
for (int i = 0, argc = ins->argc(); i < argc; i++)
live.add(ins->arg(i), 0);
break;
@ -1739,9 +1740,10 @@ namespace nanojit
VMPI_snprintf(s, n, "%s", lirNames[op]);
break;
case LIR_callv:
case LIR_calli:
case LIR_calld:
CASE64(LIR_callq:) {
CASE64(LIR_callq:)
case LIR_calld: {
const CallInfo* call = i->callInfo();
int32_t argc = i->argc();
int32_t m = int32_t(n); // Windows doesn't have 'ssize_t'
@ -3394,6 +3396,14 @@ namespace nanojit
return out->insImmD(d);
}
static const char* argtypeNames[] = {
"void", // ARGTYPE_V = 0
"int32_t", // ARGTYPE_I = 1
"uint32_t", // ARGTYPE_UI = 2
"uint64_t", // ARGTYPE_Q = 3
"double" // ARGTYPE_D = 4
};
LIns* ValidateWriter::insCall(const CallInfo *ci, LIns* args0[])
{
ArgType argTypes[MAXARGS];
@ -3402,6 +3412,27 @@ namespace nanojit
LIns* args[MAXARGS]; // in left-to-right order, unlike args0[]
LOpcode op = getCallOpcode(ci);
ArgType retType = ci->returnType();
if ((op == LIR_callv) != (retType == ARGTYPE_V) ||
(op == LIR_calli) != (retType == ARGTYPE_UI ||
retType == ARGTYPE_I) ||
#ifdef NANOJIT_64BIT
(op == LIR_callq) != (retType == ARGTYPE_Q) ||
#endif
(op == LIR_calld) != (retType == ARGTYPE_D)) {
NanoAssertMsgf(0,
"LIR structure error (%s): return type mismatch: opcode %s with %s return type",
whereInPipeline, lirNames[op], argtypeNames[retType]);
}
if (op == LIR_callv && ci->_isPure) {
// Since nobody can use the result of a void call, any pure call
// would just be dead. This is probably a mistake.
NanoAssertMsgf(0,
"LIR structure error (%s): LIR_callv must only be used with nonpure functions.",
whereInPipeline);
}
if (ci->_isPure && ci->_storeAccSet != ACCSET_NONE)
errorAccSet(ci->_name, ci->_storeAccSet, "it should be ACCSET_NONE for pure functions");

Просмотреть файл

@ -509,13 +509,13 @@ namespace nanojit
inline LOpcode getCallOpcode(const CallInfo* ci) {
LOpcode op = LIR_callp;
switch (ci->returnType()) {
case ARGTYPE_V: op = LIR_callp; break;
case ARGTYPE_V: op = LIR_callv; break;
case ARGTYPE_I:
case ARGTYPE_UI: op = LIR_calli; break;
case ARGTYPE_D: op = LIR_calld; break;
#ifdef NANOJIT_64BIT
case ARGTYPE_Q: op = LIR_callq; break;
#endif
case ARGTYPE_D: op = LIR_calld; break;
default: NanoAssert(0); break;
}
return op;
@ -927,7 +927,8 @@ namespace nanojit
return isCmpOpcode(opcode());
}
bool isCall() const {
return isop(LIR_calli) ||
return isop(LIR_callv) ||
isop(LIR_calli) ||
#if defined NANOJIT_64BIT
isop(LIR_callq) ||
#endif

Просмотреть файл

@ -161,11 +161,10 @@ OP_UN(32)
//---------------------------------------------------------------------------
// Calls
//---------------------------------------------------------------------------
OP___(calli, 33, C, I, -1) // call subroutine that returns an int
OP_64(callq, 34, C, Q, -1) // call subroutine that returns a quad
OP___(calld, 35, C, D, -1) // call subroutine that returns a double
OP_UN(36)
OP___(callv, 33, C, V, -1) // call subroutine that returns void
OP___(calli, 34, C, I, -1) // call subroutine that returns an int
OP_64(callq, 35, C, Q, -1) // call subroutine that returns a quad
OP___(calld, 36, C, D, -1) // call subroutine that returns a double
//---------------------------------------------------------------------------
// Branches and labels
@ -210,7 +209,9 @@ OP_UN(52)
// Comparisons
//---------------------------------------------------------------------------
// Within each size group, order must be preserved so that, except for eq*, (op
// All comparisons return an int: 0 on failure and 1 on success.
//
// Within each type group, order must be preserved so that, except for eq*, (op
// ^ 1) gives the opposite one (eg. lt ^ 1 == gt). eq* must have odd numbers
// for this to work. They must also remain contiguous so that opcode range
// checking works correctly. Static assertions in LIR.h check these

Просмотреть файл

@ -818,7 +818,7 @@ Assembler::asm_call(LIns* ins)
* used here with the ultimate VFP register, and not R0/R1, which
* potentially allows for R0/R1 to get corrupted as described.
*/
} else {
} else if (!ins->isop(LIR_callv)) {
prepareResultReg(ins, rmask(retRegs[0]));
// Immediately free the resources as we need to re-use the register for
// the arguments.
@ -837,7 +837,7 @@ Assembler::asm_call(LIns* ins)
// If we aren't using VFP, assert that the LIR operation is an integer
// function call.
NanoAssert(ARM_VFP || ins->isop(LIR_calli));
NanoAssert(ARM_VFP || ins->isop(LIR_callv) || ins->isop(LIR_calli));
// If we're using VFP, and the return type is a double, it'll come back in
// R0/R1. We need to either place it in the result fp reg, or store it.

Просмотреть файл

@ -1712,27 +1712,28 @@ namespace nanojit
void
Assembler::asm_call(LIns* ins)
{
Register rr;
LOpcode op = ins->opcode();
if (!ins->isop(LIR_callv)) {
Register rr;
LOpcode op = ins->opcode();
switch (op) {
case LIR_calld:
NanoAssert(cpu_has_fpu);
rr = FV0;
break;
case LIR_calli:
rr = retRegs[0];
break;
default:
BADOPCODE(op);
return;
switch (op) {
case LIR_calli:
rr = retRegs[0];
break;
case LIR_calld:
NanoAssert(cpu_has_fpu);
rr = FV0;
break;
default:
BADOPCODE(op);
return;
}
deprecated_prepResultReg(ins, rmask(rr));
}
deprecated_prepResultReg(ins, rmask(rr));
// Do this after we've handled the call result, so we don't
// force the call result to be spilled unnecessarily.
evictScratchRegsExcept(0);
const CallInfo* ci = ins->callInfo();

Некоторые файлы не были показаны из-за слишком большого количества измененных файлов Показать больше