Mirror of https://github.com/mozilla/gecko-dev.git
Backed out 13 changesets (bug 1724031) for causing dt failures in js/src/gc/Cell.h CLOSED TREE

Backed out changeset 3a79fb5a7a07 (bug 1724031)
Backed out changeset 7991bfb259ae (bug 1724031)
Backed out changeset 5673e5c4f996 (bug 1724031)
Backed out changeset 75f1f67429e1 (bug 1724031)
Backed out changeset 3327c1d290b7 (bug 1724031)
Backed out changeset ae06621c5728 (bug 1724031)
Backed out changeset bb4c2384f676 (bug 1724031)
Backed out changeset 1a4a5c11b9f1 (bug 1724031)
Backed out changeset 58dd7d7a733e (bug 1724031)
Backed out changeset e4d4251e86a3 (bug 1724031)
Backed out changeset a7f3f8eb7ac0 (bug 1724031)
Backed out changeset d0fe0b0b2b84 (bug 1724031)
Backed out changeset 0cca6901539d (bug 1724031)
Parent 31b4b6a762
Commit c8ea016b87
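Context for the diff below — a minimal, simplified sketch (not taken from the patch; type and field names are approximate stand-ins) of the two JSFunction data layouts this backout switches between. Bug 1724031 packed nargs/flags, the native-or-environment pointer, the JitInfo-or-script pointer, and the atom into the object's fixed slots; the code restored here keeps them as dedicated fields on the function object:

#include <cstdint>

// Stand-in for JS::Value (assumption, for illustration only).
struct Value { uint64_t bits = 0; };

// Layout restored by this backout: dedicated fields on the function object.
struct FunctionWithFields {
  uint16_t nargs;         // formal argument count, read with load16ZeroExtend
  uint16_t flags;         // FunctionFlags; offsetOfFlags() == offsetOfNargs() + 2
  void* nativeOrEnv;      // native entry point, or environment if interpreted
  void* jitInfoOrScript;  // JSJitInfo* for natives, BaseScript* otherwise
  void* atom;             // display atom, stored as a plain pointer
};

// Layout introduced by bug 1724031 (being backed out): the same data packed
// into the object's fixed slots; nargs and flags share one PrivateUint32 as
// (nargs << ArgCountShift) | flags.
struct FunctionWithSlots {
  enum Slot {
    FlagsAndArgCountSlot,
    NativeFuncOrInterpretedEnvSlot,
    NativeJitInfoOrInterpretedScriptSlot,
    AtomSlot,
    SlotCount
  };
  Value fixedSlots[SlotCount];
};

The GC alloc-kind, CacheIR, assembler, and GDB pretty-printer hunks below all follow from this layout difference.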
@@ -33,9 +33,9 @@ namespace js {
class PropertyResult;

// These are equal to js::FunctionClass / js::ExtendedFunctionClass.
// This is equal to JSFunction::class_. Use it in places where you don't want
// to #include jsfun.h.
extern JS_PUBLIC_DATA const JSClass* const FunctionClassPtr;
extern JS_PUBLIC_DATA const JSClass* const FunctionExtendedClassPtr;

} // namespace js

@@ -686,9 +686,7 @@ struct alignas(js::gc::JSClassAlignBytes) JSClass {
bool emulatesUndefined() const { return flags & JSCLASS_EMULATES_UNDEFINED; }

bool isJSFunction() const {
return this == js::FunctionClassPtr || this == js::FunctionExtendedClassPtr;
}
bool isJSFunction() const { return this == js::FunctionClassPtr; }

bool nonProxyCallable() const {
MOZ_ASSERT(!isProxyObject());
@@ -43,7 +43,7 @@
#define JS_FOR_PROTOTYPES_(REAL, IMAGINARY, REAL_IF_INTL) \
IMAGINARY(Null, dummy) \
REAL(Object, OCLASP(Plain)) \
REAL(Function, &FunctionClass) \
REAL(Function, &JSFunction::class_) \
REAL(Array, OCLASP(Array)) \
REAL(Boolean, OCLASP(Boolean)) \
REAL(JSON, CLASP(JSON)) \
@@ -15,7 +15,6 @@

#include "js/CallArgs.h" // JSNative
#include "js/shadow/Object.h" // JS::shadow::Object
#include "js/Value.h" // JS::Value

class JS_PUBLIC_API JSFunction;
class JSJitInfo;

@@ -24,26 +23,24 @@ namespace JS {

namespace shadow {

struct Function : shadow::Object {
enum {
FlagsAndArgCountSlot,
NativeFuncOrInterpretedEnvSlot,
NativeJitInfoOrInterpretedScriptSlot,
AtomSlot
};
uint32_t flagsAndArgCount() const {
return fixedSlots()[FlagsAndArgCountSlot].toPrivateUint32();
}

void* jitInfoOrScript() const {
return fixedSlots()[NativeJitInfoOrInterpretedScriptSlot].toPrivate();
}

void setJitInfoOrScript(void* ptr) {
fixedSlots()[NativeJitInfoOrInterpretedScriptSlot] = JS::PrivateValue(ptr);
}
struct Function {
shadow::Object base;
uint16_t nargs;
uint16_t flags;
/* Used only for natives */
JSNative native;
const JSJitInfo* jitinfo;
void* _1;
};

inline Function* AsShadowFunction(JSFunction* fun) {
return reinterpret_cast<Function*>(fun);
}

inline const Function* AsShadowFunction(const JSFunction* fun) {
return reinterpret_cast<const Function*>(fun);
}

} // namespace shadow

} // namespace JS
@@ -567,7 +567,7 @@ static MOZ_ALWAYS_INLINE JSString* GetBuiltinTagFast(JSObject* obj,
return cx->names().objectArray;
}

if (clasp->isJSFunction()) {
if (clasp == &JSFunction::class_) {
return cx->names().objectFunction;
}
@@ -989,6 +989,10 @@ static JSFunction* CreateFunctionFast(JSContext* cx,
fun->initAtom(atom);
}

if (flags.isExtended()) {
fun->initializeExtended();
}

return fun;
}

@@ -1102,13 +1106,6 @@ static bool InstantiateModuleObject(JSContext* cx,
return stencil.moduleMetadata->initModule(cx, atomCache, module);
}

static Shape* GetFunctionShape(JSContext* cx, const JSClass* clasp,
HandleObject proto, gc::AllocKind kind) {
size_t nfixed = GetGCKindSlots(kind);
return SharedShape::getInitialShape(
cx, clasp, cx->realm(), TaggedProto(proto), nfixed, ObjectFlags());
}

// Instantiate JSFunctions for each FunctionBox.
static bool InstantiateFunctions(JSContext* cx, CompilationAtomCache& atomCache,
const CompilationStencil& stencil,

@@ -1128,17 +1125,11 @@ static bool InstantiateFunctions(JSContext* cx, CompilationAtomCache& atomCache,
if (!proto) {
return false;
}

RootedShape functionShape(
cx, GetFunctionShape(cx, &FunctionClass, proto, gc::AllocKind::FUNCTION));
if (!functionShape) {
return false;
}

RootedShape extendedShape(cx,
GetFunctionShape(cx, &ExtendedFunctionClass, proto,
gc::AllocKind::FUNCTION_EXTENDED));
if (!extendedShape) {
RootedShape shape(
cx, SharedShape::getInitialShape(cx, &JSFunction::class_, cx->realm(),
TaggedProto(proto),
/* nfixed = */ 0, ObjectFlags()));
if (!shape) {
return false;
}

@@ -1156,18 +1147,11 @@ static bool InstantiateFunctions(JSContext* cx, CompilationAtomCache& atomCache,
!scriptExtra.immutableFlags.hasFlag(ImmutableFlags::IsGenerator) &&
!scriptStencil.functionFlags.isAsmJSNative();

JSFunction* fun;
if (useFastPath) {
HandleShape shape = scriptStencil.functionFlags.isExtended()
? extendedShape
: functionShape;
fun =
CreateFunctionFast(cx, atomCache, shape, scriptStencil, scriptExtra);
} else {
fun = CreateFunction(cx, atomCache, stencil, scriptStencil, scriptExtra,
index);
}

JSFunction* fun = useFastPath
? CreateFunctionFast(cx, atomCache, shape,
scriptStencil, scriptExtra)
: CreateFunction(cx, atomCache, stencil,
scriptStencil, scriptExtra, index);
if (!fun) {
return false;
}
@@ -54,8 +54,8 @@ namespace gc {
// clang-format off
#define FOR_EACH_OBJECT_ALLOCKIND(D) \
/* AllocKind TraceKind TypeName SizedType BGFinal Nursery Compact */ \
D(FUNCTION, Object, JSObject, JSObject_Slots4, true, true, true) \
D(FUNCTION_EXTENDED, Object, JSObject, JSObject_Slots6, true, true, true) \
D(FUNCTION, Object, JSObject, JSFunction, true, true, true) \
D(FUNCTION_EXTENDED, Object, JSObject, FunctionExtended, true, true, true) \
D(OBJECT0, Object, JSObject, JSObject_Slots0, false, false, true) \
D(OBJECT0_BACKGROUND, Object, JSObject, JSObject_Slots0, true, true, true) \
D(OBJECT2, Object, JSObject, JSObject_Slots2, false, false, true) \
@@ -514,12 +514,12 @@ class PreBarriered : public WriteBarriered<T> {

DECLARE_POINTER_ASSIGN_AND_MOVE_OPS(PreBarriered, T);

private:
void set(const T& v) {
AssertTargetIsNotGray(v);
setUnchecked(v);
}

private:
void setUnchecked(const T& v) {
this->pre();
this->value = v;

@@ -594,12 +594,12 @@ class GCPtr : public WriteBarriered<T> {

DECLARE_POINTER_ASSIGN_OPS(GCPtr, T);

private:
void set(const T& v) {
AssertTargetIsNotGray(v);
setUnchecked(v);
}

private:
void setUnchecked(const T& v) {
this->pre();
T tmp = this->value;

@@ -688,17 +688,17 @@ class HeapPtr : public WriteBarriered<T> {

DECLARE_POINTER_ASSIGN_AND_MOVE_OPS(HeapPtr, T);

void set(const T& v) {
AssertTargetIsNotGray(v);
setUnchecked(v);
}

/* Make this friend so it can access pre() and post(). */
template <class T1, class T2>
friend inline void BarrieredSetPair(Zone* zone, HeapPtr<T1*>& v1, T1* val1,
HeapPtr<T2*>& v2, T2* val2);

protected:
void set(const T& v) {
AssertTargetIsNotGray(v);
setUnchecked(v);
}

void setUnchecked(const T& v) {
this->pre();
postBarrieredSet(v);
@@ -32,9 +32,12 @@ static inline AllocKind GetGCObjectKind(size_t numSlots) {
}

static inline AllocKind GetGCObjectKind(const JSClass* clasp) {
if (clasp == FunctionClassPtr) {
return AllocKind::FUNCTION;
}

MOZ_ASSERT(!clasp->isProxyObject(),
"Proxies should use GetProxyGCObjectKind");
MOZ_ASSERT(!clasp->isJSFunction());

uint32_t nslots = JSCLASS_RESERVED_SLOTS(clasp);
return GetGCObjectKind(nslots);

@@ -88,18 +91,17 @@ static inline size_t GetGCKindSlots(AllocKind thingKind) {
// Using a switch in hopes that thingKind will usually be a compile-time
// constant.
switch (thingKind) {
case AllocKind::FUNCTION:
case AllocKind::OBJECT0:
case AllocKind::OBJECT0_BACKGROUND:
return 0;
case AllocKind::FUNCTION_EXTENDED:
case AllocKind::OBJECT2:
case AllocKind::OBJECT2_BACKGROUND:
return 2;
case AllocKind::FUNCTION:
case AllocKind::OBJECT4:
case AllocKind::OBJECT4_BACKGROUND:
return 4;
case AllocKind::FUNCTION_EXTENDED:
return 6;
case AllocKind::OBJECT8:
case AllocKind::OBJECT8_BACKGROUND:
return 8;

@@ -114,6 +116,18 @@ static inline size_t GetGCKindSlots(AllocKind thingKind) {
}
}

static inline size_t GetGCKindSlots(AllocKind thingKind, const JSClass* clasp) {
/*
* Functions have a larger alloc kind than AllocKind::OBJECT to reserve
* space for the extra fields in JSFunction, but have no fixed slots.
*/
if (clasp == FunctionClassPtr) {
return 0;
}

return GetGCKindSlots(thingKind);
}

static inline size_t GetGCKindBytes(AllocKind thingKind) {
return sizeof(JSObject_Slots0) + GetGCKindSlots(thingKind) * sizeof(Value);
}
@@ -7,30 +7,24 @@
import re
import gdb
import mozilla.prettyprinters as prettyprinters
from mozilla.jsval import JSValue
from mozilla.prettyprinters import ptr_pretty_printer, ref_pretty_printer
from mozilla.Root import deref
from mozilla.CellHeader import get_header_ptr

prettyprinters.clear_module_printers(__name__)


class JSObjectTypeCache(object):
def __init__(self):
def __init__(self, value, cache):
object_flag = gdb.lookup_type("js::ObjectFlag")
self.objectflag_IsUsedAsPrototype = prettyprinters.enum_value(
object_flag, "js::ObjectFlag::IsUsedAsPrototype"
)
self.value_ptr_t = gdb.lookup_type("JS::Value").pointer()
self.func_ptr_t = gdb.lookup_type("JSFunction").pointer()
self.func_ptr_type = gdb.lookup_type("JSFunction").pointer()
self.class_NON_NATIVE = gdb.parse_and_eval("JSClass::NON_NATIVE")
self.BaseShape_ptr_t = gdb.lookup_type("js::BaseShape").pointer()
self.Shape_ptr_t = gdb.lookup_type("js::Shape").pointer()
self.JSClass_ptr_t = gdb.lookup_type("JSClass").pointer()
self.JSScript_ptr_t = gdb.lookup_type("JSScript").pointer()
self.JSFunction_AtomSlot = gdb.parse_and_eval("JSFunction::AtomSlot")
self.JSFunction_NativeJitInfoOrInterpretedScriptSlot = gdb.parse_and_eval(
"JSFunction::NativeJitInfoOrInterpretedScriptSlot"
)


# There should be no need to register this for JSFunction as well, since we

@@ -46,7 +40,7 @@ class JSObjectPtrOrRef(prettyprinters.Pointer):
def __init__(self, value, cache):
super(JSObjectPtrOrRef, self).__init__(value, cache)
if not cache.mod_JSObject:
cache.mod_JSObject = JSObjectTypeCache()
cache.mod_JSObject = JSObjectTypeCache(value, cache)
self.otc = cache.mod_JSObject

def summary(self):

@@ -73,7 +67,9 @@ class JSObjectPtrOrRef(prettyprinters.Pointer):
concrete_type = function.type.strip_typedefs()
if concrete_type.code == gdb.TYPE_CODE_REF:
function = function.address
name = get_function_name(function, self.cache)
function = function.cast(self.otc.func_ptr_type)
atom = deref(function["atom_"])
name = str(atom) if atom else "<unnamed>"
return "[object {}{}]{}".format(
class_name,
" " + name if name else "",

@@ -81,37 +77,6 @@ class JSObjectPtrOrRef(prettyprinters.Pointer):
)


def get_function_name(function, cache):
if not cache.mod_JSObject:
cache.mod_JSObject = JSObjectTypeCache()
otc = cache.mod_JSObject

function = function.cast(otc.func_ptr_t)
fixed_slots = (function + 1).cast(otc.value_ptr_t)
atom_value = JSValue(fixed_slots[otc.JSFunction_AtomSlot], cache)

if atom_value.is_undefined():
return "<unnamed>"

return str(atom_value.get_string())


def get_function_script(function, cache):
if not cache.mod_JSObject:
cache.mod_JSObject = JSObjectTypeCache()
otc = cache.mod_JSObject

function = function.cast(otc.func_ptr_t)
fixed_slots = (function + 1).cast(otc.value_ptr_t)
slot = otc.JSFunction_NativeJitInfoOrInterpretedScriptSlot
script_value = JSValue(fixed_slots[slot], cache)

if script_value.is_undefined():
return 0

return script_value.get_private()


@ref_pretty_printer("JSObject")
def JSObjectRef(value, cache):
return JSObjectPtrOrRef(value, cache)
@@ -230,14 +230,3 @@ class JSValue(object):
else:
value = "unrecognized!"
return "$JS::Value(%s)" % (value,)

def is_undefined(self):
return self.box.tag() == self.jtc.UNDEFINED

def get_string(self):
assert self.box.tag() == self.jtc.STRING
return self.box.as_address().cast(self.cache.JSString_ptr_t)

def get_private(self):
assert self.box.tag() == self.jtc.DOUBLE
return self.box.asBits
@@ -7,8 +7,6 @@
import gdb
import gdb.types
from gdb.FrameDecorator import FrameDecorator
from mozilla.JSObject import get_function_name, get_function_script
from mozilla.prettyprinters import TypeCache
import platform

# For ease of use in Python 2, we use "long" instead of "int"

@@ -67,7 +65,7 @@ class UnwinderTypeCacheFrameType(object):
return self.tc.__getattr__("FrameType::" + name)


class UnwinderTypeCache(TypeCache):
class UnwinderTypeCache(object):
# All types and symbols that we need are attached to an object that we
# can dispose of as needed.

@@ -75,7 +73,6 @@ class UnwinderTypeCache(TypeCache):
self.d = None
self.frame_enum_names = {}
self.frame_class_types = {}
super(UnwinderTypeCache, self).__init__(None)

# We take this bizarre approach to defer trying to look up any
# symbols until absolutely needed. Without this, the loading

@@ -85,8 +82,6 @@ class UnwinderTypeCache(TypeCache):
self.initialize()
if name == "frame_type":
return UnwinderTypeCacheFrameType(self)
if name not in self.d:
return None
return self.d[name]

def value(self, name):

@@ -118,6 +113,7 @@ class UnwinderTypeCache(TypeCache):
"CalleeToken_FunctionConstructing"
)
self.d["CalleeToken_Script"] = self.jit_value("CalleeToken_Script")
self.d["JSFunction"] = gdb.lookup_type("JSFunction").pointer()
self.d["JSScript"] = gdb.lookup_type("JSScript").pointer()
self.d["Value"] = gdb.lookup_type("JS::Value")

@@ -178,9 +174,14 @@ class JitFrameDecorator(FrameDecorator):
tag == self.cache.CalleeToken_Function
or tag == self.cache.CalleeToken_FunctionConstructing
):
value = gdb.Value(calleetoken)
function = get_function_name(value, self.cache)
script = get_function_script(value, self.cache)
fptr = gdb.Value(calleetoken).cast(self.cache.JSFunction)
try:
atom = fptr["atom_"]
if atom:
function = str(atom)
except gdb.MemoryError:
function = "(could not read function name)"
script = fptr["u"]["scripted"]["s"]["script_"]
elif tag == self.cache.CalleeToken_Script:
script = gdb.Value(calleetoken).cast(self.cache.JSScript)
return {"function": function, "script": script}
@@ -76,7 +76,7 @@ assertEq(tByteSize([1, 2, 3, 4, 5, 6, 7]), s(112, 120));
assertEq(tByteSize([1, 2, 3, 4, 5, 6, 7, 8]), s(112, 120));

// Various forms of functions.
assertEq(tByteSize(function () {}), s(48, 56));
assertEq(tByteSize(function () {}.bind()), s(64, 72));
assertEq(tByteSize(() => 1), s(64, 72));
assertEq(tByteSize(Math.sin), s(48, 56));
assertEq(tByteSize(function () {}), s(32, 56));
assertEq(tByteSize(function () {}.bind()), s(48, 72));
assertEq(tByteSize(() => 1), s(48, 72));
assertEq(tByteSize(Math.sin), s(32, 56));
@@ -31,7 +31,7 @@ print(JSON.stringify(findPath(c, c.obj)));
function f(x) { return function g(y) { return x+y; }; }
var o = {}
var gc = f(o);
Match.Pattern([{node: gc, edge: "**UNKNOWN SLOT 1**"},
Match.Pattern([{node: gc, edge: "fun_environment"},
{node: Match.Pattern.ANY, edge: "x"}])
.assert(findPath(gc, o));
print(JSON.stringify(findPath(gc, o)));
@@ -332,8 +332,7 @@ bool BaselineCacheIRCompiler::emitGuardFunctionScript(
}

Address addr(stubAddress(expectedOffset));
masm.loadPrivate(Address(fun, JSFunction::offsetOfJitInfoOrScript()),
scratch);
masm.loadPtr(Address(fun, JSFunction::offsetOfBaseScript()), scratch);
masm.branchPtr(Assembler::NotEqual, addr, scratch, failure->label());
return true;
}

@@ -498,8 +497,7 @@ bool BaselineCacheIRCompiler::emitCallScriptedGetterShared(

// Handle arguments underflow.
Label noUnderflow;
masm.load32(Address(callee, JSFunction::offsetOfFlagsAndArgCount()), callee);
masm.rshift32(Imm32(JSFunction::ArgCountShift), callee);
masm.load16ZeroExtend(Address(callee, JSFunction::offsetOfNargs()), callee);
masm.branch32(Assembler::Equal, callee, Imm32(0), &noUnderflow);

// Call the arguments rectifier.

@@ -1568,9 +1566,7 @@ bool BaselineCacheIRCompiler::emitCallScriptedSetterShared(
// can be used as scratch.
Label noUnderflow;
Register scratch2 = val.scratchReg();
masm.load32(Address(callee, JSFunction::offsetOfFlagsAndArgCount()),
scratch2);
masm.rshift32(Imm32(JSFunction::ArgCountShift), scratch2);
masm.load16ZeroExtend(Address(callee, JSFunction::offsetOfNargs()), scratch2);
masm.branch32(Assembler::BelowOrEqual, scratch2, Imm32(1), &noUnderflow);

// Call the arguments rectifier.

@@ -2569,15 +2565,12 @@ bool BaselineCacheIRCompiler::emitCallNativeShared(
masm.callWithABI(redirectedAddr);
#else
if (*ignoresReturnValue) {
masm.loadPrivate(
Address(calleeReg, JSFunction::offsetOfJitInfoOrScript()),
calleeReg);
masm.loadPtr(Address(calleeReg, JSFunction::offsetOfJitInfo()),
calleeReg);
masm.callWithABI(
Address(calleeReg, JSJitInfo::offsetOfIgnoresReturnValueNative()));
} else {
// This depends on the native function pointer being stored unchanged as
// a PrivateValue.
masm.callWithABI(Address(calleeReg, JSFunction::offsetOfNativeOrEnv()));
masm.callWithABI(Address(calleeReg, JSFunction::offsetOfNative()));
}
#endif
} break;

@@ -2853,9 +2846,8 @@ bool BaselineCacheIRCompiler::emitCallScriptedFunction(ObjOperandId calleeId,

// Handle arguments underflow.
Label noUnderflow;
masm.load32(Address(calleeReg, JSFunction::offsetOfFlagsAndArgCount()),
calleeReg);
masm.rshift32(Imm32(JSFunction::ArgCountShift), calleeReg);
masm.load16ZeroExtend(Address(calleeReg, JSFunction::offsetOfNargs()),
calleeReg);
masm.branch32(Assembler::AboveOrEqual, argcReg, calleeReg, &noUnderflow);
{
// Call the arguments rectifier.

@@ -2965,9 +2957,8 @@ bool BaselineCacheIRCompiler::emitCallInlinedFunction(ObjOperandId calleeId,

// Handle arguments underflow.
Label noUnderflow;
masm.load32(Address(calleeReg, JSFunction::offsetOfFlagsAndArgCount()),
calleeReg);
masm.rshift32(Imm32(JSFunction::ArgCountShift), calleeReg);
masm.load16ZeroExtend(Address(calleeReg, JSFunction::offsetOfNargs()),
calleeReg);
masm.branch32(Assembler::AboveOrEqual, argcReg, calleeReg, &noUnderflow);

// Call the trial-inlining arguments rectifier.
@@ -1140,8 +1140,7 @@ void BaselineCompilerCodeGen::emitInitFrameFields(Register nonFunctionEnv) {
masm.store32(Imm32(0), frame.addressOfFlags());
if (handler.function()) {
masm.loadFunctionFromCalleeToken(frame.addressOfCalleeToken(), scratch);
masm.unboxObject(Address(scratch, JSFunction::offsetOfEnvironment()),
scratch);
masm.loadPtr(Address(scratch, JSFunction::offsetOfEnvironment()), scratch);
masm.storePtr(scratch, frame.addressOfEnvironmentChain());
} else {
masm.storePtr(nonFunctionEnv, frame.addressOfEnvironmentChain());

@@ -1188,11 +1187,10 @@ void BaselineInterpreterCodeGen::emitInitFrameFields(Register nonFunctionEnv) {
{
// CalleeToken_Function or CalleeToken_FunctionConstructing.
masm.andPtr(Imm32(uint32_t(CalleeTokenMask)), scratch1);
masm.unboxObject(Address(scratch1, JSFunction::offsetOfEnvironment()),
scratch2);
masm.loadPtr(Address(scratch1, JSFunction::offsetOfEnvironment()),
scratch2);
masm.storePtr(scratch2, frame.addressOfEnvironmentChain());
masm.loadPrivate(Address(scratch1, JSFunction::offsetOfJitInfoOrScript()),
scratch1);
masm.loadPtr(Address(scratch1, JSFunction::offsetOfScript()), scratch1);
masm.jump(&done);
}
masm.bind(&notFunction);

@@ -4240,8 +4238,7 @@ void BaselineCompilerCodeGen::loadNumFormalArguments(Register dest) {
template <>
void BaselineInterpreterCodeGen::loadNumFormalArguments(Register dest) {
masm.loadFunctionFromCalleeToken(frame.addressOfCalleeToken(), dest);
masm.load32(Address(dest, JSFunction::offsetOfFlagsAndArgCount()), dest);
masm.rshift32(Imm32(JSFunction::ArgCountShift), dest);
masm.load16ZeroExtend(Address(dest, JSFunction::offsetOfNargs()), dest);
}

template <typename Handler>

@@ -5550,8 +5547,8 @@ bool BaselineCodeGen<Handler>::emit_SuperFun() {

#ifdef DEBUG
Label classCheckDone;
masm.branchTestObjIsFunction(Assembler::Equal, callee, scratch, callee,
&classCheckDone);
masm.branchTestObjClass(Assembler::Equal, callee, &JSFunction::class_,
scratch, callee, &classCheckDone);
masm.assumeUnreachable("Unexpected non-JSFunction callee in JSOp::SuperFun");
masm.bind(&classCheckDone);
#endif

@@ -5896,8 +5893,7 @@ bool BaselineCodeGen<Handler>::emit_Resume() {
// script does not have a JitScript.
Label interpret;
Register scratch1 = regs.takeAny();
masm.loadPrivate(Address(callee, JSFunction::offsetOfJitInfoOrScript()),
scratch1);
masm.loadPtr(Address(callee, JSFunction::offsetOfScript()), scratch1);
masm.branchIfScriptHasNoJitScript(scratch1, &interpret);

#ifdef JS_TRACE_LOGGING

@@ -5916,9 +5912,7 @@ bool BaselineCodeGen<Handler>::emit_Resume() {
// Push |undefined| for all formals.
Register scratch2 = regs.takeAny();
Label loop, loopDone;
masm.load32(Address(callee, JSFunction::offsetOfFlagsAndArgCount()),
scratch2);
masm.rshift32(Imm32(JSFunction::ArgCountShift), scratch2);
masm.load16ZeroExtend(Address(callee, JSFunction::offsetOfNargs()), scratch2);

static_assert(sizeof(Value) == 8);
static_assert(JitStackAlignment == 16 || JitStackAlignment == 8);

@@ -6081,8 +6075,7 @@ bool BaselineCodeGen<Handler>::emit_Resume() {
// Load script in scratch1.
masm.unboxObject(
Address(genObj, AbstractGeneratorObject::offsetOfCalleeSlot()), scratch1);
masm.loadPrivate(Address(scratch1, JSFunction::offsetOfJitInfoOrScript()),
scratch1);
masm.loadPtr(Address(scratch1, JSFunction::offsetOfScript()), scratch1);

// Load resume index in scratch2 and mark generator as running.
Address resumeIndexSlot(genObj,
@@ -4647,7 +4647,8 @@ AttachDecision InstanceOfIRGenerator::tryAttachStub() {
}

uint32_t slot = prop->slot();
MOZ_ASSERT(slot >= fun->numFixedSlots(), "Stub code relies on this");

MOZ_ASSERT(fun->numFixedSlots() == 0, "Stub code relies on this");
if (!fun->getSlot(slot).isObject()) {
trackAttached(IRGenerator::NotAttached);
return AttachDecision::NoAction;

@@ -4674,8 +4675,7 @@ AttachDecision InstanceOfIRGenerator::tryAttachStub() {
// Load prototypeObject into the cache -- consumed twice in the IC
ObjOperandId protoId = writer.loadObject(prototypeObject);
// Ensure that rhs[slot] == prototypeObject.
writer.guardDynamicSlotIsSpecificObject(rhsId, protoId,
slot - fun->numFixedSlots());
writer.guardDynamicSlotIsSpecificObject(rhsId, protoId, slot);

// Needn't guard LHS is object, because the actual stub can handle that
// and correctly return false.

@@ -9408,9 +9408,8 @@ AttachDecision CallIRGenerator::tryAttachCallScripted(
JSFunction* newTarget = &newTarget_.toObject().as<JSFunction>();
Maybe<PropertyInfo> prop = newTarget->lookupPure(cx_->names().prototype);
MOZ_ASSERT(prop.isSome());
MOZ_ASSERT(newTarget->numFixedSlots() == 0, "Stub code relies on this");
uint32_t slot = prop->slot();
MOZ_ASSERT(slot >= newTarget->numFixedSlots(),
"Stub code relies on this");
JSObject* prototypeObject = &newTarget->getSlot(slot).toObject();

ValOperandId newTargetValId = writer.loadArgumentDynamicSlot(

@@ -9418,8 +9417,7 @@ AttachDecision CallIRGenerator::tryAttachCallScripted(
ObjOperandId newTargetObjId = writer.guardToObject(newTargetValId);
writer.guardShape(newTargetObjId, newTarget->shape());
ObjOperandId protoId = writer.loadObject(prototypeObject);
writer.guardDynamicSlotIsSpecificObject(
newTargetObjId, protoId, slot - newTarget->numFixedSlots());
writer.guardDynamicSlotIsSpecificObject(newTargetObjId, protoId, slot);

// Call metaScriptedTemplateObject before emitting the call, so that Warp
// can use this template object before transpiling the call.
@@ -890,12 +890,18 @@ class MOZ_RAII CacheIRWriter : public JS::CustomAutoRooter {
}

public:
static uint32_t encodeNargsAndFlags(JSFunction* fun) {
static_assert(JSFunction::NArgsBits == 16);
static_assert(sizeof(decltype(fun->flags().toRaw())) == sizeof(uint16_t));
return (uint32_t(fun->nargs()) << 16) | fun->flags().toRaw();
}

void guardSpecificFunction(ObjOperandId obj, JSFunction* expected) {
// Guard object is a specific function. This implies immutable fields on
// the JSFunction struct itself are unchanged.
// Bake in the nargs and FunctionFlags so Warp can use them off-main thread,
// instead of directly using the JSFunction fields.
uint32_t nargsAndFlags = expected->flagsAndArgCountRaw();
uint32_t nargsAndFlags = encodeNargsAndFlags(expected);
guardSpecificFunction_(obj, expected, nargsAndFlags);
}

@@ -905,7 +911,7 @@ class MOZ_RAII CacheIRWriter : public JS::CustomAutoRooter {
// lambda clones.
// Bake in the nargs and FunctionFlags so Warp can use them off-main thread,
// instead of directly using the JSFunction fields.
uint32_t nargsAndFlags = expected->function()->flagsAndArgCountRaw();
uint32_t nargsAndFlags = encodeNargsAndFlags(expected->function());
guardFunctionScript_(fun, expected, nargsAndFlags);
}

@@ -1035,7 +1041,7 @@ class MOZ_RAII CacheIRWriter : public JS::CustomAutoRooter {
void callScriptedGetterResult(ValOperandId receiver, JSFunction* getter,
bool sameRealm) {
MOZ_ASSERT(getter->hasJitEntry());
uint32_t nargsAndFlags = getter->flagsAndArgCountRaw();
uint32_t nargsAndFlags = encodeNargsAndFlags(getter);
callScriptedGetterResult_(receiver, getter, sameRealm, nargsAndFlags);
trialInliningState_ = TrialInliningState::Candidate;
}

@@ -1043,7 +1049,7 @@ class MOZ_RAII CacheIRWriter : public JS::CustomAutoRooter {
void callInlinedGetterResult(ValOperandId receiver, JSFunction* getter,
ICScript* icScript, bool sameRealm) {
MOZ_ASSERT(getter->hasJitEntry());
uint32_t nargsAndFlags = getter->flagsAndArgCountRaw();
uint32_t nargsAndFlags = encodeNargsAndFlags(getter);
callInlinedGetterResult_(receiver, getter, icScript, sameRealm,
nargsAndFlags);
trialInliningState_ = TrialInliningState::Inlined;

@@ -1052,14 +1058,14 @@ class MOZ_RAII CacheIRWriter : public JS::CustomAutoRooter {
void callNativeGetterResult(ValOperandId receiver, JSFunction* getter,
bool sameRealm) {
MOZ_ASSERT(getter->isNativeWithoutJitEntry());
uint32_t nargsAndFlags = getter->flagsAndArgCountRaw();
uint32_t nargsAndFlags = encodeNargsAndFlags(getter);
callNativeGetterResult_(receiver, getter, sameRealm, nargsAndFlags);
}

void callScriptedSetter(ObjOperandId receiver, JSFunction* setter,
ValOperandId rhs, bool sameRealm) {
MOZ_ASSERT(setter->hasJitEntry());
uint32_t nargsAndFlags = setter->flagsAndArgCountRaw();
uint32_t nargsAndFlags = encodeNargsAndFlags(setter);
callScriptedSetter_(receiver, setter, rhs, sameRealm, nargsAndFlags);
trialInliningState_ = TrialInliningState::Candidate;
}

@@ -1067,7 +1073,7 @@ class MOZ_RAII CacheIRWriter : public JS::CustomAutoRooter {
void callInlinedSetter(ObjOperandId receiver, JSFunction* setter,
ValOperandId rhs, ICScript* icScript, bool sameRealm) {
MOZ_ASSERT(setter->hasJitEntry());
uint32_t nargsAndFlags = setter->flagsAndArgCountRaw();
uint32_t nargsAndFlags = encodeNargsAndFlags(setter);
callInlinedSetter_(receiver, setter, rhs, icScript, sameRealm,
nargsAndFlags);
trialInliningState_ = TrialInliningState::Inlined;

@@ -1076,7 +1082,7 @@ class MOZ_RAII CacheIRWriter : public JS::CustomAutoRooter {
void callNativeSetter(ObjOperandId receiver, JSFunction* setter,
ValOperandId rhs, bool sameRealm) {
MOZ_ASSERT(setter->isNativeWithoutJitEntry());
uint32_t nargsAndFlags = setter->flagsAndArgCountRaw();
uint32_t nargsAndFlags = encodeNargsAndFlags(setter);
callNativeSetter_(receiver, setter, rhs, sameRealm, nargsAndFlags);
}
@@ -1922,17 +1922,6 @@ bool CacheIRCompiler::emitGuardClass(ObjOperandId objId, GuardClassKind kind) {
return false;
}

if (kind == GuardClassKind::JSFunction) {
if (objectGuardNeedsSpectreMitigations(objId)) {
masm.branchTestObjIsFunction(Assembler::NotEqual, obj, scratch, obj,
failure->label());
} else {
masm.branchTestObjIsFunctionNoSpectreMitigations(
Assembler::NotEqual, obj, scratch, failure->label());
}
return true;
}

const JSClass* clasp = nullptr;
switch (kind) {
case GuardClassKind::Array:

@@ -1956,14 +1945,15 @@ bool CacheIRCompiler::emitGuardClass(ObjOperandId objId, GuardClassKind kind) {
case GuardClassKind::WindowProxy:
clasp = cx_->runtime()->maybeWindowProxyClass();
break;
case GuardClassKind::JSFunction:
clasp = &JSFunction::class_;
break;
case GuardClassKind::Set:
clasp = &SetObject::class_;
break;
case GuardClassKind::Map:
clasp = &MapObject::class_;
break;
default:
MOZ_ASSERT_UNREACHABLE();
}
MOZ_ASSERT(clasp);

@@ -3313,8 +3303,8 @@ bool CacheIRCompiler::emitLoadFunctionLengthResult(ObjOperandId objId) {
return false;
}

// Get the JSFunction flags and arg count.
masm.load32(Address(obj, JSFunction::offsetOfFlagsAndArgCount()), scratch);
// Get the JSFunction flags.
masm.load16ZeroExtend(Address(obj, JSFunction::offsetOfFlags()), scratch);

// Functions with a SelfHostedLazyScript must be compiled with the slow-path
// before the function length is known. If the length was previously resolved,
@@ -3523,22 +3523,28 @@ void CodeGenerator::visitLambdaArrow(LLambdaArrow* lir) {

void CodeGenerator::emitLambdaInit(Register output, Register envChain,
const LambdaFunctionInfo& info) {
uint32_t flagsAndArgs =
info.flags.toRaw() | (info.nargs << JSFunction::ArgCountShift);
masm.storeValue(JS::PrivateUint32Value(flagsAndArgs),
Address(output, JSFunction::offsetOfFlagsAndArgCount()));
masm.storePrivateValue(
ImmGCPtr(info.baseScript),
Address(output, JSFunction::offsetOfJitInfoOrScript()));
// Initialize nargs and flags. We do this with a single uint32 to avoid
// 16-bit writes.
union {
struct S {
uint16_t nargs;
uint16_t flags;
} s;
uint32_t word;
} u;
u.s.nargs = info.nargs;
u.s.flags = info.flags.toRaw();

masm.storeValue(JSVAL_TYPE_OBJECT, envChain,
Address(output, JSFunction::offsetOfEnvironment()));
static_assert(JSFunction::offsetOfFlags() == JSFunction::offsetOfNargs() + 2,
"the code below needs to be adapted");
masm.store32(Imm32(u.word), Address(output, JSFunction::offsetOfNargs()));
masm.storePtr(ImmGCPtr(info.baseScript),
Address(output, JSFunction::offsetOfBaseScript()));
masm.storePtr(envChain, Address(output, JSFunction::offsetOfEnvironment()));
// No post barrier needed because output is guaranteed to be allocated in
// the nursery.

JSAtom* atom = info.funUnsafe()->displayAtom();
JS::Value atomValue = atom ? JS::StringValue(atom) : JS::UndefinedValue();
masm.storeValue(atomValue, Address(output, JSFunction::offsetOfAtom()));
masm.storePtr(ImmGCPtr(info.funUnsafe()->displayAtom()),
Address(output, JSFunction::offsetOfAtom()));
}

void CodeGenerator::visitFunctionWithProto(LFunctionWithProto* lir) {

@@ -3958,7 +3964,7 @@ void CodeGenerator::visitElements(LElements* lir) {
void CodeGenerator::visitFunctionEnvironment(LFunctionEnvironment* lir) {
Address environment(ToRegister(lir->function()),
JSFunction::offsetOfEnvironment());
masm.unboxObject(environment, ToRegister(lir->output()));
masm.loadPtr(environment, ToRegister(lir->output()));
}

void CodeGenerator::visitHomeObject(LHomeObject* lir) {

@@ -4720,7 +4726,7 @@ void CodeGenerator::visitGuardFunctionScript(LGuardFunctionScript* lir) {
Register function = ToRegister(lir->function());

Label bail;
Address scriptAddr(function, JSFunction::offsetOfJitInfoOrScript());
Address scriptAddr(function, JSFunction::offsetOfBaseScript());
masm.branchPtr(Assembler::NotEqual, scriptAddr,
ImmGCPtr(lir->mir()->expected()), &bail);
bailoutFrom(&bail, lir->snapshot());
@@ -5351,8 +5357,8 @@ void CodeGenerator::visitCallGeneric(LCallGeneric* call) {

// Guard that calleereg is actually a function object.
if (call->mir()->needsClassCheck()) {
masm.branchTestObjIsFunction(Assembler::NotEqual, calleereg, nargsreg,
calleereg, &invoke);
masm.branchTestObjClass(Assembler::NotEqual, calleereg, &JSFunction::class_,
nargsreg, calleereg, &invoke);
}

// Guard that callee allows the [[Call]] or [[Construct]] operation required.

@@ -5408,9 +5414,8 @@ void CodeGenerator::visitCallGeneric(LCallGeneric* call) {
DebugOnly<unsigned> numNonArgsOnStack = 1 + call->isConstructing();
MOZ_ASSERT(call->numActualArgs() ==
call->mir()->numStackArgs() - numNonArgsOnStack);
masm.load32(Address(calleereg, JSFunction::offsetOfFlagsAndArgCount()),
nargsreg);
masm.rshift32(Imm32(JSFunction::ArgCountShift), nargsreg);
masm.load16ZeroExtend(Address(calleereg, JSFunction::offsetOfNargs()),
nargsreg);
masm.branch32(Assembler::Above, nargsreg, Imm32(call->numActualArgs()),
&thunk);
masm.jump(&makeCall);

@@ -5936,8 +5941,8 @@ void CodeGenerator::emitApplyGeneric(T* apply) {

// Unless already known, guard that calleereg is actually a function object.
if (!apply->hasSingleTarget()) {
masm.branchTestObjIsFunction(Assembler::NotEqual, calleereg, objreg,
calleereg, &invoke);
masm.branchTestObjClass(Assembler::NotEqual, calleereg, &JSFunction::class_,
objreg, calleereg, &invoke);
}

// Guard that calleereg is an interpreted function with a JSScript.

@@ -5983,9 +5988,8 @@ void CodeGenerator::emitApplyGeneric(T* apply) {
// Check whether the provided arguments satisfy target argc.
if (!apply->hasSingleTarget()) {
Register nformals = extraStackSpace;
masm.load32(Address(calleereg, JSFunction::offsetOfFlagsAndArgCount()),
nformals);
masm.rshift32(Imm32(JSFunction::ArgCountShift), nformals);
masm.load16ZeroExtend(Address(calleereg, JSFunction::offsetOfNargs()),
nformals);
masm.branch32(Assembler::Below, argcreg, nformals, &underflow);
} else {
masm.branch32(Assembler::Below, argcreg,

@@ -7633,8 +7637,7 @@ void CodeGenerator::visitFunctionLength(LFunctionLength* lir) {
Label bail;

// Get the JSFunction flags.
masm.load32(Address(function, JSFunction::offsetOfFlagsAndArgCount()),
output);
masm.load16ZeroExtend(Address(function, JSFunction::offsetOfFlags()), output);

// Functions with a SelfHostedLazyScript must be compiled with the slow-path
// before the function length is known. If the length was previously resolved,

@@ -14027,23 +14030,6 @@ void CodeGenerator::visitGuardToClass(LGuardToClass* ins) {
bailoutFrom(&notEqual, ins->snapshot());
}

void CodeGenerator::visitGuardToFunction(LGuardToFunction* ins) {
Register lhs = ToRegister(ins->lhs());
Register temp = ToRegister(ins->temp0());

// branchTestObjClass may zero the object register on speculative paths
// (we should have a defineReuseInput allocation in this case).
Register spectreRegToZero = lhs;

Label notEqual;

masm.branchTestObjIsFunction(Assembler::NotEqual, lhs, temp, spectreRegToZero,
&notEqual);

// Can't return null-return here, so bail.
bailoutFrom(&notEqual, ins->snapshot());
}

void CodeGenerator::visitObjectClassToString(LObjectClassToString* lir) {
Register obj = ToRegister(lir->lhs());
Register temp = ToRegister(lir->temp0());

@@ -14220,14 +14206,8 @@ void CodeGenerator::visitAssertClass(LAssertClass* ins) {
Register temp = ToRegister(ins->getTemp(0));

Label success;
if (ins->mir()->getClass() == &FunctionClass) {
// Allow both possible function classes here.
masm.branchTestObjIsFunctionNoSpectreMitigations(Assembler::Equal, obj,
temp, &success);
} else {
masm.branchTestObjClassNoSpectreMitigations(
Assembler::Equal, obj, ins->mir()->getClass(), temp, &success);
}
masm.branchTestObjClassNoSpectreMitigations(
Assembler::Equal, obj, ins->mir()->getClass(), temp, &success);
masm.assumeUnreachable("Wrong KnownClass during run-time");
masm.bind(&success);
}
@@ -14779,10 +14759,9 @@ void CodeGenerator::visitNaNToZero(LNaNToZero* lir) {
}

static void BoundFunctionLength(MacroAssembler& masm, Register target,
Register targetFlagsAndArgCount,
Register argCount, Register output,
Label* slowPath) {
masm.loadFunctionLength(target, targetFlagsAndArgCount, output, slowPath);
Register targetFlags, Register argCount,
Register output, Label* slowPath) {
masm.loadFunctionLength(target, targetFlags, output, slowPath);

// Compute the bound function length: Max(0, target.length - argCount).
Label nonNegative;

@@ -14818,13 +14797,9 @@ static void BoundFunctionName(MacroAssembler& masm, Register target,
Label guessed, hasName;
masm.branchTest32(Assembler::NonZero, targetFlags,
Imm32(FunctionFlags::HAS_GUESSED_ATOM), &guessed);

masm.bind(&loadName);
Address atom(Address(target, JSFunction::offsetOfAtom()));
masm.branchTestUndefined(Assembler::Equal, atom, &guessed);
masm.unboxString(atom, output);
masm.jump(&hasName);

masm.loadPtr(Address(target, JSFunction::offsetOfAtom()), output);
masm.branchTestPtr(Assembler::NonZero, output, output, &hasName);
{
masm.bind(&guessed);

@@ -14834,13 +14809,12 @@ static void BoundFunctionName(MacroAssembler& masm, Register target,
masm.bind(&hasName);
}

static void BoundFunctionFlagsAndArgCount(MacroAssembler& masm,
Register targetFlags, Register bound,
Register output) {
static void BoundFunctionFlags(MacroAssembler& masm, Register targetFlags,
Register bound, Register output) {
// Set the BOUND_FN flag and, if the target is a constructor, the
// CONSTRUCTOR flag.
Label isConstructor, boundFlagsComputed;
masm.load32(Address(bound, JSFunction::offsetOfFlagsAndArgCount()), output);
masm.load16ZeroExtend(Address(bound, JSFunction::offsetOfFlags()), output);
masm.branchTest32(Assembler::NonZero, targetFlags,
Imm32(FunctionFlags::CONSTRUCTOR), &isConstructor);
{

@@ -14873,16 +14847,16 @@ void CodeGenerator::visitFinishBoundFunctionInit(
FunctionExtended::offsetOfBoundFunctionLengthSlot();

// Take the slow path if the target is not a JSFunction.
masm.branchTestObjIsFunction(Assembler::NotEqual, target, temp0, target,
slowPath);
masm.branchTestObjClass(Assembler::NotEqual, target, &JSFunction::class_,
temp0, target, slowPath);

// Take the slow path if we'd need to adjust the [[Prototype]].
masm.loadObjProto(bound, temp0);
masm.loadObjProto(target, temp1);
masm.branchPtr(Assembler::NotEqual, temp0, temp1, slowPath);

// Get the function flags and arg count.
masm.load32(Address(target, JSFunction::offsetOfFlagsAndArgCount()), temp0);
// Get the function flags.
masm.load16ZeroExtend(Address(target, JSFunction::offsetOfFlags()), temp0);

// Functions with a SelfHostedLazyScript must be compiled with the slow-path
// before the function length is known. If the length or name property is

@@ -14900,12 +14874,11 @@ void CodeGenerator::visitFinishBoundFunctionInit(
// Store the target's name atom in the bound function as is.
BoundFunctionName(masm, target, temp0, temp1, gen->runtime->names(),
slowPath);
masm.storeValue(JSVAL_TYPE_STRING, temp1,
Address(bound, JSFunction::offsetOfAtom()));
masm.storePtr(temp1, Address(bound, JSFunction::offsetOfAtom()));

// Update the bound function's flags.
BoundFunctionFlagsAndArgCount(masm, temp0, bound, temp1);
masm.store32(temp1, Address(bound, JSFunction::offsetOfFlagsAndArgCount()));
BoundFunctionFlags(masm, temp0, bound, temp1);
masm.store16(temp1, Address(bound, JSFunction::offsetOfFlags()));

masm.bind(ool->rejoin());
}

@@ -14994,8 +14967,8 @@ void CodeGenerator::visitSuperFunction(LSuperFunction* lir) {

#ifdef DEBUG
Label classCheckDone;
masm.branchTestObjIsFunction(Assembler::Equal, callee, temp, callee,
&classCheckDone);
masm.branchTestObjClass(Assembler::Equal, callee, &JSFunction::class_, temp,
callee, &classCheckDone);
masm.assumeUnreachable("Unexpected non-JSFunction callee in JSOp::SuperFun");
masm.bind(&classCheckDone);
#endif
@@ -88,7 +88,7 @@ const JSClass* jit::GetObjectKnownJSClass(const MDefinition* def) {
case KnownClass::Array:
return &ArrayObject::class_;
case KnownClass::Function:
return &FunctionClass;
return &JSFunction::class_;
case KnownClass::RegExp:
return &RegExpObject::class_;
case KnownClass::ArrayIterator:
@@ -2480,13 +2480,6 @@
num_temps: 1
mir_op: true

- name: GuardToFunction
result_type: WordSized
operands:
lhs: WordSized
num_temps: 1
mir_op: true

- name: ObjectClassToString
result_type: WordSized
operands:
@@ -4684,15 +4684,6 @@ void LIRGenerator::visitGuardToClass(MGuardToClass* ins) {
defineReuseInput(lir, ins, 0);
}

void LIRGenerator::visitGuardToFunction(MGuardToFunction* ins) {
MOZ_ASSERT(ins->object()->type() == MIRType::Object);
MOZ_ASSERT(ins->type() == MIRType::Object);
LGuardToFunction* lir =
new (alloc()) LGuardToFunction(useRegisterAtStart(ins->object()), temp());
assignSnapshot(lir, ins->bailoutKind());
defineReuseInput(lir, ins, 0);
}

void LIRGenerator::visitObjectClassToString(MObjectClassToString* ins) {
MOZ_ASSERT(ins->object()->type() == MIRType::Object);
MOZ_ASSERT(ins->type() == MIRType::String);
@@ -5720,15 +5720,6 @@ MDefinition* MGuardToClass::foldsTo(TempAllocator& alloc) {
return object();
}

MDefinition* MGuardToFunction::foldsTo(TempAllocator& alloc) {
if (GetObjectKnownClass(object()) != KnownClass::Function) {
return this;
}

AssertKnownClass(alloc, this, object());
return object();
}

MDefinition* MHasClass::foldsTo(TempAllocator& alloc) {
const JSClass* clasp = GetObjectKnownJSClass(object());
if (!clasp) {
@@ -8589,7 +8589,6 @@ class MGuardToClass : public MUnaryInstruction,
MGuardToClass(MDefinition* object, const JSClass* clasp)
: MUnaryInstruction(classOpcode, object), class_(clasp) {
MOZ_ASSERT(object->type() == MIRType::Object);
MOZ_ASSERT(!clasp->isJSFunction(), "Use MGuardToFunction instead");
setResultType(MIRType::Object);
setMovable();

@@ -8622,34 +8621,6 @@ class MGuardToClass : public MUnaryInstruction,
}
};

class MGuardToFunction : public MUnaryInstruction,
public SingleObjectPolicy::Data {
explicit MGuardToFunction(MDefinition* object)
: MUnaryInstruction(classOpcode, object) {
MOZ_ASSERT(object->type() == MIRType::Object);
setResultType(MIRType::Object);
setMovable();

// We will bail out if the class type is incorrect, so we need to ensure we
// don't eliminate this instruction
setGuard();
}

public:
INSTRUCTION_HEADER(GuardToFunction)
TRIVIAL_NEW_WRAPPERS
NAMED_OPERANDS((0, object))

MDefinition* foldsTo(TempAllocator& alloc) override;
AliasSet getAliasSet() const override { return AliasSet::None(); }
bool congruentTo(const MDefinition* ins) const override {
if (!ins->isGuardToFunction()) {
return false;
}
return congruentIfOperandsEqual(ins);
}
};

// Note: we might call a proxy trap, so this instruction is effectful.
class MIsArray : public MUnaryInstruction,
public BoxExceptPolicy<0, MIRType::Object>::Data {
@@ -1894,9 +1894,6 @@
- name: GuardToClass
gen_boilerplate: false

- name: GuardToFunction
gen_boilerplate: false

- name: IsArray
gen_boilerplate: false
@@ -415,13 +415,28 @@ void MacroAssembler::branchIfBigIntIsNonZero(Register bigInt, Label* label) {

void MacroAssembler::branchTestFunctionFlags(Register fun, uint32_t flags,
Condition cond, Label* label) {
Address address(fun, JSFunction::offsetOfFlagsAndArgCount());
branchTest32(cond, address, Imm32(flags), label);
// 16-bit loads are slow and unaligned 32-bit loads may be too so
// perform an aligned 32-bit load and adjust the bitmask accordingly.

static_assert(JSFunction::offsetOfNargs() % sizeof(uint32_t) == 0,
"The code in this function and the ones below must change");
static_assert(JSFunction::offsetOfFlags() == JSFunction::offsetOfNargs() + 2,
"The code in this function and the ones below must change");

int32_t bit = Imm32_16Adj(flags);
Address address(fun, JSFunction::offsetOfNargs());
branchTest32(cond, address, Imm32(bit), label);
}

void MacroAssembler::branchIfNotFunctionIsNonBuiltinCtor(Register fun,
Register scratch,
Label* label) {
// 16-bit loads are slow and unaligned 32-bit loads may be too so
// perform an aligned 32-bit load and adjust the bitmask accordingly.

static_assert(JSFunction::offsetOfNargs() % sizeof(uint32_t) == 0);
static_assert(JSFunction::offsetOfFlags() == JSFunction::offsetOfNargs() + 2);

// Guard the function has the BASESCRIPT and CONSTRUCTOR flags and does NOT
// have the SELF_HOSTED flag.
// This is equivalent to JSFunction::isNonBuiltinConstructor.

@@ -431,7 +446,7 @@ void MacroAssembler::branchIfNotFunctionIsNonBuiltinCtor(Register fun,
constexpr int32_t expected =
Imm32_16Adj(FunctionFlags::BASESCRIPT | FunctionFlags::CONSTRUCTOR);

load32(Address(fun, JSFunction::offsetOfFlagsAndArgCount()), scratch);
load32(Address(fun, JSFunction::offsetOfNargs()), scratch);
and32(Imm32(mask), scratch);
branch32(Assembler::NotEqual, scratch, Imm32(expected), label);
}

@@ -503,10 +518,18 @@ void MacroAssembler::branchFunctionKind(Condition cond,
FunctionFlags::FunctionKind kind,
Register fun, Register scratch,
Label* label) {
Address address(fun, JSFunction::offsetOfFlagsAndArgCount());
// 16-bit loads are slow and unaligned 32-bit loads may be too so
// perform an aligned 32-bit load and adjust the bitmask accordingly.

static_assert(JSFunction::offsetOfNargs() % sizeof(uint32_t) == 0);
static_assert(JSFunction::offsetOfFlags() == JSFunction::offsetOfNargs() + 2);

Address address(fun, JSFunction::offsetOfNargs());
int32_t mask = Imm32_16Adj(FunctionFlags::FUNCTION_KIND_MASK);
int32_t bit = Imm32_16Adj(kind << FunctionFlags::FUNCTION_KIND_SHIFT);
load32(address, scratch);
and32(Imm32(FunctionFlags::FUNCTION_KIND_MASK), scratch);
branch32(cond, scratch, Imm32(kind), label);
and32(Imm32(mask), scratch);
branch32(cond, scratch, Imm32(bit), label);
}

void MacroAssembler::branchTestObjClass(Condition cond, Register obj,
@@ -579,46 +602,6 @@ void MacroAssembler::branchTestObjClass(Condition cond, Register obj,
}
}

void MacroAssembler::branchTestClassIsFunction(Condition cond, Register clasp,
Label* label) {
MOZ_ASSERT(cond == Assembler::Equal || cond == Assembler::NotEqual);

if (cond == Assembler::Equal) {
branchPtr(Assembler::Equal, clasp, ImmPtr(&FunctionClass), label);
branchPtr(Assembler::Equal, clasp, ImmPtr(&ExtendedFunctionClass), label);
return;
}

Label isFunction;
branchPtr(Assembler::Equal, clasp, ImmPtr(&FunctionClass), &isFunction);
branchPtr(Assembler::NotEqual, clasp, ImmPtr(&ExtendedFunctionClass), label);
bind(&isFunction);
}

void MacroAssembler::branchTestObjIsFunction(Condition cond, Register obj,
Register scratch,
Register spectreRegToZero,
Label* label) {
MOZ_ASSERT(scratch != spectreRegToZero);

branchTestObjIsFunctionNoSpectreMitigations(cond, obj, scratch, label);

if (JitOptions.spectreObjectMitigations) {
spectreZeroRegister(cond, scratch, spectreRegToZero);
}
}

void MacroAssembler::branchTestObjIsFunctionNoSpectreMitigations(
Condition cond, Register obj, Register scratch, Label* label) {
MOZ_ASSERT(cond == Assembler::Equal || cond == Assembler::NotEqual);
MOZ_ASSERT(obj != scratch);

loadPtr(Address(obj, JSObject::offsetOfShape()), scratch);
loadPtr(Address(scratch, Shape::offsetOfBaseShape()), scratch);
loadPtr(Address(scratch, BaseShape::offsetOfClasp()), scratch);
branchTestClassIsFunction(cond, scratch, label);
}

void MacroAssembler::branchTestObjShape(Condition cond, Register obj,
const Shape* shape, Register scratch,
Register spectreRegToZero,
@@ -1676,7 +1676,7 @@ void MacroAssembler::typeOfObject(Register obj, Register scratch, Label* slow,
branchTestClassIsProxy(true, scratch, slow);

// JSFunctions are always callable.
branchTestClassIsFunction(Assembler::Equal, scratch, isCallable);
branchPtr(Assembler::Equal, scratch, ImmPtr(&JSFunction::class_), isCallable);

// Objects that emulate undefined.
Address flags(scratch, JSClass::offsetOfFlags());

@@ -1706,14 +1706,15 @@ void MacroAssembler::isCallableOrConstructor(bool isCallable, Register obj,
// An object is constructor iff:
// ((is<JSFunction>() && as<JSFunction>().isConstructor) ||
// (getClass()->cOps && getClass()->cOps->construct)).
branchTestClassIsFunction(Assembler::NotEqual, output, &notFunction);
branchPtr(Assembler::NotEqual, output, ImmPtr(&JSFunction::class_),
&notFunction);
if (isCallable) {
move32(Imm32(1), output);
} else {
static_assert(mozilla::IsPowerOfTwo(uint32_t(FunctionFlags::CONSTRUCTOR)),
"FunctionFlags::CONSTRUCTOR has only one bit set");

load32(Address(obj, JSFunction::offsetOfFlagsAndArgCount()), output);
load16ZeroExtend(Address(obj, JSFunction::offsetOfFlags()), output);
rshift32(Imm32(mozilla::FloorLog2(uint32_t(FunctionFlags::CONSTRUCTOR))),
output);
and32(Imm32(1), output);
@@ -1813,7 +1814,8 @@ void MacroAssembler::setIsCrossRealmArrayConstructor(Register obj,
&isFalse);

// The object must be a function.
branchTestObjIsFunction(Assembler::NotEqual, obj, output, obj, &isFalse);
branchTestObjClass(Assembler::NotEqual, obj, &JSFunction::class_, output, obj,
&isFalse);

// The function must be the ArrayConstructor native.
branchPtr(Assembler::NotEqual,

@@ -1834,7 +1836,8 @@ void MacroAssembler::setIsDefinitelyTypedArrayConstructor(Register obj,
Label isFalse, isTrue, done;

// The object must be a function. (Wrappers are not supported.)
branchTestObjIsFunction(Assembler::NotEqual, obj, output, obj, &isFalse);
branchTestObjClass(Assembler::NotEqual, obj, &JSFunction::class_, output, obj,
&isFalse);

// Load the native into |output|.
loadPtr(Address(obj, JSFunction::offsetOfNativeOrEnv()), output);
@@ -2112,14 +2115,14 @@ void MacroAssembler::loadJitCodeRaw(Register func, Register dest) {
                SelfHostedLazyScript::offsetOfJitCodeRaw(),
                "SelfHostedLazyScript and BaseScript must use same layout for "
                "jitCodeRaw_");
  loadPrivate(Address(func, JSFunction::offsetOfJitInfoOrScript()), dest);
  loadPtr(Address(func, JSFunction::offsetOfScript()), dest);
  loadPtr(Address(dest, BaseScript::offsetOfJitCodeRaw()), dest);
}

void MacroAssembler::loadBaselineJitCodeRaw(Register func, Register dest,
                                            Label* failure) {
  // Load JitScript
  loadPrivate(Address(func, JSFunction::offsetOfJitInfoOrScript()), dest);
  loadPtr(Address(func, JSFunction::offsetOfScript()), dest);
  if (failure) {
    branchIfScriptHasNoJitScript(dest, failure);
  }
@@ -3547,8 +3550,7 @@ void MacroAssembler::branchIfNotRegExpInstanceOptimizable(Register regexp,
// ===============================================================
// Branch functions

void MacroAssembler::loadFunctionLength(Register func,
                                        Register funFlagsAndArgCount,
void MacroAssembler::loadFunctionLength(Register func, Register funFlags,
                                        Register output, Label* slowPath) {
#ifdef DEBUG
  {
@@ -3556,25 +3558,23 @@ void MacroAssembler::loadFunctionLength(Register func,
    Label ok;
    uint32_t FlagsToCheck =
        FunctionFlags::SELFHOSTLAZY | FunctionFlags::RESOLVED_LENGTH;
    branchTest32(Assembler::Zero, funFlagsAndArgCount, Imm32(FlagsToCheck),
                 &ok);
    branchTest32(Assembler::Zero, funFlags, Imm32(FlagsToCheck), &ok);
    assumeUnreachable("The function flags should already have been checked.");
    bind(&ok);
  }
#endif // DEBUG

  // NOTE: `funFlagsAndArgCount` and `output` must be allowed to alias.
  // NOTE: `funFlags` and `output` must be allowed to alias.

  // Load the target function's length.
  Label isInterpreted, isBound, lengthLoaded;
  branchTest32(Assembler::NonZero, funFlagsAndArgCount,
               Imm32(FunctionFlags::BOUND_FUN), &isBound);
  branchTest32(Assembler::NonZero, funFlagsAndArgCount,
               Imm32(FunctionFlags::BASESCRIPT), &isInterpreted);
  branchTest32(Assembler::NonZero, funFlags, Imm32(FunctionFlags::BOUND_FUN),
               &isBound);
  branchTest32(Assembler::NonZero, funFlags, Imm32(FunctionFlags::BASESCRIPT),
               &isInterpreted);
  {
    // The length property of a native function stored with the flags.
    move32(funFlagsAndArgCount, output);
    rshift32(Imm32(JSFunction::ArgCountShift), output);
    // Load the length property of a native function.
    load16ZeroExtend(Address(func, JSFunction::offsetOfNargs()), output);
    jump(&lengthLoaded);
  }
  bind(&isBound);
@@ -3588,7 +3588,7 @@ void MacroAssembler::loadFunctionLength(Register func,
  bind(&isInterpreted);
  {
    // Load the length property of an interpreted function.
    loadPrivate(Address(func, JSFunction::offsetOfJitInfoOrScript()), output);
    loadPtr(Address(func, JSFunction::offsetOfScript()), output);
    loadPtr(Address(output, JSScript::offsetOfSharedData()), output);
    branchTestPtr(Assembler::Zero, output, output, slowPath);
    loadPtr(Address(output, SharedImmutableScriptData::offsetOfISD()), output);
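
Illustrative sketch (not part of the patch): in the slot-based layout being
removed here, a native function's argument count lives in the upper half of
the same packed flags-and-argcount word, so the native-length fast path above
is just a shift. The helper name is an assumption for the sketch:

static inline uint16_t NargsFromPackedWord(uint32_t flagsAndArgCount) {
  return uint16_t(flagsAndArgCount >> JSFunction::ArgCountShift);
}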
@@ -3603,7 +3603,7 @@ void MacroAssembler::loadFunctionName(Register func, Register output,
  MOZ_ASSERT(func != output);

  // Get the JSFunction flags.
  load32(Address(func, JSFunction::offsetOfFlagsAndArgCount()), output);
  load16ZeroExtend(Address(func, JSFunction::offsetOfFlags()), output);

  // If the name was previously resolved, the name property may be shadowed.
  branchTest32(Assembler::NonZero, output, Imm32(FunctionFlags::RESOLVED_NAME),
@@ -3629,24 +3629,19 @@ void MacroAssembler::loadFunctionName(Register func, Register output,
  }
  bind(&notBoundTarget);

  Label noName, done;
  Label guessed, hasName;
  branchTest32(Assembler::NonZero, output,
               Imm32(FunctionFlags::HAS_GUESSED_ATOM), &noName);
               Imm32(FunctionFlags::HAS_GUESSED_ATOM), &guessed);
  bind(&loadName);
  Address atomAddr(func, JSFunction::offsetOfAtom());
  branchTestUndefined(Assembler::Equal, atomAddr, &noName);
  unboxString(atomAddr, output);
  jump(&done);

  loadPtr(Address(func, JSFunction::offsetOfAtom()), output);
  branchTestPtr(Assembler::NonZero, output, output, &hasName);
  {
    bind(&noName);
    bind(&guessed);

    // An absent name property defaults to the empty string.
    movePtr(emptyString, output);
  }

  bind(&done);
  bind(&hasName);
}

void MacroAssembler::branchTestType(Condition cond, Register tag,
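
Illustrative sketch (not part of the patch): ignoring the resolved-name and
bound-function handling above, the fallback implemented by loadFunctionName is
"guessed or absent atom means empty string". Using the slot-based accessors
from this patch, and a hypothetical helper name:

static JSString* FunctionNameOrEmpty(JSFunction* fun, JSString* emptyString) {
  if (fun->hasGuessedAtom() || !fun->rawAtom()) {
    return emptyString;  // an absent or guessed name falls back to ""
  }
  return fun->rawAtom();  // JSAtom* converts to JSString*
}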
@@ -1602,8 +1602,8 @@ class MacroAssembler : public MacroAssemblerSpecific {
  // Loads the function length. This handles interpreted, native, and bound
  // functions. The caller is responsible for checking that INTERPRETED_LAZY and
  // RESOLVED_LENGTH flags are not set.
  void loadFunctionLength(Register func, Register funFlagsAndArgCount,
                          Register output, Label* slowPath);
  void loadFunctionLength(Register func, Register funFlags, Register output,
                          Label* slowPath);

  // Loads the function name. This handles interpreted, native, and bound
  // functions.
@@ -1652,16 +1652,6 @@ class MacroAssembler : public MacroAssemblerSpecific {
                                 const Shape* shape,
                                 Label* label);

  inline void branchTestClassIsFunction(Condition cond, Register clasp,
                                        Label* label);
  inline void branchTestObjIsFunction(Condition cond, Register obj,
                                      Register scratch,
                                      Register spectreRegToZero, Label* label);
  inline void branchTestObjIsFunctionNoSpectreMitigations(Condition cond,
                                                          Register obj,
                                                          Register scratch,
                                                          Label* label);

  inline void branchTestObjShape(Condition cond, Register obj, Register shape,
                                 Register scratch, Register spectreRegToZero,
                                 Label* label);
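
Illustrative sketch (not part of the patch): the comment above makes the
caller responsible for ruling out lazily-resolved functions before asking for
the length. A hypothetical call site honouring that contract with the
slot-based entry points (register choices and the slow-path label are
assumptions of the sketch):

static void EmitGuardedFunctionLength(js::jit::MacroAssembler& masm,
                                      Register fun, Register scratch,
                                      Register output, Label* slowPath) {
  masm.load32(Address(fun, JSFunction::offsetOfFlagsAndArgCount()), scratch);
  masm.branchTest32(Assembler::NonZero, scratch,
                    Imm32(FunctionFlags::SELFHOSTLAZY |
                          FunctionFlags::RESOLVED_LENGTH),
                    slowPath);
  masm.loadFunctionLength(fun, scratch, output, slowPath);
}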
@@ -193,8 +193,6 @@ class MOZ_RAII WarpCacheIRTranspiler : public WarpBuilderShared {
  // Returns either MConstant or MNurseryIndex. See WarpObjectField.
  MInstruction* objectStubField(uint32_t offset);

  const JSClass* classForGuardClassKind(GuardClassKind kind);

  [[nodiscard]] bool emitGuardTo(ValOperandId inputId, MIRType type);

  [[nodiscard]] bool emitToString(OperandId inputId, StringOperandId resultId);
@@ -345,46 +343,50 @@ bool WarpCacheIRTranspiler::emitGuardClass(ObjOperandId objId,
                                           GuardClassKind kind) {
  MDefinition* def = getOperand(objId);

  MInstruction* ins;
  if (kind == GuardClassKind::JSFunction) {
    ins = MGuardToFunction::New(alloc(), def);
  } else {
    const JSClass* classp = classForGuardClassKind(kind);
    ins = MGuardToClass::New(alloc(), def, classp);
  const JSClass* classp = nullptr;
  switch (kind) {
    case GuardClassKind::Array:
      classp = &ArrayObject::class_;
      break;
    case GuardClassKind::ArrayBuffer:
      classp = &ArrayBufferObject::class_;
      break;
    case GuardClassKind::SharedArrayBuffer:
      classp = &SharedArrayBufferObject::class_;
      break;
    case GuardClassKind::DataView:
      classp = &DataViewObject::class_;
      break;
    case GuardClassKind::MappedArguments:
      classp = &MappedArgumentsObject::class_;
      break;
    case GuardClassKind::UnmappedArguments:
      classp = &UnmappedArgumentsObject::class_;
      break;
    case GuardClassKind::WindowProxy:
      classp = mirGen().runtime->maybeWindowProxyClass();
      break;
    case GuardClassKind::JSFunction:
      classp = &JSFunction::class_;
      break;
    case GuardClassKind::Set:
      classp = &SetObject::class_;
      break;
    case GuardClassKind::Map:
      classp = &MapObject::class_;
      break;
    default:
      MOZ_CRASH("not yet supported");
  }
  MOZ_ASSERT(classp);

  auto* ins = MGuardToClass::New(alloc(), def, classp);
  add(ins);

  setOperand(objId, ins);
  return true;
}

const JSClass* WarpCacheIRTranspiler::classForGuardClassKind(
    GuardClassKind kind) {
  switch (kind) {
    case GuardClassKind::Array:
      return &ArrayObject::class_;
    case GuardClassKind::ArrayBuffer:
      return &ArrayBufferObject::class_;
    case GuardClassKind::SharedArrayBuffer:
      return &SharedArrayBufferObject::class_;
    case GuardClassKind::DataView:
      return &DataViewObject::class_;
    case GuardClassKind::MappedArguments:
      return &MappedArgumentsObject::class_;
    case GuardClassKind::UnmappedArguments:
      return &UnmappedArgumentsObject::class_;
    case GuardClassKind::WindowProxy:
      return mirGen().runtime->maybeWindowProxyClass();
    case GuardClassKind::Set:
      return &SetObject::class_;
    case GuardClassKind::Map:
      return &MapObject::class_;
    default:
      MOZ_CRASH("not yet supported");
  }
}

bool WarpCacheIRTranspiler::emitGuardAnyClass(ObjOperandId objId,
                                              uint32_t claspOffset) {
  MDefinition* def = getOperand(objId);
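
Illustrative sketch (not part of the patch): on the MGuardToClass path in both
versions above, the guard that eventually executes is a single class-pointer
comparison on the object (the removed MGuardToFunction variant instead accepts
either of the two function classes):

static inline bool GuardObjectHasClass(JSObject* obj, const JSClass* classp) {
  return obj->getClass() == classp;
}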
@ -1059,10 +1059,6 @@ class MacroAssemblerARMCompat : public MacroAssemblerARM {
|
|||
store32(Imm32(0), ToType(dest));
|
||||
store32(src, ToPayload(dest));
|
||||
}
|
||||
void storePrivateValue(ImmGCPtr imm, const Address& dest) {
|
||||
store32(Imm32(0), ToType(dest));
|
||||
storePtr(imm, ToPayload(dest));
|
||||
}
|
||||
|
||||
void loadValue(Address src, ValueOperand val);
|
||||
void loadValue(Operand dest, ValueOperand val) {
|
||||
|
|
|
@ -477,9 +477,7 @@ void JitRuntime::generateArgumentsRectifier(MacroAssembler& masm,
|
|||
ScratchRegisterScope scratch(masm);
|
||||
masm.ma_and(Imm32(CalleeTokenMask), r1, r6, scratch);
|
||||
}
|
||||
masm.ma_ldr(DTRAddr(r6, DtrOffImm(JSFunction::offsetOfFlagsAndArgCount())),
|
||||
r6);
|
||||
masm.ma_lsr(Imm32(JSFunction::ArgCountShift), r6, r6);
|
||||
masm.ma_ldrh(EDtrAddr(r6, EDtrOffImm(JSFunction::offsetOfNargs())), r6);
|
||||
|
||||
masm.ma_sub(r6, r8, r2);
|
||||
|
||||
|
|
|
@ -325,9 +325,6 @@ class MacroAssemblerCompat : public vixl::MacroAssembler {
|
|||
void storePrivateValue(Register src, const Address& dest) {
|
||||
storePtr(src, dest);
|
||||
}
|
||||
void storePrivateValue(ImmGCPtr imm, const Address& dest) {
|
||||
storePtr(imm, dest);
|
||||
}
|
||||
|
||||
void loadValue(Address src, Register val) {
|
||||
Ldr(ARMRegister(val, 64), MemOperand(src));
|
||||
|
|
|
@ -417,9 +417,7 @@ void JitRuntime::generateArgumentsRectifier(MacroAssembler& masm,
|
|||
masm.And(x5, x1, Operand(CalleeTokenMask));
|
||||
|
||||
// Get the arguments from the function object.
|
||||
masm.Ldr(ARMRegister(x6.code(), 32),
|
||||
MemOperand(x5, JSFunction::offsetOfFlagsAndArgCount()));
|
||||
masm.Lsr(x6, x6, JSFunction::ArgCountShift);
|
||||
masm.Ldrh(x6, MemOperand(x5, JSFunction::offsetOfNargs()));
|
||||
|
||||
static_assert(CalleeToken_FunctionConstructing == 0x1,
|
||||
"Constructing must be low-order bit");
|
||||
|
|
|
@ -454,9 +454,8 @@ void JitRuntime::generateArgumentsRectifier(MacroAssembler& masm,
|
|||
|
||||
masm.mov(calleeTokenReg, numArgsReg);
|
||||
masm.andPtr(Imm32(CalleeTokenMask), numArgsReg);
|
||||
masm.load32(Address(numArgsReg, JSFunction::offsetOfFlagsAndArgCount()),
|
||||
numArgsReg);
|
||||
masm.rshift32(Imm32(JSFunction::ArgCountShift), numArgsReg);
|
||||
masm.load16ZeroExtend(Address(numArgsReg, JSFunction::offsetOfNargs()),
|
||||
numArgsReg);
|
||||
|
||||
masm.as_subu(t1, numArgsReg, s3);
|
||||
|
||||
|
|
|
@ -587,9 +587,6 @@ class MacroAssemblerMIPS64Compat : public MacroAssemblerMIPS64 {
|
|||
void storePrivateValue(Register src, const Address& dest) {
|
||||
storePtr(src, dest);
|
||||
}
|
||||
void storePrivateValue(ImmGCPtr imm, const Address& dest) {
|
||||
storePtr(imm, dest);
|
||||
}
|
||||
|
||||
void loadValue(Address src, ValueOperand val);
|
||||
void loadValue(Operand dest, ValueOperand val) {
|
||||
|
|
|
@ -461,9 +461,8 @@ void JitRuntime::generateArgumentsRectifier(MacroAssembler& masm,
|
|||
calleeTokenReg);
|
||||
masm.mov(calleeTokenReg, numArgsReg);
|
||||
masm.andPtr(Imm32(uint32_t(CalleeTokenMask)), numArgsReg);
|
||||
masm.load32(Address(numArgsReg, JSFunction::offsetOfFlagsAndArgCount()),
|
||||
numArgsReg);
|
||||
masm.rshift32(Imm32(JSFunction::ArgCountShift), numArgsReg);
|
||||
masm.load16ZeroExtend(Address(numArgsReg, JSFunction::offsetOfNargs()),
|
||||
numArgsReg);
|
||||
|
||||
// Stash another copy in t3, since we are going to do destructive operations
|
||||
// on numArgsReg
|
||||
|
|
|
@ -172,9 +172,6 @@ class MacroAssemblerX64 : public MacroAssemblerX86Shared {
|
|||
void storePrivateValue(Register src, const Address& dest) {
|
||||
storePtr(src, dest);
|
||||
}
|
||||
void storePrivateValue(ImmGCPtr imm, const Address& dest) {
|
||||
storePtr(imm, dest);
|
||||
}
|
||||
void loadValue(Operand src, ValueOperand val) { movq(src, val.valueReg()); }
|
||||
void loadValue(Address src, ValueOperand val) {
|
||||
loadValue(Operand(src), val);
|
||||
|
|
|
@ -490,8 +490,7 @@ void JitRuntime::generateArgumentsRectifier(MacroAssembler& masm,
|
|||
masm.loadPtr(Address(rsp, RectifierFrameLayout::offsetOfCalleeToken()), rax);
|
||||
masm.mov(rax, rcx);
|
||||
masm.andq(Imm32(uint32_t(CalleeTokenMask)), rcx);
|
||||
masm.load32(Operand(rcx, JSFunction::offsetOfFlagsAndArgCount()), rcx);
|
||||
masm.rshift32(Imm32(JSFunction::ArgCountShift), rcx);
|
||||
masm.movzwl(Operand(rcx, JSFunction::offsetOfNargs()), rcx);
|
||||
|
||||
// Stash another copy in r11, since we are going to do destructive operations
|
||||
// on rcx
|
||||
|
|
|
@ -183,10 +183,6 @@ class MacroAssemblerX86 : public MacroAssemblerX86Shared {
|
|||
store32(Imm32(0), ToType(dest));
|
||||
store32(src, ToPayload(dest));
|
||||
}
|
||||
void storePrivateValue(ImmGCPtr imm, const Address& dest) {
|
||||
store32(Imm32(0), ToType(dest));
|
||||
movl(imm, Operand(ToPayload(dest)));
|
||||
}
|
||||
void loadValue(Operand src, ValueOperand val) {
|
||||
Operand payload = ToPayload(src);
|
||||
Operand type = ToType(src);
|
||||
|
|
|
@ -416,8 +416,7 @@ void JitRuntime::generateArgumentsRectifier(MacroAssembler& masm,
|
|||
masm.loadPtr(Address(esp, RectifierFrameLayout::offsetOfCalleeToken()), eax);
|
||||
masm.mov(eax, ecx);
|
||||
masm.andl(Imm32(CalleeTokenMask), ecx);
|
||||
masm.mov(Operand(ecx, JSFunction::offsetOfFlagsAndArgCount()), ecx);
|
||||
masm.rshift32(Imm32(JSFunction::ArgCountShift), ecx);
|
||||
masm.movzwl(Operand(ecx, JSFunction::offsetOfNargs()), ecx);
|
||||
|
||||
// The frame pointer and its padding are pushed on the stack.
|
||||
// Including |this|, there are (|nformals| + 1) arguments to push to the
|
||||
|
@ -515,8 +514,7 @@ void JitRuntime::generateArgumentsRectifier(MacroAssembler& masm,
|
|||
sizeof(RectifierFrameLayout) + sizeof(Value) + sizeof(void*));
|
||||
|
||||
masm.andl(Imm32(CalleeTokenMask), ebx);
|
||||
masm.movl(Operand(ebx, JSFunction::offsetOfFlagsAndArgCount()), ebx);
|
||||
masm.rshift32(Imm32(JSFunction::ArgCountShift), ebx);
|
||||
masm.movzwl(Operand(ebx, JSFunction::offsetOfNargs()), ebx);
|
||||
|
||||
BaseValueIndex dst(esp, ebx, sizeof(Value));
|
||||
|
||||
|
|
|
@@ -1826,7 +1826,7 @@ JS_PUBLIC_API JSObject* JS_NewObject(JSContext* cx, const JSClass* clasp) {
    clasp = &PlainObject::class_; /* default class is Object */
  }

  MOZ_ASSERT(!clasp->isJSFunction());
  MOZ_ASSERT(clasp != &JSFunction::class_);
  MOZ_ASSERT(!(clasp->flags & JSCLASS_IS_GLOBAL));

  return NewBuiltinClassInstance(cx, clasp);
@@ -1844,7 +1844,7 @@ JS_PUBLIC_API JSObject* JS_NewObjectWithGivenProto(JSContext* cx,
    clasp = &PlainObject::class_; /* default class is Object */
  }

  MOZ_ASSERT(!clasp->isJSFunction());
  MOZ_ASSERT(clasp != &JSFunction::class_);
  MOZ_ASSERT(!(clasp->flags & JSCLASS_IS_GLOBAL));

  return NewObjectWithGivenProto(cx, clasp, proto);
@@ -547,21 +547,20 @@ static const unsigned JS_FUNCTION_INTERPRETED_BITS = 0x0060;
static MOZ_ALWAYS_INLINE const JSJitInfo* FUNCTION_VALUE_TO_JITINFO(
    const JS::Value& v) {
  JSObject* obj = &v.toObject();
  MOZ_ASSERT(JS::GetClass(obj)->isJSFunction());
  MOZ_ASSERT(JS::GetClass(obj) == js::FunctionClassPtr);

  auto* fun = reinterpret_cast<JS::shadow::Function*>(obj);
  MOZ_ASSERT(!(fun->flagsAndArgCount() & js::JS_FUNCTION_INTERPRETED_BITS),
  MOZ_ASSERT(!(fun->flags & js::JS_FUNCTION_INTERPRETED_BITS),
             "Unexpected non-native function");

  return static_cast<const JSJitInfo*>(fun->jitInfoOrScript());
  return fun->jitinfo;
}

static MOZ_ALWAYS_INLINE void SET_JITINFO(JSFunction* func,
                                          const JSJitInfo* info) {
  auto* fun = reinterpret_cast<JS::shadow::Function*>(func);
  MOZ_ASSERT(!(fun->flagsAndArgCount() & js::JS_FUNCTION_INTERPRETED_BITS));

  fun->setJitInfoOrScript(const_cast<JSJitInfo*>(info));
  MOZ_ASSERT(!(fun->flags & js::JS_FUNCTION_INTERPRETED_BITS));
  fun->jitinfo = info;
}

static_assert(sizeof(jsid) == sizeof(void*));
@@ -3256,7 +3256,7 @@ static bool GetScriptAndPCArgs(JSContext* cx, CallArgs& args,
  if (!args.get(0).isUndefined()) {
    HandleValue v = args[0];
    unsigned intarg = 0;
    if (v.isObject() && JS::GetClass(&v.toObject())->isJSFunction()) {
    if (v.isObject() && JS::GetClass(&v.toObject()) == &JSFunction::class_) {
      script = TestingFunctionArgumentToScript(cx, v);
      if (!script) {
        return false;
@@ -155,21 +155,14 @@ class FunctionFlags {

  // For flag combinations the type is int.
  bool hasFlags(uint16_t flags) const { return flags_ & flags; }
  FunctionFlags& setFlags(uint16_t flags) {
    flags_ |= flags;
    return *this;
  }
  FunctionFlags& clearFlags(uint16_t flags) {
    flags_ &= ~flags;
    return *this;
  }
  FunctionFlags& setFlags(uint16_t flags, bool set) {
  void setFlags(uint16_t flags) { flags_ |= flags; }
  void clearFlags(uint16_t flags) { flags_ &= ~flags; }
  void setFlags(uint16_t flags, bool set) {
    if (set) {
      setFlags(flags);
    } else {
      clearFlags(flags);
    }
    return *this;
  }

  FunctionKind kind() const {
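
Illustrative sketch (not part of the patch): the reference-returning setters
being removed above are what let the slot-based JSFunction chain flag updates
into a single expression before writing the packed word back. A minimal
example of that chaining, valid only against the removed variant (helper name
is an assumption):

static inline uint16_t WithResolvedName(js::FunctionFlags flags) {
  return flags.setResolvedName().toRaw();  // copy, chain, then read back raw bits
}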
@ -272,61 +265,60 @@ class FunctionFlags {
|
|||
return isSelfHostedOrIntrinsic() && isNativeFun();
|
||||
}
|
||||
|
||||
FunctionFlags& setKind(FunctionKind kind) {
|
||||
void setKind(FunctionKind kind) {
|
||||
this->flags_ &= ~FUNCTION_KIND_MASK;
|
||||
this->flags_ |= static_cast<uint16_t>(kind) << FUNCTION_KIND_SHIFT;
|
||||
return *this;
|
||||
}
|
||||
|
||||
// Make the function constructible.
|
||||
FunctionFlags& setIsConstructor() {
|
||||
void setIsConstructor() {
|
||||
MOZ_ASSERT(!isConstructor());
|
||||
MOZ_ASSERT(isSelfHostedBuiltin());
|
||||
return setFlags(CONSTRUCTOR);
|
||||
setFlags(CONSTRUCTOR);
|
||||
}
|
||||
|
||||
FunctionFlags& setIsBoundFunction() {
|
||||
void setIsBoundFunction() {
|
||||
MOZ_ASSERT(!isBoundFunction());
|
||||
return setFlags(BOUND_FUN);
|
||||
setFlags(BOUND_FUN);
|
||||
}
|
||||
|
||||
FunctionFlags& setIsSelfHostedBuiltin() {
|
||||
void setIsSelfHostedBuiltin() {
|
||||
MOZ_ASSERT(isInterpreted());
|
||||
MOZ_ASSERT(!isSelfHostedBuiltin());
|
||||
setFlags(SELF_HOSTED);
|
||||
// Self-hosted functions should not be constructable.
|
||||
return clearFlags(CONSTRUCTOR);
|
||||
clearFlags(CONSTRUCTOR);
|
||||
}
|
||||
FunctionFlags& setIsIntrinsic() {
|
||||
void setIsIntrinsic() {
|
||||
MOZ_ASSERT(isNativeFun());
|
||||
MOZ_ASSERT(!isIntrinsic());
|
||||
return setFlags(SELF_HOSTED);
|
||||
setFlags(SELF_HOSTED);
|
||||
}
|
||||
|
||||
FunctionFlags& setResolvedLength() { return setFlags(RESOLVED_LENGTH); }
|
||||
FunctionFlags& setResolvedName() { return setFlags(RESOLVED_NAME); }
|
||||
void setResolvedLength() { setFlags(RESOLVED_LENGTH); }
|
||||
void setResolvedName() { setFlags(RESOLVED_NAME); }
|
||||
|
||||
FunctionFlags& setInferredName() { return setFlags(HAS_INFERRED_NAME); }
|
||||
void setInferredName() { setFlags(HAS_INFERRED_NAME); }
|
||||
|
||||
FunctionFlags& setGuessedAtom() { return setFlags(HAS_GUESSED_ATOM); }
|
||||
void setGuessedAtom() { setFlags(HAS_GUESSED_ATOM); }
|
||||
|
||||
FunctionFlags& setPrefixedBoundFunctionName() {
|
||||
return setFlags(HAS_BOUND_FUNCTION_NAME_PREFIX);
|
||||
void setPrefixedBoundFunctionName() {
|
||||
setFlags(HAS_BOUND_FUNCTION_NAME_PREFIX);
|
||||
}
|
||||
|
||||
FunctionFlags& setSelfHostedLazy() { return setFlags(SELFHOSTLAZY); }
|
||||
FunctionFlags& clearSelfHostedLazy() { return clearFlags(SELFHOSTLAZY); }
|
||||
FunctionFlags& setBaseScript() { return setFlags(BASESCRIPT); }
|
||||
FunctionFlags& clearBaseScript() { return clearFlags(BASESCRIPT); }
|
||||
void setSelfHostedLazy() { setFlags(SELFHOSTLAZY); }
|
||||
void clearSelfHostedLazy() { clearFlags(SELFHOSTLAZY); }
|
||||
void setBaseScript() { setFlags(BASESCRIPT); }
|
||||
void clearBaseScript() { clearFlags(BASESCRIPT); }
|
||||
|
||||
FunctionFlags& setWasmJitEntry() { return setFlags(WASM_JIT_ENTRY); }
|
||||
void setWasmJitEntry() { setFlags(WASM_JIT_ENTRY); }
|
||||
|
||||
bool isExtended() const { return hasFlags(EXTENDED); }
|
||||
FunctionFlags& setIsExtended() { return setFlags(EXTENDED); }
|
||||
void setIsExtended() { setFlags(EXTENDED); }
|
||||
|
||||
bool isNativeConstructor() const { return hasFlags(NATIVE_CTOR); }
|
||||
|
||||
FunctionFlags& setIsGhost() { return setFlags(GHOST_FUNCTION); }
|
||||
void setIsGhost() { setFlags(GHOST_FUNCTION); }
|
||||
bool isGhost() const { return hasFlags(GHOST_FUNCTION); }
|
||||
|
||||
static uint16_t HasJitEntryFlags(bool isConstructing) {
|
||||
|
|
|
@@ -816,7 +816,7 @@ JSFunction* GlobalObject::createConstructor(JSContext* cx, Native ctor,

static NativeObject* CreateBlankProto(JSContext* cx, const JSClass* clasp,
                                      HandleObject proto) {
  MOZ_ASSERT(!clasp->isJSFunction());
  MOZ_ASSERT(clasp != &JSFunction::class_);

  RootedObject blankProto(cx, NewTenuredObjectWithGivenProto(cx, clasp, proto));
  if (!blankProto) {
@@ -573,8 +573,7 @@ bool js::Call(JSContext* cx, HandleValue fval, HandleValue thisv,
static bool InternalConstruct(JSContext* cx, const AnyConstructArgs& args) {
  MOZ_ASSERT(args.array() + args.length() + 1 == args.end(),
             "must pass constructing arguments to a construction attempt");
  MOZ_ASSERT(!FunctionClass.getConstruct());
  MOZ_ASSERT(!ExtendedFunctionClass.getConstruct());
  MOZ_ASSERT(!JSFunction::class_.getConstruct());

  // Callers are responsible for enforcing these preconditions.
  MOZ_ASSERT(IsConstructor(args.calleev()),
@ -56,10 +56,6 @@ inline JSFunction* JSFunction::create(JSContext* cx, js::gc::AllocKind kind,
|
|||
const JSClass* clasp = shape->getObjectClass();
|
||||
MOZ_ASSERT(clasp->isNativeObject());
|
||||
MOZ_ASSERT(clasp->isJSFunction());
|
||||
MOZ_ASSERT_IF(kind == js::gc::AllocKind::FUNCTION,
|
||||
clasp == js::FunctionClassPtr);
|
||||
MOZ_ASSERT_IF(kind == js::gc::AllocKind::FUNCTION_EXTENDED,
|
||||
clasp == js::FunctionExtendedClassPtr);
|
||||
|
||||
static constexpr size_t NumDynamicSlots = 0;
|
||||
MOZ_ASSERT(calculateDynamicSlots(shape->numFixedSlots(), shape->slotSpan(),
|
||||
|
@ -76,14 +72,24 @@ inline JSFunction* JSFunction::create(JSContext* cx, js::gc::AllocKind kind,
|
|||
nobj->initEmptyDynamicSlots();
|
||||
nobj->setEmptyElements();
|
||||
|
||||
MOZ_ASSERT(shape->slotSpan() == 0);
|
||||
|
||||
JSFunction* fun = static_cast<JSFunction*>(nobj);
|
||||
fun->initializeSlotRange(0, shape->slotSpan());
|
||||
fun->initFlagsAndArgCount();
|
||||
fun->initFixedSlot(NativeJitInfoOrInterpretedScriptSlot,
|
||||
JS::PrivateValue(nullptr));
|
||||
fun->nargs_ = 0;
|
||||
|
||||
// This must be overwritten by some ultimate caller: there's no default
|
||||
// value to which we could sensibly initialize this.
|
||||
MOZ_MAKE_MEM_UNDEFINED(&fun->u, sizeof(u));
|
||||
|
||||
fun->atom_.init(nullptr);
|
||||
|
||||
if (kind == js::gc::AllocKind::FUNCTION_EXTENDED) {
|
||||
fun->setFlags(FunctionFlags::EXTENDED);
|
||||
for (js::GCPtrValue& extendedSlot : fun->toExtended()->extendedSlots) {
|
||||
extendedSlot.init(JS::UndefinedValue());
|
||||
}
|
||||
} else {
|
||||
fun->setFlags(0);
|
||||
}
|
||||
|
||||
MOZ_ASSERT(!clasp->shouldDelayMetadataBuilder(),
|
||||
|
|
|
@ -761,20 +761,33 @@ bool JS::OrdinaryHasInstance(JSContext* cx, HandleObject objArg, HandleValue v,
|
|||
}
|
||||
|
||||
inline void JSFunction::trace(JSTracer* trc) {
|
||||
// Functions can be marked as interpreted despite having no script yet at
|
||||
// some points when parsing, and can be lazy with no lazy script for
|
||||
// self-hosted code.
|
||||
MOZ_ASSERT(!getFixedSlot(NativeJitInfoOrInterpretedScriptSlot).isGCThing());
|
||||
if (isInterpreted() && hasBaseScript()) {
|
||||
if (BaseScript* script = baseScript()) {
|
||||
if (isExtended()) {
|
||||
TraceRange(trc, std::size(toExtended()->extendedSlots),
|
||||
(GCPtrValue*)toExtended()->extendedSlots, "nativeReserved");
|
||||
}
|
||||
|
||||
TraceNullableEdge(trc, &atom_, "atom");
|
||||
|
||||
if (isInterpreted()) {
|
||||
// Functions can be marked as interpreted despite having no script
|
||||
// yet at some points when parsing, and can be lazy with no lazy script
|
||||
// for self-hosted code.
|
||||
if (isIncomplete()) {
|
||||
MOZ_ASSERT(u.scripted.s.script_ == nullptr);
|
||||
} else if (hasBaseScript()) {
|
||||
BaseScript* script = u.scripted.s.script_;
|
||||
TraceManuallyBarrieredEdge(trc, &script, "script");
|
||||
// Self-hosted scripts are shared with workers but are never
|
||||
// relocated. Skip unnecessary writes to prevent the possible data race.
|
||||
if (baseScript() != script) {
|
||||
setFixedSlot(NativeJitInfoOrInterpretedScriptSlot,
|
||||
JS::PrivateValue(script));
|
||||
if (u.scripted.s.script_ != script) {
|
||||
u.scripted.s.script_ = script;
|
||||
}
|
||||
}
|
||||
// NOTE: The u.scripted.s.selfHostedLazy_ does not point to GC things.
|
||||
|
||||
if (u.scripted.env_) {
|
||||
TraceManuallyBarrieredEdge(trc, &u.scripted.env_, "fun_environment");
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -1063,7 +1076,7 @@ bool js::fun_call(JSContext* cx, unsigned argc, Value* vp) {
|
|||
// |Function.prototype.call| and would conclude, "Function.prototype.call
|
||||
// is not a function". Grotesque.)
|
||||
if (!IsCallable(func)) {
|
||||
ReportIncompatibleMethod(cx, args, &FunctionClass);
|
||||
ReportIncompatibleMethod(cx, args, &JSFunction::class_);
|
||||
return false;
|
||||
}
|
||||
|
||||
|
@ -1095,7 +1108,7 @@ bool js::fun_apply(JSContext* cx, unsigned argc, Value* vp) {
|
|||
// have side effects or throw an exception.
|
||||
HandleValue fval = args.thisv();
|
||||
if (!IsCallable(fval)) {
|
||||
ReportIncompatibleMethod(cx, args, &FunctionClass);
|
||||
ReportIncompatibleMethod(cx, args, &JSFunction::class_);
|
||||
return false;
|
||||
}
|
||||
|
||||
|
@@ -1164,20 +1177,11 @@ static const ClassSpec JSFunctionClassSpec = {
    CreateFunctionConstructor, CreateFunctionPrototype, nullptr, nullptr,
    function_methods, function_properties};

const JSClass js::FunctionClass = {
    js_Function_str,
    JSCLASS_HAS_CACHED_PROTO(JSProto_Function) |
        JSCLASS_HAS_RESERVED_SLOTS(JSFunction::SlotCount),
    &JSFunctionClassOps, &JSFunctionClassSpec};
const JSClass JSFunction::class_ = {js_Function_str,
                                    JSCLASS_HAS_CACHED_PROTO(JSProto_Function),
                                    &JSFunctionClassOps, &JSFunctionClassSpec};

const JSClass js::ExtendedFunctionClass = {
    js_Function_str,
    JSCLASS_HAS_CACHED_PROTO(JSProto_Function) |
        JSCLASS_HAS_RESERVED_SLOTS(FunctionExtended::SlotCount),
    &JSFunctionClassOps, &JSFunctionClassSpec};

const JSClass* const js::FunctionClassPtr = &FunctionClass;
const JSClass* const js::FunctionExtendedClassPtr = &ExtendedFunctionClass;
const JSClass* const js::FunctionClassPtr = &JSFunction::class_;

bool JSFunction::isDerivedClassConstructor() const {
  bool derived = hasBaseScript() && baseScript()->isDerivedClassConstructor();
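
Illustrative sketch (not part of the patch): the difference between the two
class definitions above is that the slot-based variants advertise their fixed
slots through JSCLASS_HAS_RESERVED_SLOTS, which packs the slot count into
JSClass::flags. A toy class (name is an assumption) showing the same flag
usage:

static const JSClass ExampleClassWithTwoSlots = {"ExampleWithTwoSlots",
                                                 JSCLASS_HAS_RESERVED_SLOTS(2)};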
@ -1906,12 +1910,6 @@ static bool NewFunctionEnvironmentIsWellFormed(JSContext* cx,
|
|||
}
|
||||
#endif
|
||||
|
||||
static inline const JSClass* FunctionClassForAllocKind(
|
||||
gc::AllocKind allocKind) {
|
||||
return (allocKind == gc::AllocKind::FUNCTION) ? FunctionClassPtr
|
||||
: FunctionExtendedClassPtr;
|
||||
}
|
||||
|
||||
JSFunction* js::NewFunctionWithProto(
|
||||
JSContext* cx, Native native, unsigned nargs, FunctionFlags flags,
|
||||
HandleObject enclosingEnv, HandleAtom atom, HandleObject proto,
|
||||
|
@ -1924,10 +1922,8 @@ JSFunction* js::NewFunctionWithProto(
|
|||
|
||||
// NOTE: Keep this in sync with `CreateFunctionFast` in Stencil.cpp
|
||||
|
||||
const JSClass* clasp = FunctionClassForAllocKind(allocKind);
|
||||
|
||||
JSFunction* fun = static_cast<JSFunction*>(
|
||||
NewObjectWithClassProto(cx, clasp, proto, allocKind, newKind));
|
||||
JSFunction* fun =
|
||||
NewObjectWithClassProto<JSFunction>(cx, proto, allocKind, newKind);
|
||||
if (!fun) {
|
||||
return nullptr;
|
||||
}
|
||||
|
@ -1950,6 +1946,9 @@ JSFunction* js::NewFunctionWithProto(
|
|||
MOZ_ASSERT(fun->isNativeFun());
|
||||
fun->initNative(native, nullptr);
|
||||
}
|
||||
if (allocKind == gc::AllocKind::FUNCTION_EXTENDED) {
|
||||
fun->initializeExtended();
|
||||
}
|
||||
fun->initAtom(atom);
|
||||
|
||||
return fun;
|
||||
|
@ -2018,11 +2017,9 @@ static inline JSFunction* NewFunctionClone(JSContext* cx, HandleFunction fun,
|
|||
}
|
||||
}
|
||||
|
||||
const JSClass* clasp = FunctionClassForAllocKind(allocKind);
|
||||
|
||||
RootedFunction clone(cx);
|
||||
clone = static_cast<JSFunction*>(
|
||||
NewObjectWithClassProto(cx, clasp, cloneProto, allocKind, newKind));
|
||||
clone =
|
||||
NewObjectWithClassProto<JSFunction>(cx, cloneProto, allocKind, newKind);
|
||||
if (!clone) {
|
||||
return nullptr;
|
||||
}
|
||||
|
@ -2052,6 +2049,8 @@ static inline JSFunction* NewFunctionClone(JSContext* cx, HandleFunction fun,
|
|||
for (unsigned i = 0; i < FunctionExtended::NUM_EXTENDED_SLOTS; i++) {
|
||||
clone->initExtendedSlot(i, fun->getExtendedSlot(i));
|
||||
}
|
||||
} else {
|
||||
clone->initializeExtended();
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -38,109 +38,97 @@ static constexpr uint32_t BoundFunctionEnvArgsSlot = 4;
|
|||
static const char FunctionConstructorMedialSigils[] = ") {\n";
|
||||
static const char FunctionConstructorFinalBrace[] = "\n}";
|
||||
|
||||
// JSFunctions can have one of two classes:
|
||||
extern const JSClass FunctionClass;
|
||||
extern const JSClass ExtendedFunctionClass;
|
||||
|
||||
} // namespace js
|
||||
|
||||
class JSFunction : public js::NativeObject {
|
||||
public:
|
||||
static_assert(sizeof(js::FunctionFlags) == sizeof(uint16_t));
|
||||
static constexpr size_t ArgCountShift = 16;
|
||||
static constexpr size_t FlagsMask = js::BitMask(ArgCountShift);
|
||||
static constexpr size_t ArgCountMask = js::BitMask(16) << ArgCountShift;
|
||||
|
||||
enum {
|
||||
/*
|
||||
* Bitfield composed of FunctionFlags and argument count, stored as a
|
||||
* PrivateUint32Value.
|
||||
*
|
||||
* If any of these flags needs to be accessed in off-thread JIT compilation,
|
||||
* copy it to js::jit::WrappedFunction.
|
||||
*/
|
||||
FlagsAndArgCountSlot,
|
||||
|
||||
/*
|
||||
* For native functions, the native method pointer stored as a private
|
||||
* value, or undefined.
|
||||
*
|
||||
* For interpreted functions, the environment object for new activations or
|
||||
* null.
|
||||
*/
|
||||
NativeFuncOrInterpretedEnvSlot,
|
||||
|
||||
/*
|
||||
* For native functions this is one of:
|
||||
*
|
||||
* - JSJitInfo* to be used by the JIT, only used if isBuiltinNative() for
|
||||
* builtin natives
|
||||
*
|
||||
* - wasm function index for wasm/asm.js without a jit entry. Always has
|
||||
* the low bit set to ensure it's never identical to a BaseScript*
|
||||
* pointer
|
||||
*
|
||||
* - a wasm JIT entry
|
||||
*
|
||||
* The JIT depends on none of the above being a valid BaseScript pointer.
|
||||
*
|
||||
* For interpreted functions this is either a BaseScript or the
|
||||
* SelfHostedLazyScript pointer.
|
||||
*
|
||||
* These are all stored as private values, because the JIT assumes that it
|
||||
* can access the SelfHostedLazyScript and BaseScript pointer in the same
|
||||
* way.
|
||||
*/
|
||||
NativeJitInfoOrInterpretedScriptSlot,
|
||||
|
||||
// The `atom_` field can have different meanings depending on the function
|
||||
// type and flags. It is used for diagnostics, decompiling, and
|
||||
//
|
||||
// 1. If the function is not a bound function:
|
||||
// a. If HAS_GUESSED_ATOM is not set, to store the initial value of the
|
||||
// "name" property of functions. But also see RESOLVED_NAME.
|
||||
// b. If HAS_GUESSED_ATOM is set, `atom_` is only used for diagnostics,
|
||||
// but must not be used for the "name" property.
|
||||
// c. If HAS_INFERRED_NAME is set, the function wasn't given an explicit
|
||||
// name in the source text, e.g. `function fn(){}`, but instead it
|
||||
// was inferred based on how the function was defined in the source
|
||||
// text. The exact name inference rules are defined in the ECMAScript
|
||||
// specification.
|
||||
// Name inference can happen at compile-time, for example in
|
||||
// `var fn = function(){}`, or it can happen at runtime, for example
|
||||
// in `var o = {[Symbol.iterator]: function(){}}`. When it happens at
|
||||
// compile-time, the HAS_INFERRED_NAME is set directly in the
|
||||
// bytecode emitter, when it happens at runtime, the flag is set when
|
||||
// evaluating the JSOp::SetFunName bytecode.
|
||||
// d. HAS_GUESSED_ATOM and HAS_INFERRED_NAME cannot both be set.
|
||||
// e. `atom_` can be null if neither an explicit, nor inferred, nor a
|
||||
// guessed name was set.
|
||||
//
|
||||
// 2. If the function is a bound function:
|
||||
// a. To store the initial value of the "name" property.
|
||||
// b. If HAS_BOUND_FUNCTION_NAME_PREFIX is not set, `atom_` doesn't
|
||||
// contain the "bound " prefix which is prepended to the "name"
|
||||
// property of bound functions per ECMAScript.
|
||||
// c. Bound functions can never have an inferred or guessed name.
|
||||
// d. `atom_` is never null for bound functions.
|
||||
//
|
||||
// Self-hosted functions have two names. For example, Array.prototype.sort
|
||||
// has the standard name "sort", but the implementation in Array.js is named
|
||||
// "ArraySort".
|
||||
//
|
||||
// - In the self-hosting realm, these functions have `_atom` set to the
|
||||
// implementation name.
|
||||
//
|
||||
// - When we clone these functions into normal realms, we set `_atom` to
|
||||
// the standard name. (The self-hosted name is also stored on the clone,
|
||||
// in another slot; see GetClonedSelfHostedFunctionName().)
|
||||
AtomSlot,
|
||||
|
||||
SlotCount
|
||||
};
|
||||
static const JSClass class_;
|
||||
|
||||
private:
|
||||
/*
|
||||
* number of formal arguments
|
||||
* (including defaults and the rest parameter unlike f.length)
|
||||
*/
|
||||
uint16_t nargs_;
|
||||
|
||||
/*
|
||||
* Bitfield composed of the above Flags enum, as well as the kind.
|
||||
*
|
||||
* If any of these flags needs to be accessed in off-thread JIT
|
||||
* compilation, copy it to js::jit::WrappedFunction.
|
||||
*/
|
||||
using FunctionFlags = js::FunctionFlags;
|
||||
FunctionFlags flags_;
|
||||
|
||||
union U {
|
||||
class {
|
||||
friend class JSFunction;
|
||||
js::Native func_; /* native method pointer or null */
|
||||
// Warning: this |extra| union MUST NOT store a value that could be a
|
||||
// valid BaseScript* pointer! JIT guards depend on this.
|
||||
union {
|
||||
// Information about this function to be used by the JIT, only
|
||||
// used if isBuiltinNative(); use the accessor!
|
||||
const JSJitInfo* jitInfo_;
|
||||
// For wasm/asm.js without a jit entry. Always has the low bit set to
|
||||
// ensure it's never identical to a BaseScript* pointer. See warning
|
||||
// above.
|
||||
uintptr_t taggedWasmFuncIndex_;
|
||||
// for wasm that has been given a jit entry
|
||||
void** wasmJitEntry_;
|
||||
} extra;
|
||||
} native;
|
||||
struct {
|
||||
JSObject* env_; /* environment for new activations */
|
||||
union {
|
||||
js::BaseScript* script_;
|
||||
js::SelfHostedLazyScript* selfHostedLazy_;
|
||||
} s;
|
||||
} scripted;
|
||||
} u;
|
||||
|
||||
// The `atom_` field can have different meanings depending on the function
|
||||
// type and flags. It is used for diagnostics, decompiling, and
|
||||
//
|
||||
// 1. If the function is not a bound function:
|
||||
// a. If HAS_GUESSED_ATOM is not set, to store the initial value of the
|
||||
// "name" property of functions. But also see RESOLVED_NAME.
|
||||
// b. If HAS_GUESSED_ATOM is set, `atom_` is only used for diagnostics,
|
||||
// but must not be used for the "name" property.
|
||||
// c. If HAS_INFERRED_NAME is set, the function wasn't given an explicit
|
||||
// name in the source text, e.g. `function fn(){}`, but instead it
|
||||
// was inferred based on how the function was defined in the source
|
||||
// text. The exact name inference rules are defined in the ECMAScript
|
||||
// specification.
|
||||
// Name inference can happen at compile-time, for example in
|
||||
// `var fn = function(){}`, or it can happen at runtime, for example
|
||||
// in `var o = {[Symbol.iterator]: function(){}}`. When it happens at
|
||||
// compile-time, the HAS_INFERRED_NAME is set directly in the
|
||||
// bytecode emitter, when it happens at runtime, the flag is set when
|
||||
// evaluating the JSOp::SetFunName bytecode.
|
||||
// d. HAS_GUESSED_ATOM and HAS_INFERRED_NAME cannot both be set.
|
||||
// e. `atom_` can be null if neither an explicit, nor inferred, nor a
|
||||
// guessed name was set.
|
||||
//
|
||||
// 2. If the function is a bound function:
|
||||
// a. To store the initial value of the "name" property.
|
||||
// b. If HAS_BOUND_FUNCTION_NAME_PREFIX is not set, `atom_` doesn't
|
||||
// contain the "bound " prefix which is prepended to the "name"
|
||||
// property of bound functions per ECMAScript.
|
||||
// c. Bound functions can never have an inferred or guessed name.
|
||||
// d. `atom_` is never null for bound functions.
|
||||
//
|
||||
// Self-hosted functions have two names. For example, Array.prototype.sort
|
||||
// has the standard name "sort", but the implementation in Array.js is named
|
||||
// "ArraySort".
|
||||
//
|
||||
// - In the self-hosting realm, these functions have `_atom` set to the
|
||||
// implementation name.
|
||||
//
|
||||
// - When we clone these functions into normal realms, we set `_atom` to
|
||||
// the standard name. (The self-hosted name is also stored on the clone,
|
||||
// in another slot; see GetClonedSelfHostedFunctionName().)
|
||||
js::GCPtrAtom atom_;
|
||||
|
||||
public:
|
||||
static inline JSFunction* create(JSContext* cx, js::gc::AllocKind kind,
|
||||
|
@ -163,91 +151,81 @@ class JSFunction : public js::NativeObject {
|
|||
return needsFunctionEnvironmentObjects() || needsExtraBodyVarEnvironment();
|
||||
}
|
||||
|
||||
uint32_t flagsAndArgCountRaw() const {
|
||||
return getFixedSlot(FlagsAndArgCountSlot).toPrivateUint32();
|
||||
}
|
||||
static constexpr size_t NArgsBits = sizeof(nargs_) * CHAR_BIT;
|
||||
size_t nargs() const { return nargs_; }
|
||||
|
||||
void initFlagsAndArgCount() {
|
||||
initFixedSlot(FlagsAndArgCountSlot, JS::PrivateUint32Value(0));
|
||||
}
|
||||
FunctionFlags flags() { return flags_; }
|
||||
|
||||
size_t nargs() const { return flagsAndArgCountRaw() >> ArgCountShift; }
|
||||
|
||||
FunctionFlags flags() const {
|
||||
return FunctionFlags(uint16_t(flagsAndArgCountRaw() & FlagsMask));
|
||||
}
|
||||
|
||||
FunctionFlags::FunctionKind kind() const { return flags().kind(); }
|
||||
FunctionFlags::FunctionKind kind() const { return flags_.kind(); }
|
||||
|
||||
/* A function can be classified as either native (C++) or interpreted (JS): */
|
||||
bool isInterpreted() const { return flags().isInterpreted(); }
|
||||
bool isNativeFun() const { return flags().isNativeFun(); }
|
||||
bool isInterpreted() const { return flags_.isInterpreted(); }
|
||||
bool isNativeFun() const { return flags_.isNativeFun(); }
|
||||
|
||||
bool isConstructor() const { return flags().isConstructor(); }
|
||||
bool isConstructor() const { return flags_.isConstructor(); }
|
||||
|
||||
bool isNonBuiltinConstructor() const {
|
||||
return flags().isNonBuiltinConstructor();
|
||||
return flags_.isNonBuiltinConstructor();
|
||||
}
|
||||
|
||||
/* Possible attributes of a native function: */
|
||||
bool isAsmJSNative() const { return flags().isAsmJSNative(); }
|
||||
bool isAsmJSNative() const { return flags_.isAsmJSNative(); }
|
||||
|
||||
bool isWasm() const { return flags().isWasm(); }
|
||||
bool isWasmWithJitEntry() const { return flags().isWasmWithJitEntry(); }
|
||||
bool isWasm() const { return flags_.isWasm(); }
|
||||
bool isWasmWithJitEntry() const { return flags_.isWasmWithJitEntry(); }
|
||||
bool isNativeWithoutJitEntry() const {
|
||||
return flags().isNativeWithoutJitEntry();
|
||||
return flags_.isNativeWithoutJitEntry();
|
||||
}
|
||||
bool isBuiltinNative() const { return flags().isBuiltinNative(); }
|
||||
bool isBuiltinNative() const { return flags_.isBuiltinNative(); }
|
||||
|
||||
bool hasJitEntry() const { return flags().hasJitEntry(); }
|
||||
bool hasJitEntry() const { return flags_.hasJitEntry(); }
|
||||
|
||||
/* Possible attributes of an interpreted function: */
|
||||
bool isBoundFunction() const { return flags().isBoundFunction(); }
|
||||
bool hasInferredName() const { return flags().hasInferredName(); }
|
||||
bool hasGuessedAtom() const { return flags().hasGuessedAtom(); }
|
||||
bool isBoundFunction() const { return flags_.isBoundFunction(); }
|
||||
bool hasInferredName() const { return flags_.hasInferredName(); }
|
||||
bool hasGuessedAtom() const { return flags_.hasGuessedAtom(); }
|
||||
bool hasBoundFunctionNamePrefix() const {
|
||||
return flags().hasBoundFunctionNamePrefix();
|
||||
return flags_.hasBoundFunctionNamePrefix();
|
||||
}
|
||||
|
||||
bool isLambda() const { return flags().isLambda(); }
|
||||
bool isLambda() const { return flags_.isLambda(); }
|
||||
|
||||
// These methods determine which kind of script we hold.
|
||||
//
|
||||
// For live JSFunctions the pointer values will always be non-null, but due to
|
||||
// partial initialization the GC (and other features that scan the heap
|
||||
// These methods determine which of the u.scripted.s union arms are active.
|
||||
// For live JSFunctions the pointer values will always be non-null, but due
|
||||
// to partial initialization the GC (and other features that scan the heap
|
||||
// directly) may still return a null pointer.
|
||||
bool hasSelfHostedLazyScript() const {
|
||||
return flags().hasSelfHostedLazyScript();
|
||||
return flags_.hasSelfHostedLazyScript();
|
||||
}
|
||||
bool hasBaseScript() const { return flags().hasBaseScript(); }
|
||||
bool hasBaseScript() const { return flags_.hasBaseScript(); }
|
||||
|
||||
bool hasBytecode() const {
|
||||
MOZ_ASSERT(!isIncomplete());
|
||||
return hasBaseScript() && baseScript()->hasBytecode();
|
||||
}
|
||||
|
||||
bool isGhost() const { return flags().isGhost(); }
|
||||
bool isGhost() const { return flags_.isGhost(); }
|
||||
|
||||
// Arrow functions store their lexical new.target in the first extended slot.
|
||||
bool isArrow() const { return flags().isArrow(); }
|
||||
bool isArrow() const { return flags_.isArrow(); }
|
||||
// Every class-constructor is also a method.
|
||||
bool isMethod() const { return flags().isMethod(); }
|
||||
bool isClassConstructor() const { return flags().isClassConstructor(); }
|
||||
bool isMethod() const { return flags_.isMethod(); }
|
||||
bool isClassConstructor() const { return flags_.isClassConstructor(); }
|
||||
|
||||
bool isGetter() const { return flags().isGetter(); }
|
||||
bool isSetter() const { return flags().isSetter(); }
|
||||
bool isGetter() const { return flags_.isGetter(); }
|
||||
bool isSetter() const { return flags_.isSetter(); }
|
||||
|
||||
bool allowSuperProperty() const { return flags().allowSuperProperty(); }
|
||||
bool allowSuperProperty() const { return flags_.allowSuperProperty(); }
|
||||
|
||||
bool hasResolvedLength() const { return flags().hasResolvedLength(); }
|
||||
bool hasResolvedName() const { return flags().hasResolvedName(); }
|
||||
bool hasResolvedLength() const { return flags_.hasResolvedLength(); }
|
||||
bool hasResolvedName() const { return flags_.hasResolvedName(); }
|
||||
|
||||
bool isSelfHostedOrIntrinsic() const {
|
||||
return flags().isSelfHostedOrIntrinsic();
|
||||
return flags_.isSelfHostedOrIntrinsic();
|
||||
}
|
||||
bool isSelfHostedBuiltin() const { return flags().isSelfHostedBuiltin(); }
|
||||
bool isSelfHostedBuiltin() const { return flags_.isSelfHostedBuiltin(); }
|
||||
|
||||
bool isIntrinsic() const { return flags().isIntrinsic(); }
|
||||
bool isIntrinsic() const { return flags_.isIntrinsic(); }
|
||||
|
||||
bool hasJitScript() const {
|
||||
if (!hasBaseScript()) {
|
||||
|
@ -261,7 +239,7 @@ class JSFunction : public js::NativeObject {
|
|||
bool isBuiltin() const { return isBuiltinNative() || isSelfHostedBuiltin(); }
|
||||
|
||||
bool isNamedLambda() const {
|
||||
return flags().isNamedLambda(displayAtom() != nullptr);
|
||||
return flags_.isNamedLambda(displayAtom() != nullptr);
|
||||
}
|
||||
|
||||
bool hasLexicalThis() const { return isArrow(); }
|
||||
|
@ -286,33 +264,21 @@ class JSFunction : public js::NativeObject {
|
|||
/* Returns the strictness of this function, which must be interpreted. */
|
||||
bool strict() const { return baseScript()->strict(); }
|
||||
|
||||
void setFlags(FunctionFlags flags) { setFlags(flags.toRaw()); }
|
||||
void setFlags(uint16_t flags) {
|
||||
uint32_t flagsAndArgCount = flagsAndArgCountRaw();
|
||||
flagsAndArgCount &= ~FlagsMask;
|
||||
flagsAndArgCount |= flags;
|
||||
js::HeapSlot& slot = getFixedSlotRef(FlagsAndArgCountSlot);
|
||||
slot.unbarrieredSet(JS::PrivateUint32Value(flagsAndArgCount));
|
||||
}
|
||||
void setFlags(uint16_t flags) { flags_ = FunctionFlags(flags); }
|
||||
void setFlags(FunctionFlags flags) { flags_ = flags; }
|
||||
|
||||
// Make the function constructible.
|
||||
void setIsConstructor() { setFlags(flags().setIsConstructor()); }
|
||||
void setIsConstructor() { flags_.setIsConstructor(); }
|
||||
|
||||
// Can be called multiple times by the parser.
|
||||
void setArgCount(uint16_t nargs) {
|
||||
uint32_t flagsAndArgCount = flagsAndArgCountRaw();
|
||||
flagsAndArgCount &= ~ArgCountMask;
|
||||
flagsAndArgCount |= nargs << ArgCountShift;
|
||||
js::HeapSlot& slot = getFixedSlotRef(FlagsAndArgCountSlot);
|
||||
slot.unbarrieredSet(JS::PrivateUint32Value(flagsAndArgCount));
|
||||
}
|
||||
void setArgCount(uint16_t nargs) { this->nargs_ = nargs; }
|
||||
|
||||
void setIsBoundFunction() { setFlags(flags().setIsBoundFunction()); }
|
||||
void setIsSelfHostedBuiltin() { setFlags(flags().setIsSelfHostedBuiltin()); }
|
||||
void setIsIntrinsic() { setFlags(flags().setIsIntrinsic()); }
|
||||
void setIsBoundFunction() { flags_.setIsBoundFunction(); }
|
||||
void setIsSelfHostedBuiltin() { flags_.setIsSelfHostedBuiltin(); }
|
||||
void setIsIntrinsic() { flags_.setIsIntrinsic(); }
|
||||
|
||||
void setResolvedLength() { setFlags(flags().setResolvedLength()); }
|
||||
void setResolvedName() { setFlags(flags().setResolvedName()); }
|
||||
void setResolvedLength() { flags_.setResolvedLength(); }
|
||||
void setResolvedName() { flags_.setResolvedName(); }
|
||||
|
||||
static bool getUnresolvedLength(JSContext* cx, js::HandleFunction fun,
|
||||
js::MutableHandleValue v);
|
||||
|
@ -326,60 +292,52 @@ class JSFunction : public js::NativeObject {
|
|||
js::HandleFunction fun);
|
||||
|
||||
JSAtom* explicitName() const {
|
||||
return (hasInferredName() || hasGuessedAtom()) ? nullptr : rawAtom();
|
||||
return (hasInferredName() || hasGuessedAtom()) ? nullptr : atom_.get();
|
||||
}
|
||||
|
||||
JSAtom* explicitOrInferredName() const {
|
||||
return hasGuessedAtom() ? nullptr : rawAtom();
|
||||
return hasGuessedAtom() ? nullptr : atom_.get();
|
||||
}
|
||||
|
||||
void initAtom(JSAtom* atom) {
|
||||
MOZ_ASSERT_IF(atom, js::AtomIsMarked(zone(), atom));
|
||||
MOZ_ASSERT(getFixedSlot(AtomSlot).isUndefined());
|
||||
if (atom) {
|
||||
initFixedSlot(AtomSlot, JS::StringValue(atom));
|
||||
}
|
||||
atom_.init(atom);
|
||||
}
|
||||
|
||||
void setAtom(JSAtom* atom) {
|
||||
MOZ_ASSERT_IF(atom, js::AtomIsMarked(zone(), atom));
|
||||
setFixedSlot(AtomSlot, atom ? JS::StringValue(atom) : JS::UndefinedValue());
|
||||
atom_ = atom;
|
||||
}
|
||||
|
||||
JSAtom* displayAtom() const { return rawAtom(); }
|
||||
|
||||
JSAtom* rawAtom() const {
|
||||
JS::Value value = getFixedSlot(AtomSlot);
|
||||
return value.isUndefined() ? nullptr : &value.toString()->asAtom();
|
||||
}
|
||||
JSAtom* displayAtom() const { return atom_; }
|
||||
|
||||
void setInferredName(JSAtom* atom) {
|
||||
MOZ_ASSERT(!rawAtom());
|
||||
MOZ_ASSERT(!atom_);
|
||||
MOZ_ASSERT(atom);
|
||||
MOZ_ASSERT(!hasGuessedAtom());
|
||||
setAtom(atom);
|
||||
setFlags(flags().setInferredName());
|
||||
flags_.setInferredName();
|
||||
}
|
||||
JSAtom* inferredName() const {
|
||||
MOZ_ASSERT(hasInferredName());
|
||||
MOZ_ASSERT(rawAtom());
|
||||
return rawAtom();
|
||||
MOZ_ASSERT(atom_);
|
||||
return atom_;
|
||||
}
|
||||
|
||||
void setGuessedAtom(JSAtom* atom) {
|
||||
MOZ_ASSERT(!rawAtom());
|
||||
MOZ_ASSERT(!atom_);
|
||||
MOZ_ASSERT(atom);
|
||||
MOZ_ASSERT(!hasInferredName());
|
||||
MOZ_ASSERT(!hasGuessedAtom());
|
||||
MOZ_ASSERT(!isBoundFunction());
|
||||
setAtom(atom);
|
||||
setFlags(flags().setGuessedAtom());
|
||||
flags_.setGuessedAtom();
|
||||
}
|
||||
|
||||
void setPrefixedBoundFunctionName(JSAtom* atom) {
|
||||
MOZ_ASSERT(!hasBoundFunctionNamePrefix());
|
||||
MOZ_ASSERT(atom);
|
||||
setFlags(flags().setPrefixedBoundFunctionName());
|
||||
flags_.setPrefixedBoundFunctionName();
|
||||
setAtom(atom);
|
||||
}
|
||||
|
||||
|
@ -392,20 +350,25 @@ class JSFunction : public js::NativeObject {
|
|||
*/
|
||||
JSObject* environment() const {
|
||||
MOZ_ASSERT(isInterpreted());
|
||||
return getFixedSlot(NativeFuncOrInterpretedEnvSlot).toObjectOrNull();
|
||||
return u.scripted.env_;
|
||||
}
|
||||
|
||||
void initEnvironment(JSObject* obj) {
|
||||
MOZ_ASSERT(isInterpreted());
|
||||
initFixedSlot(NativeFuncOrInterpretedEnvSlot, JS::ObjectOrNullValue(obj));
|
||||
reinterpret_cast<js::GCPtrObject*>(&u.scripted.env_)->init(obj);
|
||||
}
|
||||
|
||||
public:
|
||||
static constexpr size_t offsetOfFlagsAndArgCount() {
|
||||
return getFixedSlotOffset(FlagsAndArgCountSlot);
|
||||
static constexpr size_t offsetOfNargs() {
|
||||
return offsetof(JSFunction, nargs_);
|
||||
}
|
||||
static size_t offsetOfEnvironment() { return offsetOfNativeOrEnv(); }
|
||||
static size_t offsetOfAtom() { return getFixedSlotOffset(AtomSlot); }
|
||||
static constexpr size_t offsetOfFlags() {
|
||||
return offsetof(JSFunction, flags_);
|
||||
}
|
||||
static size_t offsetOfEnvironment() {
|
||||
return offsetof(JSFunction, u.scripted.env_);
|
||||
}
|
||||
static size_t offsetOfAtom() { return offsetof(JSFunction, atom_); }
|
||||
|
||||
static bool delazifyLazilyInterpretedFunction(JSContext* cx,
|
||||
js::HandleFunction fun);
|
||||
|
@ -459,42 +422,31 @@ class JSFunction : public js::NativeObject {
|
|||
return nullptr;
|
||||
}
|
||||
|
||||
private:
|
||||
void* nativeJitInfoOrInterpretedScript() const {
|
||||
return getFixedSlot(NativeJitInfoOrInterpretedScriptSlot).toPrivate();
|
||||
}
|
||||
void setNativeJitInfoOrInterpretedScript(void* ptr) {
|
||||
// This always stores a PrivateValue and so doesn't require a barrier.
|
||||
js::HeapSlot& slot = getFixedSlotRef(NativeJitInfoOrInterpretedScriptSlot);
|
||||
    slot.unbarrieredSet(JS::PrivateValue(ptr));
  }

 public:
  // The default state of a JSFunction that is not ready for execution. If
  // observed outside initialization, this is the result of failure during
  // bytecode compilation.
  //
  // A BaseScript is fully initialized before u.script.s.script_ is initialized
  // with a reference to it.
  bool isIncomplete() const {
    return isInterpreted() && !nativeJitInfoOrInterpretedScript();
  }
  bool isIncomplete() const { return isInterpreted() && !u.scripted.s.script_; }

  JSScript* nonLazyScript() const {
    MOZ_ASSERT(hasBytecode());
    return static_cast<JSScript*>(baseScript());
    MOZ_ASSERT(u.scripted.s.script_);
    return static_cast<JSScript*>(u.scripted.s.script_);
  }

  js::SelfHostedLazyScript* selfHostedLazyScript() const {
    MOZ_ASSERT(hasSelfHostedLazyScript());
    return static_cast<js::SelfHostedLazyScript*>(
        nativeJitInfoOrInterpretedScript());
    MOZ_ASSERT(u.scripted.s.selfHostedLazy_);
    return u.scripted.s.selfHostedLazy_;
  }

  // Access fields defined on both lazy and non-lazy scripts.
  js::BaseScript* baseScript() const {
    MOZ_ASSERT(hasBaseScript());
    return static_cast<JSScript*>(nativeJitInfoOrInterpretedScript());
    MOZ_ASSERT(u.scripted.s.script_);
    return u.scripted.s.script_;
  }

  static bool getLength(JSContext* cx, js::HandleFunction fun,
@@ -538,45 +490,34 @@ class JSFunction : public js::NativeObject {
  void initScript(js::BaseScript* script) {
    MOZ_ASSERT_IF(script, realm() == script->realm());
    MOZ_ASSERT(isInterpreted());
    MOZ_ASSERT_IF(hasBaseScript(),
                  !baseScript());  // No write barrier required.
    setNativeJitInfoOrInterpretedScript(script);
    u.scripted.s.script_ = script;
  }

  void initSelfHostedLazyScript(js::SelfHostedLazyScript* lazy) {
    MOZ_ASSERT(isSelfHostedBuiltin());
    MOZ_ASSERT(isInterpreted());
    if (hasBaseScript()) {
      js::gc::PreWriteBarrier(baseScript());
    }
    FunctionFlags f = flags();
    f.clearBaseScript();
    f.setSelfHostedLazy();
    setFlags(f);
    setNativeJitInfoOrInterpretedScript(lazy);
    flags_.clearBaseScript();
    flags_.setSelfHostedLazy();
    u.scripted.s.selfHostedLazy_ = lazy;
    MOZ_ASSERT(hasSelfHostedLazyScript());
  }

  void clearSelfHostedLazyScript() {
    MOZ_ASSERT(isSelfHostedBuiltin());
    MOZ_ASSERT(isInterpreted());
    MOZ_ASSERT(!hasBaseScript());  // No write barrier required.
    FunctionFlags f = flags();
    f.clearSelfHostedLazy();
    f.setBaseScript();
    setFlags(f);
    setNativeJitInfoOrInterpretedScript(nullptr);
    // Note: The selfHostedLazy_ field is not a GC-thing pointer so we don't
    // need to trigger barriers.
    flags_.clearSelfHostedLazy();
    flags_.setBaseScript();
    u.scripted.s.script_ = nullptr;
    MOZ_ASSERT(isIncomplete());
  }

  JSNative native() const {
    MOZ_ASSERT(isNativeFun());
    return nativeUnchecked();
    return u.native.func_;
  }
  JSNative nativeUnchecked() const {
    // Can be called by Ion off-main thread.
    JS::Value value = getFixedSlot(NativeFuncOrInterpretedEnvSlot);
    return reinterpret_cast<JSNative>(value.toPrivate());
    // Called by Ion off-main thread.
    return u.native.func_;
  }

  JSNative maybeNative() const { return isInterpreted() ? nullptr : native(); }
@@ -585,23 +526,23 @@ class JSFunction : public js::NativeObject {
    MOZ_ASSERT(isNativeFun());
    MOZ_ASSERT_IF(jitInfo, isBuiltinNative());
    MOZ_ASSERT(native);
    initFixedSlot(NativeFuncOrInterpretedEnvSlot,
                  JS::PrivateValue(reinterpret_cast<void*>(native)));
    setNativeJitInfoOrInterpretedScript(const_cast<JSJitInfo*>(jitInfo));
    u.native.func_ = native;
    u.native.extra.jitInfo_ = jitInfo;
  }
  bool hasJitInfo() const {
    return isBuiltinNative() && u.native.extra.jitInfo_;
  }
  bool hasJitInfo() const { return isBuiltinNative() && jitInfoUnchecked(); }
  const JSJitInfo* jitInfo() const {
    MOZ_ASSERT(hasJitInfo());
    return jitInfoUnchecked();
    return u.native.extra.jitInfo_;
  }
  const JSJitInfo* jitInfoUnchecked() const {
    // Can be called by Ion off-main thread.
    return static_cast<const JSJitInfo*>(nativeJitInfoOrInterpretedScript());
    // Called by Ion off-main thread.
    return u.native.extra.jitInfo_;
  }
  void setJitInfo(const JSJitInfo* data) {
    MOZ_ASSERT(isBuiltinNative());
    MOZ_ASSERT(data);
    setNativeJitInfoOrInterpretedScript(const_cast<JSJitInfo*>(data));
    u.native.extra.jitInfo_ = data;
  }

  // wasm functions are always natives and either:
@@ -613,39 +554,55 @@ class JSFunction : public js::NativeObject {
  void setWasmFuncIndex(uint32_t funcIndex) {
    MOZ_ASSERT(isWasm() || isAsmJSNative());
    MOZ_ASSERT(!isWasmWithJitEntry());
    MOZ_ASSERT(!nativeJitInfoOrInterpretedScript());
    MOZ_ASSERT(!u.native.extra.taggedWasmFuncIndex_);
    // See wasmFuncIndex_ comment for why we set the low bit.
    uintptr_t tagged = (uintptr_t(funcIndex) << 1) | 1;
    setNativeJitInfoOrInterpretedScript(reinterpret_cast<void*>(tagged));
    u.native.extra.taggedWasmFuncIndex_ = (uintptr_t(funcIndex) << 1) | 1;
  }
  uint32_t wasmFuncIndex() const {
    MOZ_ASSERT(isWasm() || isAsmJSNative());
    MOZ_ASSERT(!isWasmWithJitEntry());
    uintptr_t tagged = uintptr_t(nativeJitInfoOrInterpretedScript());
    MOZ_ASSERT(tagged & 1);
    return tagged >> 1;
    MOZ_ASSERT(u.native.extra.taggedWasmFuncIndex_ & 1);
    return u.native.extra.taggedWasmFuncIndex_ >> 1;
  }
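
Aside: setWasmFuncIndex/wasmFuncIndex above keep a small integer in a field that otherwise holds a pointer, setting the low bit so the value can never be mistaken for an aligned pointer. A minimal standalone sketch of that low-bit tagging, with hypothetical names rather than anything from the patch:

#include <cassert>
#include <cstdint>

// Tag an index so it cannot alias a word-aligned pointer (whose low bit is 0).
static uintptr_t tagIndex(uint32_t index) {
  return (uintptr_t(index) << 1) | 1;  // low bit set marks "index, not pointer"
}

// Recover the index, asserting the value really carries the tag.
static uint32_t untagIndex(uintptr_t tagged) {
  assert(tagged & 1);
  return uint32_t(tagged >> 1);
}

int main() {
  uintptr_t t = tagIndex(42);
  assert(t % 2 == 1);           // never looks like an aligned pointer
  assert(untagIndex(t) == 42);  // round-trips
  return 0;
}

Shifting left by one sacrifices the top bit of the index, which is acceptable as long as indices stay comfortably below the pointer width.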
  void setWasmJitEntry(void** entry) {
    MOZ_ASSERT(*entry);
    MOZ_ASSERT(isWasm());
    MOZ_ASSERT(!isWasmWithJitEntry());
    setFlags(flags().setWasmJitEntry());
    setNativeJitInfoOrInterpretedScript(entry);
    flags_.setWasmJitEntry();
    u.native.extra.wasmJitEntry_ = entry;
    MOZ_ASSERT(isWasmWithJitEntry());
  }
  void** wasmJitEntry() const {
    MOZ_ASSERT(isWasmWithJitEntry());
    return static_cast<void**>(nativeJitInfoOrInterpretedScript());
    MOZ_ASSERT(u.native.extra.wasmJitEntry_);
    return u.native.extra.wasmJitEntry_;
  }

  bool isDerivedClassConstructor() const;
  bool isSyntheticFunction() const;

  static unsigned offsetOfNativeOrEnv() {
    return getFixedSlotOffset(NativeFuncOrInterpretedEnvSlot);
  static unsigned offsetOfNative() {
    return offsetof(JSFunction, u.native.func_);
  }
  static unsigned offsetOfJitInfoOrScript() {
    return getFixedSlotOffset(NativeJitInfoOrInterpretedScriptSlot);
  static unsigned offsetOfScript() {
    static_assert(offsetof(U, scripted.s.script_) ==
                      offsetof(U, native.extra.wasmJitEntry_),
                  "scripted.s.script_ must be at the same offset as "
                  "native.extra.wasmJitEntry_");
    return offsetof(JSFunction, u.scripted.s.script_);
  }
  static unsigned offsetOfNativeOrEnv() {
    static_assert(
        offsetof(U, native.func_) == offsetof(U, scripted.env_),
        "U.native.func_ must be at the same offset as U.scripted.env_");
    return offsetOfNative();
  }
  static unsigned offsetOfBaseScript() {
    return offsetof(JSFunction, u.scripted.s.script_);
  }

  static unsigned offsetOfJitInfo() {
    return offsetof(JSFunction, u.native.extra.jitInfo_);
  }
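
Aside: the static_asserts above exist so that generated JIT code can load one fixed offset without knowing which arm of the union is live. A self-contained sketch of the same guarantee, using made-up types; nested offsetof designators are a widely supported compiler extension that the restored code relies on as well:

#include <cstddef>

// Two layouts that deliberately share the offset of their first word, so code
// that only knows a byte offset can load either representation.
struct NativeRepr {
  void* func;
  const void* extra;
};
struct ScriptedRepr {
  void* env;
  void* script;
};
union Repr {
  NativeRepr native;
  ScriptedRepr scripted;
};

// Compile-time proof that the overlap actually holds; if a field is ever
// reordered, the build fails instead of the JIT reading the wrong member.
static_assert(offsetof(Repr, native.func) == offsetof(Repr, scripted.env),
              "func and env must overlap");
static_assert(offsetof(Repr, native.extra) == offsetof(Repr, scripted.script),
              "extra and script must overlap");

int main() { return 0; }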

  inline void trace(JSTracer* trc);
@@ -665,9 +622,13 @@ class JSFunction : public js::NativeObject {
                                  js::HandleObject targetObj,
                                  int32_t argCount);

 private:
  inline js::FunctionExtended* toExtended();
  inline const js::FunctionExtended* toExtended() const;

 public:
  inline bool isExtended() const {
    bool extended = flags().isExtended();
    bool extended = flags_.isExtended();
    MOZ_ASSERT_IF(isTenured(),
                  extended == (asTenured().getAllocKind() ==
                               js::gc::AllocKind::FUNCTION_EXTENDED));
@@ -675,12 +636,28 @@ class JSFunction : public js::NativeObject {
  }

  /*
   * Accessors for data stored in extended functions. Use setExtendedSlot if the
   * function has already been initialized. Otherwise use initExtendedSlot.
   * Accessors for data stored in extended functions. Use setExtendedSlot if
   * the function has already been initialized. Otherwise use
   * initExtendedSlot.
   */
  inline void initExtendedSlot(uint32_t slot, const js::Value& val);
  inline void setExtendedSlot(uint32_t slot, const js::Value& val);
  inline const js::Value& getExtendedSlot(uint32_t slot) const;
  inline void initializeExtended();
  inline void initExtendedSlot(size_t which, const js::Value& val);
  inline void setExtendedSlot(size_t which, const js::Value& val);
  inline const js::Value& getExtendedSlot(size_t which) const;

  /*
   * Same as `toExtended` and `getExtendedSlot`, but `this` is guaranteed to be
   * an extended function.
   *
   * This function is supposed to be used off-thread, especially the JIT
   * compilation thread, that cannot access JSFunction.flags_, because of
   * a race condition.
   *
   * See Also: WrappedFunction.isExtended_
   */
  inline js::FunctionExtended* toExtendedOffMainThread();
  inline const js::FunctionExtended* toExtendedOffMainThread() const;
  inline const js::Value& getExtendedSlotOffMainThread(size_t which) const;

  /* GC support. */
  js::gc::AllocKind getAllocKind() const {
@@ -701,16 +678,6 @@ class JSFunction : public js::NativeObject {
static_assert(sizeof(JSFunction) == sizeof(JS::shadow::Function),
              "shadow interface must match actual interface");

static_assert(unsigned(JSFunction::FlagsAndArgCountSlot) ==
              unsigned(JS::shadow::Function::FlagsAndArgCountSlot));
static_assert(unsigned(JSFunction::NativeFuncOrInterpretedEnvSlot) ==
              unsigned(JS::shadow::Function::NativeFuncOrInterpretedEnvSlot));
static_assert(
    unsigned(JSFunction::NativeJitInfoOrInterpretedScriptSlot) ==
    unsigned(JS::shadow::Function::NativeJitInfoOrInterpretedScriptSlot));
static_assert(unsigned(JSFunction::AtomSlot) ==
              unsigned(JS::shadow::Function::AtomSlot));
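
Aside: the assertions above pin the public JS::shadow::Function mirror to the real JSFunction layout, so code compiled against only the shadow header still reads the right bytes. A minimal sketch of that shadow-struct pattern with invented types:

#include <cstddef>
#include <cstdint>

// The "real" type, normally hidden behind an internal header (hypothetical
// stand-ins; not the actual JSFunction or JS::shadow::Function).
struct RealThing {
  uint32_t flagsAndArgCount;
  void* payload;
};

// The public "shadow" mirror: same layout, exposed to code that must peek at
// the fields without including the internal definition.
namespace shadow {
struct Thing {
  uint32_t flagsAndArgCount;
  void* payload;
};
}  // namespace shadow

// Layout checks in the style of the asserts above: if the two declarations
// ever drift apart, the build breaks instead of silently misreading memory.
static_assert(sizeof(RealThing) == sizeof(shadow::Thing),
              "shadow interface must match actual interface");
static_assert(offsetof(RealThing, payload) == offsetof(shadow::Thing, payload),
              "payload must live at the same offset in both views");

int main() { return 0; }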

extern JSString* fun_toStringHelper(JSContext* cx, js::HandleObject obj,
                                    bool isToSource);

@@ -794,42 +761,36 @@ extern void ThrowTypeErrorBehavior(JSContext* cx);
 */
class FunctionExtended : public JSFunction {
 public:
  enum {
    FirstExtendedSlot = JSFunction::SlotCount,
    SecondExtendedSlot,

    SlotCount
  };

  static const uint32_t NUM_EXTENDED_SLOTS = 2;
  static const unsigned NUM_EXTENDED_SLOTS = 2;

  // Arrow functions store their lexical new.target in the first extended
  // slot.
  static const uint32_t ARROW_NEWTARGET_SLOT = 0;
  static const unsigned ARROW_NEWTARGET_SLOT = 0;

  static const uint32_t METHOD_HOMEOBJECT_SLOT = 0;
  static const unsigned METHOD_HOMEOBJECT_SLOT = 0;

  // Stores the length for bound functions, so the .length property doesn't need
  // to be resolved eagerly.
  static const uint32_t BOUND_FUNCTION_LENGTH_SLOT = 1;
  static const unsigned BOUND_FUNCTION_LENGTH_SLOT = 1;

  // Exported asm.js/wasm functions store their WasmInstanceObject in the
  // first slot.
  static const uint32_t WASM_INSTANCE_SLOT = 0;
  static const unsigned WASM_INSTANCE_SLOT = 0;

  // wasm/asm.js exported functions store the wasm::TlsData pointer of their
  // instance.
  static const uint32_t WASM_TLSDATA_SLOT = 1;
  static const unsigned WASM_TLSDATA_SLOT = 1;

  // asm.js module functions store their WasmModuleObject in the first slot.
  static const uint32_t ASMJS_MODULE_SLOT = 0;
  static const unsigned ASMJS_MODULE_SLOT = 0;

  // Async module callback handlers store their ModuleObject in the first slot.
  static const uint32_t MODULE_SLOT = 0;
  static const unsigned MODULE_SLOT = 0;

  static inline size_t offsetOfExtendedSlot(uint32_t which) {
  static inline size_t offsetOfExtendedSlot(unsigned which) {
    MOZ_ASSERT(which < NUM_EXTENDED_SLOTS);
    return getFixedSlotOffset(FirstExtendedSlot + which);
    return offsetof(FunctionExtended, extendedSlots) +
           which * sizeof(GCPtrValue);
  }
  static inline size_t offsetOfArrowNewTargetSlot() {
    return offsetOfExtendedSlot(ARROW_NEWTARGET_SLOT);
@@ -843,6 +804,9 @@ class FunctionExtended : public JSFunction {

 private:
  friend class JSFunction;

  /* Reserved slots available for storage by particular native functions. */
  GCPtrValue extendedSlots[NUM_EXTENDED_SLOTS];
};
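
Aside: offsetOfExtendedSlot above computes a raw byte offset into the trailing slot array so generated code can address an individual slot directly. A small sketch of the same computation with hypothetical types (the real code uses GCPtrValue and MOZ_ASSERT):

#include <cassert>
#include <cstddef>
#include <cstdint>

// Stand-in for a GC-managed value slot.
struct Slot {
  uint64_t bits;
};

// An object that reserves a small fixed array of extra slots, like
// FunctionExtended's extendedSlots.
struct ExtendedThing {
  void* header;
  static const unsigned NUM_EXTENDED_SLOTS = 2;
  Slot extendedSlots[NUM_EXTENDED_SLOTS];

  // Byte offset of slot `which`, suitable for baking into generated code.
  static size_t offsetOfExtendedSlot(unsigned which) {
    assert(which < NUM_EXTENDED_SLOTS);
    return offsetof(ExtendedThing, extendedSlots) + which * sizeof(Slot);
  }
};

int main() {
  // Slot 1 sits exactly one Slot past slot 0.
  assert(ExtendedThing::offsetOfExtendedSlot(1) ==
         ExtendedThing::offsetOfExtendedSlot(0) + sizeof(Slot));
  return 0;
}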

extern bool CanReuseScriptForClone(JS::Realm* realm, HandleFunction fun,
@@ -857,29 +821,53 @@ extern JSFunction* CloneAsmJSModuleFunction(JSContext* cx, HandleFunction fun);

} // namespace js

template <>
inline bool JSObject::is<JSFunction>() const {
  return getClass()->isJSFunction();
inline js::FunctionExtended* JSFunction::toExtended() {
  MOZ_ASSERT(isExtended());
  return static_cast<js::FunctionExtended*>(this);
}

inline void JSFunction::initExtendedSlot(uint32_t which, const js::Value& val) {
inline const js::FunctionExtended* JSFunction::toExtended() const {
  MOZ_ASSERT(isExtended());
  MOZ_ASSERT(which < js::FunctionExtended::NUM_EXTENDED_SLOTS);
  return static_cast<const js::FunctionExtended*>(this);
}

inline js::FunctionExtended* JSFunction::toExtendedOffMainThread() {
  return static_cast<js::FunctionExtended*>(this);
}

inline const js::FunctionExtended* JSFunction::toExtendedOffMainThread() const {
  return static_cast<const js::FunctionExtended*>(this);
}

inline void JSFunction::initializeExtended() {
  MOZ_ASSERT(isExtended());

  MOZ_ASSERT(std::size(toExtended()->extendedSlots) == 2);
  toExtended()->extendedSlots[0].init(js::UndefinedValue());
  toExtended()->extendedSlots[1].init(js::UndefinedValue());
}

inline void JSFunction::initExtendedSlot(size_t which, const js::Value& val) {
  MOZ_ASSERT(which < std::size(toExtended()->extendedSlots));
  MOZ_ASSERT(js::IsObjectValueInCompartment(val, compartment()));
  initFixedSlot(js::FunctionExtended::FirstExtendedSlot + which, val);
  toExtended()->extendedSlots[which].init(val);
}

inline void JSFunction::setExtendedSlot(uint32_t which, const js::Value& val) {
  MOZ_ASSERT(isExtended());
  MOZ_ASSERT(which < js::FunctionExtended::NUM_EXTENDED_SLOTS);
inline void JSFunction::setExtendedSlot(size_t which, const js::Value& val) {
  MOZ_ASSERT(which < std::size(toExtended()->extendedSlots));
  MOZ_ASSERT(js::IsObjectValueInCompartment(val, compartment()));
  setFixedSlot(js::FunctionExtended::FirstExtendedSlot + which, val);
  toExtended()->extendedSlots[which] = val;
}

inline const js::Value& JSFunction::getExtendedSlot(uint32_t which) const {
  MOZ_ASSERT(isExtended());
  MOZ_ASSERT(which < js::FunctionExtended::NUM_EXTENDED_SLOTS);
  return getFixedSlot(js::FunctionExtended::FirstExtendedSlot + which);
inline const js::Value& JSFunction::getExtendedSlot(size_t which) const {
  MOZ_ASSERT(which < std::size(toExtended()->extendedSlots));
  return toExtended()->extendedSlots[which];
}

inline const js::Value& JSFunction::getExtendedSlotOffMainThread(
    size_t which) const {
  MOZ_ASSERT(which < std::size(toExtendedOffMainThread()->extendedSlots));
  return toExtendedOffMainThread()->extendedSlots[which];
}

namespace js {

@@ -733,7 +733,7 @@ static inline NativeObject* NewObject(JSContext* cx, Handle<TaggedProto> proto,
                                      NewObjectKind newKind,
                                      ObjectFlags objectFlags = {}) {
  MOZ_ASSERT(clasp != &ArrayObject::class_);
  MOZ_ASSERT_IF(clasp->isJSFunction(),
  MOZ_ASSERT_IF(clasp == &JSFunction::class_,
                kind == gc::AllocKind::FUNCTION ||
                    kind == gc::AllocKind::FUNCTION_EXTENDED);
  MOZ_ASSERT(clasp->isNativeObject());
@@ -742,8 +742,8 @@ static inline NativeObject* NewObject(JSContext* cx, Handle<TaggedProto> proto,
  // enough fixed slots to cover the number of reserved slots in the object,
  // regardless of the allocation kind specified.
  size_t nfixed = ClassCanHaveFixedData(clasp)
                      ? GetGCKindSlots(gc::GetGCObjectKind(clasp))
                      : GetGCKindSlots(kind);
                      ? GetGCKindSlots(gc::GetGCObjectKind(clasp), clasp)
                      : GetGCKindSlots(kind, clasp);

  RootedShape shape(
      cx, SharedShape::getInitialShape(cx, clasp, cx->realm(), proto, nfixed,
@@ -1292,7 +1292,8 @@ bool NativeObject::fillInAfterSwap(JSContext* cx, HandleNativeObject obj,
  MOZ_ASSERT(!IsInsideNursery(obj));

  // Make sure the shape's numFixedSlots() is correct.
  size_t nfixed = gc::GetGCKindSlots(obj->asTenured().getAllocKind());
  size_t nfixed =
      gc::GetGCKindSlots(obj->asTenured().getAllocKind(), obj->getClass());
  if (nfixed != obj->shape()->numFixedSlots()) {
    if (!NativeObject::changeNumFixedSlotsAfterSwap(cx, obj, nfixed)) {
      return false;
@@ -3768,7 +3769,8 @@ void JSObject::debugCheckNewObject(Shape* shape, js::gc::AllocKind allocKind,
    // Arrays can store the ObjectElements header inline.
    MOZ_ASSERT(shape->numFixedSlots() == 0);
  } else {
    MOZ_ASSERT(gc::GetGCKindSlots(allocKind) == shape->numFixedSlots());
    MOZ_ASSERT(gc::GetGCKindSlots(allocKind, clasp) ==
               shape->numFixedSlots());
  }
}

@@ -689,12 +689,6 @@ struct JSObject_Slots4 : JSObject {
  void* data[2];
  js::Value fslots[4];
};
struct JSObject_Slots6 : JSObject {
  // Only used for extended functions which are required to have exactly six
  // fixed slots due to JIT assumptions.
  void* data[2];
  js::Value fslots[6];
};
struct JSObject_Slots8 : JSObject {
  void* data[2];
  js::Value fslots[8];

@@ -905,6 +905,15 @@ js::PropertyName* js::GetClonedSelfHostedFunctionName(const JSFunction* fun) {
  return name.toString()->asAtom().asPropertyName();
}

js::PropertyName* js::GetClonedSelfHostedFunctionNameOffMainThread(
    JSFunction* fun) {
  Value name = fun->getExtendedSlotOffMainThread(LAZY_FUNCTION_NAME_SLOT);
  if (!name.isString()) {
    return nullptr;
  }
  return name.toString()->asAtom().asPropertyName();
}

bool js::IsExtendedUnclonedSelfHostedFunctionName(JSAtom* name) {
  if (name->length() < 2) {
    return false;

@@ -30,6 +30,18 @@ bool IsSelfHostedFunctionWithName(JSFunction* fun, JSAtom* name);
PropertyName* GetClonedSelfHostedFunctionName(const JSFunction* fun);
void SetClonedSelfHostedFunctionName(JSFunction* fun, PropertyName* name);

/*
 * Same as GetClonedSelfHostedFunctionName, but `fun` is guaranteed to be an
 * extended function.
 *
 * This function is supposed to be used off-thread, especially the JIT
 * compilation thread, that cannot access JSFunction.flags_, because of
 * a race condition.
 *
 * See Also: WrappedFunction.isExtended_
 */
PropertyName* GetClonedSelfHostedFunctionNameOffMainThread(JSFunction* fun);
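
Aside: the comment above explains why a separate off-main-thread entry point exists at all: helper threads must not read a field the main thread may be mutating concurrently, so the off-thread variant skips that check and relies on a guarantee the caller established earlier. A rough sketch of the pattern with invented names:

#include <cassert>

// Hypothetical illustration only; names are not from the patch.
struct Thing {
  unsigned flags = 0;  // mutated on the main thread while helpers run
  int payload = 0;     // immutable once the object is handed to a helper

  int payloadChecked() const {
    // Main-thread-only variant: free to consult `flags` for a sanity check.
    assert(flags & 0x1);
    return payload;
  }

  int payloadOffMainThread() const {
    // Off-thread variant: a non-atomic read of `flags` here would race with
    // main-thread writes, so the precondition is the caller's responsibility.
    return payload;
  }
};

int main() {
  Thing t;
  t.flags = 0x1;
  return t.payloadChecked() + t.payloadOffMainThread();
}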

constexpr char ExtendedUnclonedSelfHostedFunctionNamePrefix = '$';

/*

@@ -1098,7 +1098,7 @@ Shape* SharedShape::getInitialShape(JSContext* cx, const JSClass* clasp,
                                    JS::Realm* realm, TaggedProto proto,
                                    gc::AllocKind kind,
                                    ObjectFlags objectFlags) {
  return getInitialShape(cx, clasp, realm, proto, GetGCKindSlots(kind),
  return getInitialShape(cx, clasp, realm, proto, GetGCKindSlots(kind, clasp),
                         objectFlags);
}

@@ -2293,8 +2293,7 @@ static bool GenerateImportJitExit(MacroAssembler& masm, const FuncImport& fi,
  masm.storePtr(callee, Address(masm.getStackPointer(), calleeArgOffset));

  // 6. Check if we need to rectify arguments.
  masm.load32(Address(callee, JSFunction::offsetOfFlagsAndArgCount()), scratch);
  masm.rshift32(Imm32(JSFunction::ArgCountShift), scratch);
  masm.load16ZeroExtend(Address(callee, JSFunction::offsetOfNargs()), scratch);
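
Aside: the first two loads above read a packed flags-and-argument-count word and shift to recover the argument count, while the restored line reads a dedicated 16-bit nargs field. A sketch of the packed-word variant; the shift value and layout here are invented, not the real FunctionFlags encoding:

#include <cassert>
#include <cstdint>

// Hypothetical packing: argument count in the upper bits, flags in the lower
// bits, mirroring the "load32 then rshift32 by ArgCountShift" pattern above.
constexpr uint32_t ArgCountShift = 16;

constexpr uint32_t packFlagsAndArgCount(uint16_t flags, uint16_t argCount) {
  return (uint32_t(argCount) << ArgCountShift) | flags;
}

constexpr uint16_t unpackArgCount(uint32_t packed) {
  return uint16_t(packed >> ArgCountShift);
}

int main() {
  uint32_t packed = packFlagsAndArgCount(/*flags=*/0x0043, /*argCount=*/3);
  assert(unpackArgCount(packed) == 3);  // what the JIT exit compares against
  return 0;
}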

  Label rectify;
  masm.branch32(Assembler::Above, scratch, Imm32(fi.funcType().args().length()),