Bug 1629791 part 6 - Auto-generate boilerplate for more ops. r=iain

Differential Revision: https://phabricator.services.mozilla.com/D71280
Jan de Mooij 2020-04-17 14:46:52 +00:00
Parent 35f2245f17
Commit e63590e32b
6 changed files: 52 additions and 52 deletions
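The pattern this patch applies to every converted op: the emit method no longer pulls its own operands off the CacheIR reader; the auto-generated dispatch boilerplate decodes them and passes them in as typed parameters. Below is a minimal self-contained sketch of that before/after split, with toy types, a toy encoding, and a hand-written dispatcher standing in for the generated code; none of it is SpiderMonkey source.

#include <cstddef>
#include <cstdint>
#include <cstdio>
#include <vector>

// Toy stand-ins for the CacheIR operand-id and reader types.
struct ObjOperandId { uint8_t id; };

struct Reader {
  const std::vector<uint8_t>& buf;
  std::size_t pos = 0;
  uint8_t readByte() { return buf[pos++]; }
  ObjOperandId objOperandId() { return ObjOperandId{readByte()}; }
  uint32_t stubOffset() { return readByte(); }  // toy: one byte per stub-field offset
};

// After this patch: the emitter receives already-decoded, typed arguments.
bool emitGuardProto(ObjOperandId objId, uint32_t protoOffset) {
  std::printf("guard proto: operand %u, stub field offset %u\n",
              unsigned(objId.id), unsigned(protoOffset));
  return true;
}

// The generated boilerplate (hand-written here) now owns the decoding step
// each emitter previously did for itself via reader.objOperandId() and
// reader.stubOffset().
bool dispatchGuardProto(Reader& reader) {
  ObjOperandId objId = reader.objOperandId();
  uint32_t protoOffset = reader.stubOffset();
  return emitGuardProto(objId, protoOffset);
}

int main() {
  std::vector<uint8_t> ir = {0, 4};  // toy encoding: operand id 0, offset 4
  Reader reader{ir};
  return dispatchGuardProto(reader) ? 0 : 1;
}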

View file

@@ -255,9 +255,10 @@ bool BaselineCacheIRCompiler::emitGuardGroup(ObjOperandId objId,
return true;
}
-bool BaselineCacheIRCompiler::emitGuardProto() {
+bool BaselineCacheIRCompiler::emitGuardProto(ObjOperandId objId,
+                                             uint32_t protoOffset) {
JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
-Register obj = allocator.useRegister(masm, reader.objOperandId());
+Register obj = allocator.useRegister(masm, objId);
AutoScratchRegister scratch(allocator, masm);
FailurePath* failure;
@@ -265,15 +266,17 @@ bool BaselineCacheIRCompiler::emitGuardProto() {
return false;
}
-Address addr(stubAddress(reader.stubOffset()));
+Address addr(stubAddress(protoOffset));
masm.loadObjProto(obj, scratch);
masm.branchPtr(Assembler::NotEqual, addr, scratch, failure->label());
return true;
}
-bool BaselineCacheIRCompiler::emitGuardCompartment() {
+bool BaselineCacheIRCompiler::emitGuardCompartment(ObjOperandId objId,
+                                                   uint32_t globalOffset,
+                                                   uint32_t compartmentOffset) {
JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
-Register obj = allocator.useRegister(masm, reader.objOperandId());
+Register obj = allocator.useRegister(masm, objId);
AutoScratchRegister scratch(allocator, masm);
FailurePath* failure;
@@ -283,21 +286,21 @@ bool BaselineCacheIRCompiler::emitGuardCompartment() {
// Verify that the global wrapper is still valid, as
// it is pre-requisite for doing the compartment check.
-Address globalWrapper(stubAddress(reader.stubOffset()));
+Address globalWrapper(stubAddress(globalOffset));
masm.loadPtr(globalWrapper, scratch);
Address handlerAddr(scratch, ProxyObject::offsetOfHandler());
masm.branchPtr(Assembler::Equal, handlerAddr,
ImmPtr(&DeadObjectProxy::singleton), failure->label());
-Address addr(stubAddress(reader.stubOffset()));
+Address addr(stubAddress(compartmentOffset));
masm.branchTestObjCompartment(Assembler::NotEqual, obj, addr, scratch,
failure->label());
return true;
}
-bool BaselineCacheIRCompiler::emitGuardAnyClass() {
+bool BaselineCacheIRCompiler::emitGuardAnyClass(ObjOperandId objId,
+                                                uint32_t claspOffset) {
JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
-ObjOperandId objId = reader.objOperandId();
Register obj = allocator.useRegister(masm, objId);
AutoScratchRegister scratch(allocator, masm);
@@ -306,7 +309,7 @@ bool BaselineCacheIRCompiler::emitGuardAnyClass() {
return false;
}
-Address testAddr(stubAddress(reader.stubOffset()));
+Address testAddr(stubAddress(claspOffset));
if (objectGuardNeedsSpectreMitigations(objId)) {
masm.branchTestObjClass(Assembler::NotEqual, obj, testAddr, scratch, obj,
failure->label());

View file

@@ -551,10 +551,23 @@ class MOZ_RAII CacheIRWriter : public JS::CustomAutoRooter {
MOZ_ASSERT(group);
addStubField(uintptr_t(group), StubField::Type::ObjectGroup);
}
+void writeObjectField(JSObject* obj) {
+assertSameCompartment(obj);
+addStubField(uintptr_t(obj), StubField::Type::JSObject);
+}
+void writeRawWordField(const void* ptr) {
+addStubField(uintptr_t(ptr), StubField::Type::RawWord);
+}
void writeJSOpImm(JSOp op) {
static_assert(sizeof(JSOp) == sizeof(uint8_t), "JSOp must fit in a byte");
buffer_.writeByte(uint8_t(op));
}
+void writeGuardClassKindImm(GuardClassKind kind) {
+static_assert(sizeof(GuardClassKind) == sizeof(uint8_t),
+"GuardClassKind must fit in a byte");
+buffer_.writeByte(uint8_t(kind));
+}
void writeBoolImm(bool b) { buffer_.writeByte(uint32_t(b)); }
CacheIRWriter(const CacheIRWriter&) = delete;
@@ -788,24 +801,6 @@ class MOZ_RAII CacheIRWriter : public JS::CustomAutoRooter {
guardGroup(obj, group);
}
-void guardProto(ObjOperandId obj, JSObject* proto) {
-assertSameCompartment(proto);
-writeOpWithOperandId(CacheOp::GuardProto, obj);
-addStubField(uintptr_t(proto), StubField::Type::JSObject);
-}
-void guardClass(ObjOperandId obj, GuardClassKind kind) {
-static_assert(sizeof(GuardClassKind) == sizeof(uint8_t),
-"GuardClassKind must fit in a byte");
-writeOpWithOperandId(CacheOp::GuardClass, obj);
-buffer_.writeByte(uint32_t(kind));
-}
-void guardAnyClass(ObjOperandId obj, const JSClass* clasp) {
-writeOpWithOperandId(CacheOp::GuardAnyClass, obj);
-addStubField(uintptr_t(clasp), StubField::Type::RawWord);
-}
void guardFunctionIsNative(ObjOperandId obj) {
writeOpWithOperandId(CacheOp::GuardFunctionIsNative, obj);
}
@@ -868,16 +863,6 @@ class MOZ_RAII CacheIRWriter : public JS::CustomAutoRooter {
buffer_.writeByte(uint32_t(magic));
}
-void guardCompartment(ObjOperandId obj, JSObject* global,
-JS::Compartment* compartment) {
-assertSameCompartment(global);
-writeOpWithOperandId(CacheOp::GuardCompartment, obj);
-// Add a reference to a global in the compartment to keep it alive.
-addStubField(uintptr_t(global), StubField::Type::JSObject);
-// Use RawWord, because compartments never move and it can't be GCed.
-addStubField(uintptr_t(compartment), StubField::Type::RawWord);
-}
void guardIsExtensible(ObjOperandId obj) {
writeOpWithOperandId(CacheOp::GuardIsExtensible, obj);
}
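On the writer side, the hand-written guardProto/guardClass/guardAnyClass/guardCompartment methods deleted in the hunks above become generator output: each one reduces to a fixed sequence of calls to the per-field helpers added at the top of this file (writeObjectField, writeRawWordField, writeGuardClassKindImm). Below is a toy model of that reduction with stand-in types and a guessed shape for the generated guardProto; the real generated code is produced at build time and is not part of this diff.

#include <cstdint>
#include <vector>

// Stand-ins so the sketch compiles on its own; the real writer is CacheIRWriter.
enum class CacheOp : uint8_t { GuardProto = 1 };
struct ObjOperandId { uint8_t id; };
class JSObject;  // opaque here

struct ToyCacheIRWriter {
  std::vector<uint8_t> buffer;
  std::vector<uintptr_t> stubFields;

  void writeOpWithOperandId(CacheOp op, ObjOperandId obj) {
    buffer.push_back(uint8_t(op));
    buffer.push_back(obj.id);
  }
  // Stand-in for the writeObjectField helper added above; the real one also
  // performs the assertSameCompartment check the deleted guardProto did.
  void writeObjectField(JSObject* obj) {
    stubFields.push_back(uintptr_t(obj));
  }

  // Plausible generated guardProto (a guess, not taken from this diff): the
  // same two steps as the deleted hand-written method, composed from helpers.
  void guardProto(ObjOperandId obj, JSObject* proto) {
    writeOpWithOperandId(CacheOp::GuardProto, obj);
    writeObjectField(proto);
  }
};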

View file

@@ -1820,9 +1820,8 @@ bool CacheIRCompiler::emitGuardType() {
return true;
}
-bool CacheIRCompiler::emitGuardClass() {
+bool CacheIRCompiler::emitGuardClass(ObjOperandId objId, GuardClassKind kind) {
JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
-ObjOperandId objId = reader.objOperandId();
Register obj = allocator.useRegister(masm, objId);
AutoScratchRegister scratch(allocator, masm);
@@ -1832,7 +1831,7 @@ bool CacheIRCompiler::emitGuardClass() {
}
const JSClass* clasp = nullptr;
-switch (reader.guardClassKind()) {
+switch (kind) {
case GuardClassKind::Array:
clasp = &ArrayObject::class_;
break;

View file

@@ -148,6 +148,7 @@
- name: GuardProto
shared: false
+gen_boilerplate: true
operands:
obj: ObjId
proto: ObjectField
@@ -155,6 +156,7 @@
# Guard per GuardClassKind.
- name: GuardClass
shared: true
+gen_boilerplate: true
operands:
obj: ObjId
kind: GuardClassKindImm
@@ -162,6 +164,7 @@
# Guard on an arbitrary JSClass.
- name: GuardAnyClass
shared: false
+gen_boilerplate: true
operands:
obj: ObjId
clasp: RawWordField
@@ -169,6 +172,7 @@
# Add a reference to a global in the compartment to keep it alive.
- name: GuardCompartment
shared: false
+gen_boilerplate: true
operands:
obj: ObjId
global: ObjectField
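Flipping gen_boilerplate to true is what lets the hand-written glue above be deleted: the operand types in each entry (ObjId, ObjectField, RawWordField, GuardClassKindImm) tell the generator, via the tables in the next file, which argument each writer method takes and which parameters each emit method receives. For GuardProto the entry implies roughly the following pair of signatures; the compiler-side one appears verbatim in the C++ hunks above, while the writer-side types are inferred from operand_writer_info rather than shown in this diff.

#include <cstdint>

// Stand-in declarations so this sketch is self-contained; the real types
// live in CacheIR.h.
struct ObjOperandId;
class JSObject;

// Writer side: obj: ObjId -> an ObjOperandId argument; proto: ObjectField ->
// a JSObject* written into the stub data (per operand_writer_info).
void guardProto(ObjOperandId obj, JSObject* proto);

// Compiler side: ObjId stays a typed operand id, while field operands arrive
// as uint32_t stub-field offsets (the 'Offset' suffix in operand_compiler_info).
bool emitGuardProto(ObjOperandId objId, uint32_t protoOffset);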

View file

@@ -67,9 +67,12 @@ operand_writer_info = {
'ShapeField': ('Shape*', 'writeShapeField'),
'GroupField': ('ObjectGroup*', 'writeGroupField'),
+'ObjectField': ('JSObject*', 'writeObjectField'),
+'RawWordField': ('const void*', 'writeRawWordField'),
'JSOpImm': ('JSOp', 'writeJSOpImm'),
'BoolImm': ('bool', 'writeBoolImm'),
+'GuardClassKindImm': ('GuardClassKind', 'writeGuardClassKindImm'),
}
@@ -114,9 +117,12 @@ operand_compiler_info = {
'ShapeField': ('uint32_t', 'Offset', 'reader.stubOffset()'),
'GroupField': ('uint32_t', 'Offset', 'reader.stubOffset()'),
+'ObjectField': ('uint32_t', 'Offset', 'reader.stubOffset()'),
+'RawWordField': ('uint32_t', 'Offset', 'reader.stubOffset()'),
'JSOpImm': ('JSOp', '', 'reader.jsop()'),
-'BoolImm': ('bool', '', 'reader.readBool()')
+'BoolImm': ('bool', '', 'reader.readBool()'),
+'GuardClassKindImm': ('GuardClassKind', '', 'reader.guardClassKind()'),
}
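These are the two lookup tables the generator consults per operand: operand_writer_info supplies the C++ argument type and CacheIRWriter helper for the writer method, and operand_compiler_info supplies the emit-method parameter type, a name suffix, and the CacheIRReader call used in the generated decode step. The new rows make the split between immediates and stub fields visible; the two emit signatures below both appear in the C++ hunks of this commit, and the stand-in type declarations are only there to keep the sketch self-contained.

#include <cstdint>

// Stand-ins for SpiderMonkey types.
struct ObjOperandId;
enum class GuardClassKind : uint8_t;

// GuardClassKindImm -> ('GuardClassKind', '', 'reader.guardClassKind()'):
// an immediate decodes straight to its real type, so emitGuardClass receives
// the enum itself.
bool emitGuardClass(ObjOperandId objId, GuardClassKind kind);

// RawWordField -> ('uint32_t', 'Offset', 'reader.stubOffset()'): a stub field
// stays an offset into the stub data, so emitGuardAnyClass receives a uint32_t.
bool emitGuardAnyClass(ObjOperandId objId, uint32_t claspOffset);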

View file

@@ -628,10 +628,11 @@ bool IonCacheIRCompiler::emitGuardGroup(ObjOperandId objId,
return true;
}
-bool IonCacheIRCompiler::emitGuardProto() {
+bool IonCacheIRCompiler::emitGuardProto(ObjOperandId objId,
+                                        uint32_t protoOffset) {
JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
-Register obj = allocator.useRegister(masm, reader.objOperandId());
-JSObject* proto = objectStubField(reader.stubOffset());
+Register obj = allocator.useRegister(masm, objId);
+JSObject* proto = objectStubField(protoOffset);
AutoScratchRegister scratch(allocator, masm);
@@ -646,11 +647,13 @@ bool IonCacheIRCompiler::emitGuardProto() {
return true;
}
-bool IonCacheIRCompiler::emitGuardCompartment() {
+bool IonCacheIRCompiler::emitGuardCompartment(ObjOperandId objId,
+                                              uint32_t globalOffset,
+                                              uint32_t compartmentOffset) {
JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
-Register obj = allocator.useRegister(masm, reader.objOperandId());
-JSObject* globalWrapper = objectStubField(reader.stubOffset());
-JS::Compartment* compartment = compartmentStubField(reader.stubOffset());
+Register obj = allocator.useRegister(masm, objId);
+JSObject* globalWrapper = objectStubField(globalOffset);
+JS::Compartment* compartment = compartmentStubField(compartmentOffset);
AutoScratchRegister scratch(allocator, masm);
FailurePath* failure;
@@ -670,13 +673,13 @@ bool IonCacheIRCompiler::emitGuardCompartment() {
return true;
}
-bool IonCacheIRCompiler::emitGuardAnyClass() {
+bool IonCacheIRCompiler::emitGuardAnyClass(ObjOperandId objId,
+                                           uint32_t claspOffset) {
JitSpew(JitSpew_Codegen, "%s", __FUNCTION__);
-ObjOperandId objId = reader.objOperandId();
Register obj = allocator.useRegister(masm, objId);
AutoScratchRegister scratch(allocator, masm);
-const JSClass* clasp = classStubField(reader.stubOffset());
+const JSClass* clasp = classStubField(claspOffset);
FailurePath* failure;
if (!addFailurePath(&failure)) {