Backed out changeset 88700ecc3751 (bug 1401827) for leaks detected by asan, e.g. in chrome-1. r=backout

Sebastian Hengst 2017-09-27 19:44:16 +02:00
Parent aa5d8773e4
Commit 196c9a4941
13 changed files with 666 additions and 540 deletions

View file

@ -366,9 +366,9 @@ struct js::AsmJSMetadata : Metadata, AsmJSMetadataCacheablePod
return scriptSource.get();
}
bool getFuncName(const Bytes* maybeBytecode, uint32_t funcIndex, UTF8Bytes* name) const override {
const char* p = asmJSFuncNames[funcIndex].get();
if (!p)
return true;
// asm.js doesn't allow exporting imports or putting imports in tables
MOZ_ASSERT(funcIndex >= AsmJSFirstDefFuncIndex);
const char* p = asmJSFuncNames[funcIndex - AsmJSFirstDefFuncIndex].get();
return name->append(p, strlen(p));
}
@ -1385,52 +1385,38 @@ class MOZ_STACK_CLASS ModuleValidator
class Func
{
PropertyName* name_;
uint32_t sigIndex_;
uint32_t firstUse_;
uint32_t funcDefIndex_;
bool defined_;
// Available when defined:
uint32_t index_;
uint32_t srcBegin_;
uint32_t srcEnd_;
uint32_t line_;
Bytes bytes_;
Uint32Vector callSiteLineNums_;
bool defined_;
public:
Func(PropertyName* name, uint32_t sigIndex, uint32_t firstUse, uint32_t funcDefIndex)
: name_(name), sigIndex_(sigIndex), firstUse_(firstUse), funcDefIndex_(funcDefIndex),
defined_(false), srcBegin_(0), srcEnd_(0), line_(0)
Func(PropertyName* name, uint32_t firstUse, uint32_t index)
: name_(name), firstUse_(firstUse), index_(index),
srcBegin_(0), srcEnd_(0), defined_(false)
{}
PropertyName* name() const { return name_; }
uint32_t sigIndex() const { return sigIndex_; }
uint32_t firstUse() const { return firstUse_; }
bool defined() const { return defined_; }
uint32_t funcDefIndex() const { return funcDefIndex_; }
uint32_t index() const { return index_; }
void define(ParseNode* fn, uint32_t line, Bytes&& bytes, Uint32Vector&& callSiteLineNums) {
void define(ParseNode* fn) {
MOZ_ASSERT(!defined_);
defined_ = true;
srcBegin_ = fn->pn_pos.begin;
srcEnd_ = fn->pn_pos.end;
line_ = line;
bytes_ = Move(bytes);
callSiteLineNums_ = Move(callSiteLineNums);
}
uint32_t srcBegin() const { MOZ_ASSERT(defined_); return srcBegin_; }
uint32_t srcEnd() const { MOZ_ASSERT(defined_); return srcEnd_; }
uint32_t line() const { MOZ_ASSERT(defined_); return line_; }
const Bytes& bytes() const { MOZ_ASSERT(defined_); return bytes_; }
Uint32Vector& callSiteLineNums() { MOZ_ASSERT(defined_); return callSiteLineNums_; }
};
typedef Vector<const Func*> ConstFuncVector;
typedef Vector<Func*> FuncVector;
class Table
class FuncPtrTable
{
uint32_t sigIndex_;
PropertyName* name_;
@ -1438,10 +1424,10 @@ class MOZ_STACK_CLASS ModuleValidator
uint32_t mask_;
bool defined_;
Table(Table&& rhs) = delete;
FuncPtrTable(FuncPtrTable&& rhs) = delete;
public:
Table(uint32_t sigIndex, PropertyName* name, uint32_t firstUse, uint32_t mask)
FuncPtrTable(uint32_t sigIndex, PropertyName* name, uint32_t firstUse, uint32_t mask)
: sigIndex_(sigIndex), name_(name), firstUse_(firstUse), mask_(mask), defined_(false)
{}
@ -1453,7 +1439,7 @@ class MOZ_STACK_CLASS ModuleValidator
void define() { MOZ_ASSERT(!defined_); defined_ = true; }
};
typedef Vector<Table*> TableVector;
typedef Vector<FuncPtrTable*> FuncPtrTableVector;
class Global
{
@ -1463,7 +1449,7 @@ class MOZ_STACK_CLASS ModuleValidator
ConstantLiteral,
ConstantImport,
Function,
Table,
FuncPtrTable,
FFI,
ArrayView,
ArrayViewCtor,
@ -1481,8 +1467,8 @@ class MOZ_STACK_CLASS ModuleValidator
unsigned index_;
NumLit literalValue_;
} varOrConst;
uint32_t funcDefIndex_;
uint32_t tableIndex_;
uint32_t funcIndex_;
uint32_t funcPtrTableIndex_;
uint32_t ffiIndex_;
struct {
Scalar::Type viewType_;
@ -1520,13 +1506,13 @@ class MOZ_STACK_CLASS ModuleValidator
MOZ_ASSERT(which_ == ConstantLiteral);
return u.varOrConst.literalValue_;
}
uint32_t funcDefIndex() const {
uint32_t funcIndex() const {
MOZ_ASSERT(which_ == Function);
return u.funcDefIndex_;
return u.funcIndex_;
}
uint32_t tableIndex() const {
MOZ_ASSERT(which_ == Table);
return u.tableIndex_;
uint32_t funcPtrTableIndex() const {
MOZ_ASSERT(which_ == FuncPtrTable);
return u.funcPtrTableIndex_;
}
unsigned ffiIndex() const {
MOZ_ASSERT(which_ == FFI);
@ -1603,43 +1589,21 @@ class MOZ_STACK_CLASS ModuleValidator
};
private:
class HashableSig
{
uint32_t sigIndex_;
const SigWithIdVector& sigs_;
public:
HashableSig(uint32_t sigIndex, const SigWithIdVector& sigs)
: sigIndex_(sigIndex), sigs_(sigs)
{}
uint32_t sigIndex() const {
return sigIndex_;
}
const Sig& sig() const {
return sigs_[sigIndex_];
}
// Implement HashPolicy:
typedef const Sig& Lookup;
static HashNumber hash(Lookup l) {
return l.hash();
}
static bool match(HashableSig lhs, Lookup rhs) {
return lhs.sig() == rhs;
}
};
class NamedSig : public HashableSig
class NamedSig
{
PropertyName* name_;
const SigWithId* sig_;
public:
NamedSig(PropertyName* name, uint32_t sigIndex, const SigWithIdVector& sigs)
: HashableSig(sigIndex, sigs), name_(name)
NamedSig(PropertyName* name, const SigWithId& sig)
: name_(name), sig_(&sig)
{}
PropertyName* name() const {
return name_;
}
const Sig& sig() const {
return *sig_;
}
// Implement HashPolicy:
struct Lookup {
@ -1651,12 +1615,11 @@ class MOZ_STACK_CLASS ModuleValidator
return HashGeneric(l.name, l.sig.hash());
}
static bool match(NamedSig lhs, Lookup rhs) {
return lhs.name() == rhs.name && lhs.sig() == rhs.sig;
return lhs.name_ == rhs.name && *lhs.sig_ == rhs.sig;
}
};
typedef HashSet<HashableSig, HashableSig> SigSet;
typedef HashMap<NamedSig, uint32_t, NamedSig> FuncImportMap;
typedef HashMap<NamedSig, uint32_t, NamedSig> ImportMap;
typedef HashMap<const SigWithId*, uint32_t, SigHashPolicy> SigMap;
typedef HashMap<PropertyName*, Global*> GlobalMap;
typedef HashMap<PropertyName*, MathBuiltin> MathNameMap;
typedef HashMap<PropertyName*, AsmJSAtomicsBuiltinFunction> AtomicsNameMap;
@ -1677,17 +1640,18 @@ class MOZ_STACK_CLASS ModuleValidator
// Validation-internal state:
LifoAlloc validationLifo_;
FuncVector funcDefs_;
TableVector tables_;
FuncVector functions_;
FuncPtrTableVector funcPtrTables_;
GlobalMap globalMap_;
SigSet sigSet_;
FuncImportMap funcImportMap_;
SigMap sigMap_;
ImportMap importMap_;
ArrayViewVector arrayViews_;
bool atomicsPresent_;
bool simdPresent_;
// State used to build the AsmJSModule in finish():
ModuleEnvironment env_;
ModuleGenerator mg_;
MutableAsmJSMetadata asmJSMetadata_;
// Error reporting:
@ -1723,26 +1687,29 @@ class MOZ_STACK_CLASS ModuleValidator
return standardLibrarySimdOpNames_.putNew(atom->asPropertyName(), op);
}
bool newSig(Sig&& sig, uint32_t* sigIndex) {
if (env_.sigs.length() >= MaxTypes)
*sigIndex = 0;
if (mg_.numSigs() >= AsmJSMaxTypes)
return failCurrentOffset("too many signatures");
*sigIndex = env_.sigs.length();
return env_.sigs.append(Move(sig));
*sigIndex = mg_.numSigs();
mg_.initSig(*sigIndex, Move(sig));
return true;
}
bool declareSig(Sig&& sig, uint32_t* sigIndex) {
SigSet::AddPtr p = sigSet_.lookupForAdd(sig);
SigMap::AddPtr p = sigMap_.lookupForAdd(sig);
if (p) {
*sigIndex = p->sigIndex();
MOZ_ASSERT(env_.sigs[*sigIndex] == sig);
*sigIndex = p->value();
MOZ_ASSERT(mg_.sig(*sigIndex) == sig);
return true;
}
return newSig(Move(sig), sigIndex) &&
sigSet_.add(p, HashableSig(*sigIndex, env_.sigs));
sigMap_.add(p, &mg_.sig(*sigIndex), *sigIndex);
}
public:
ModuleValidator(JSContext* cx, AsmJSParser& parser, ParseNode* moduleFunctionNode)
ModuleValidator(JSContext* cx, const CompileArgs& args, AsmJSParser& parser,
ParseNode* moduleFunctionNode)
: cx_(cx),
parser_(parser),
moduleFunctionNode_(moduleFunctionNode),
@ -1755,21 +1722,20 @@ class MOZ_STACK_CLASS ModuleValidator
standardLibrarySimdOpNames_(cx),
dummyFunction_(cx),
validationLifo_(VALIDATION_LIFO_DEFAULT_CHUNK_SIZE),
funcDefs_(cx),
tables_(cx),
functions_(cx),
funcPtrTables_(cx),
globalMap_(cx),
sigSet_(cx),
funcImportMap_(cx),
sigMap_(cx),
importMap_(cx),
arrayViews_(cx),
atomicsPresent_(false),
simdPresent_(false),
env_(CompileMode::Once, Tier::Ion, DebugEnabled::False, ModuleKind::AsmJS),
mg_(args, &env_, nullptr, nullptr),
errorString_(nullptr),
errorOffset_(UINT32_MAX),
errorOverRecursed_(false)
{
env_.minMemoryLength = RoundUpToNextValidAsmJSHeapLength(0);
}
{}
~ModuleValidator() {
if (errorString_) {
@ -1824,7 +1790,7 @@ class MOZ_STACK_CLASS ModuleValidator
!parser_.pc->sc()->hasExplicitUseStrict();
asmJSMetadata_->scriptSource.reset(parser_.ss);
if (!globalMap_.init() || !sigSet_.init() || !funcImportMap_.init())
if (!globalMap_.init() || !sigMap_.init() || !importMap_.init())
return false;
if (!standardLibraryMathNames_.init() ||
@ -1889,7 +1855,17 @@ class MOZ_STACK_CLASS ModuleValidator
if (!dummyFunction_)
return false;
return true;
env_.minMemoryLength = RoundUpToNextValidAsmJSHeapLength(0);
if (!env_.sigs.resize(AsmJSMaxTypes) ||
!env_.funcSigs.resize(AsmJSMaxFuncs) ||
!env_.funcImportGlobalDataOffsets.resize(AsmJSMaxImports) ||
!env_.tables.resize(AsmJSMaxTables) ||
!env_.asmJSSigToTableIndex.resize(AsmJSMaxTypes))
{
return false;
}
return mg_.init(/* codeSectionSize (ignored) = */ 0, asmJSMetadata_.get());
}
JSContext* cx() const { return cx_; }
@ -1897,13 +1873,13 @@ class MOZ_STACK_CLASS ModuleValidator
PropertyName* globalArgumentName() const { return globalArgumentName_; }
PropertyName* importArgumentName() const { return importArgumentName_; }
PropertyName* bufferArgumentName() const { return bufferArgumentName_; }
const ModuleEnvironment& env() { return env_; }
ModuleGenerator& mg() { return mg_; }
AsmJSParser& parser() const { return parser_; }
TokenStream& tokenStream() const { return parser_.tokenStream; }
RootedFunction& dummyFunction() { return dummyFunction_; }
bool supportsSimd() const { return cx_->jitSupportsSimd(); }
bool atomicsPresent() const { return atomicsPresent_; }
uint32_t minMemoryLength() const { return env_.minMemoryLength; }
uint32_t minMemoryLength() const { return mg_.minMemoryLength(); }
void initModuleFunctionName(PropertyName* name) {
MOZ_ASSERT(!moduleFunctionName_);
@ -1943,8 +1919,8 @@ class MOZ_STACK_CLASS ModuleValidator
MOZ_ASSERT(type.isGlobalVarType());
MOZ_ASSERT(type == Type::canonicalize(Type::lit(lit)));
uint32_t index = env_.globals.length();
if (!env_.globals.emplaceBack(type.canonicalToValType(), !isConst, index))
uint32_t index;
if (!mg_.addGlobal(type.canonicalToValType(), isConst, &index))
return false;
Global::Which which = isConst ? Global::ConstantLiteral : Global::Variable;
@ -1970,9 +1946,9 @@ class MOZ_STACK_CLASS ModuleValidator
if (!fieldChars)
return false;
uint32_t index = env_.globals.length();
uint32_t index;
ValType valType = type.canonicalToValType();
if (!env_.globals.emplaceBack(valType, !isConst, index))
if (!mg_.addGlobal(valType, isConst, &index))
return false;
Global::Which which = isConst ? Global::ConstantImport : Global::Variable;
@ -2174,100 +2150,75 @@ class MOZ_STACK_CLASS ModuleValidator
// Declare which function is exported which gives us an index into the
// module ExportVector.
uint32_t funcIndex = funcImportMap_.count() + func.funcDefIndex();
if (!env_.exports.emplaceBack(Move(fieldChars), funcIndex, DefinitionKind::Function))
if (!mg_.addExport(Move(fieldChars), func.index()))
return false;
// The exported function might have already been exported in which case
// the index will refer into the range of AsmJSExports.
return asmJSMetadata_->asmJSExports.emplaceBack(funcIndex,
return asmJSMetadata_->asmJSExports.emplaceBack(func.index(),
func.srcBegin() - asmJSMetadata_->srcStart,
func.srcEnd() - asmJSMetadata_->srcStart);
}
bool addFuncDef(PropertyName* name, uint32_t firstUse, Sig&& sig, Func** func) {
bool addFunction(PropertyName* name, uint32_t firstUse, Sig&& sig, Func** func) {
uint32_t sigIndex;
if (!declareSig(Move(sig), &sigIndex))
return false;
uint32_t funcDefIndex = funcDefs_.length();
if (funcDefIndex >= MaxFuncs)
uint32_t funcIndex = AsmJSFirstDefFuncIndex + numFunctions();
if (funcIndex >= AsmJSMaxFuncs)
return failCurrentOffset("too many functions");
mg_.initFuncSig(funcIndex, sigIndex);
Global* global = validationLifo_.new_<Global>(Global::Function);
if (!global)
return false;
global->u.funcDefIndex_ = funcDefIndex;
global->u.funcIndex_ = funcIndex;
if (!globalMap_.putNew(name, global))
return false;
*func = validationLifo_.new_<Func>(name, sigIndex, firstUse, funcDefIndex);
return *func && funcDefs_.append(*func);
*func = validationLifo_.new_<Func>(name, firstUse, funcIndex);
return *func && functions_.append(*func);
}
bool declareFuncPtrTable(Sig&& sig, PropertyName* name, uint32_t firstUse, uint32_t mask,
uint32_t* tableIndex)
uint32_t* index)
{
if (mask > MaxTableInitialLength)
return failCurrentOffset("function pointer table too big");
MOZ_ASSERT(env_.tables.length() == tables_.length());
*tableIndex = env_.tables.length();
uint32_t sigIndex;
if (!newSig(Move(sig), &sigIndex))
return false;
MOZ_ASSERT(sigIndex >= env_.asmJSSigToTableIndex.length());
if (!env_.asmJSSigToTableIndex.resize(sigIndex + 1))
if (!mg_.initSigTableLength(sigIndex, mask + 1))
return false;
env_.asmJSSigToTableIndex[sigIndex] = env_.tables.length();
if (!env_.tables.emplaceBack(TableKind::TypedFunction, Limits(mask + 1)))
return false;
Global* global = validationLifo_.new_<Global>(Global::Table);
Global* global = validationLifo_.new_<Global>(Global::FuncPtrTable);
if (!global)
return false;
global->u.tableIndex_ = *tableIndex;
global->u.funcPtrTableIndex_ = *index = funcPtrTables_.length();
if (!globalMap_.putNew(name, global))
return false;
Table* t = validationLifo_.new_<Table>(sigIndex, name, firstUse, mask);
return t && tables_.append(t);
FuncPtrTable* t = validationLifo_.new_<FuncPtrTable>(sigIndex, name, firstUse, mask);
return t && funcPtrTables_.append(t);
}
bool defineFuncPtrTable(uint32_t tableIndex, Uint32Vector&& elems) {
Table& table = *tables_[tableIndex];
bool defineFuncPtrTable(uint32_t funcPtrTableIndex, Uint32Vector&& elems) {
FuncPtrTable& table = *funcPtrTables_[funcPtrTableIndex];
if (table.defined())
return false;
table.define();
for (uint32_t& index : elems)
index += funcImportMap_.count();
return env_.elemSegments.emplaceBack(tableIndex, InitExpr(Val(uint32_t(0))), Move(elems));
return mg_.initSigTableElems(table.sigIndex(), Move(elems));
}
bool declareImport(PropertyName* name, Sig&& sig, unsigned ffiIndex, uint32_t* importIndex) {
FuncImportMap::AddPtr p = funcImportMap_.lookupForAdd(NamedSig::Lookup(name, sig));
bool declareImport(PropertyName* name, Sig&& sig, unsigned ffiIndex, uint32_t* funcIndex) {
ImportMap::AddPtr p = importMap_.lookupForAdd(NamedSig::Lookup(name, sig));
if (p) {
*importIndex = p->value();
*funcIndex = p->value();
return true;
}
*importIndex = funcImportMap_.count();
MOZ_ASSERT(*importIndex == asmJSMetadata_->asmJSImports.length());
if (*importIndex >= MaxImports)
*funcIndex = asmJSMetadata_->asmJSImports.length();
if (*funcIndex > AsmJSMaxImports)
return failCurrentOffset("too many imports");
if (!asmJSMetadata_->asmJSImports.emplaceBack(ffiIndex))
return false;
uint32_t sigIndex;
if (!declareSig(Move(sig), &sigIndex))
return false;
return funcImportMap_.add(p, NamedSig(name, sigIndex, env_.sigs), *importIndex);
if (!mg_.initImport(*funcIndex, sigIndex))
return false;
return importMap_.add(p, NamedSig(name, mg_.sig(sigIndex)), *funcIndex);
}
bool tryConstantAccess(uint64_t start, uint64_t width) {
@ -2276,8 +2227,8 @@ class MOZ_STACK_CLASS ModuleValidator
if (len > uint64_t(INT32_MAX) + 1)
return false;
len = RoundUpToNextValidAsmJSHeapLength(len);
if (len > env_.minMemoryLength)
env_.minMemoryLength = len;
if (len > mg_.minMemoryLength())
mg_.bumpMinMemoryLength(len);
return true;
}
@ -2352,17 +2303,17 @@ class MOZ_STACK_CLASS ModuleValidator
const ArrayView& arrayView(unsigned i) const {
return arrayViews_[i];
}
unsigned numFuncDefs() const {
return funcDefs_.length();
unsigned numFunctions() const {
return functions_.length();
}
const Func& funcDef(unsigned i) const {
return *funcDefs_[i];
Func& function(unsigned i) const {
return *functions_[i];
}
unsigned numFuncPtrTables() const {
return tables_.length();
return funcPtrTables_.length();
}
Table& table(unsigned i) const {
return *tables_[i];
FuncPtrTable& funcPtrTable(unsigned i) const {
return *funcPtrTables_[i];
}
const Global* lookupGlobal(PropertyName* name) const {
@ -2371,11 +2322,13 @@ class MOZ_STACK_CLASS ModuleValidator
return nullptr;
}
Func* lookupFuncDef(PropertyName* name) {
Func* lookupFunction(PropertyName* name) {
if (GlobalMap::Ptr p = globalMap_.lookup(name)) {
Global* value = p->value();
if (value->which() == Global::Function)
return funcDefs_[value->funcDefIndex()];
if (value->which() == Global::Function) {
MOZ_ASSERT(value->funcIndex() >= AsmJSFirstDefFuncIndex);
return functions_[value->funcIndex() - AsmJSFirstDefFuncIndex];
}
}
return nullptr;
}
@ -2404,35 +2357,18 @@ class MOZ_STACK_CLASS ModuleValidator
bool startFunctionBodies() {
if (!arrayViews_.empty())
env_.memoryUsage = atomicsPresent_ ? MemoryUsage::Shared : MemoryUsage::Unshared;
else
env_.memoryUsage = MemoryUsage::None;
return true;
mg_.initMemoryUsage(atomicsPresent_ ? MemoryUsage::Shared : MemoryUsage::Unshared);
return mg_.startFuncDefs();
}
bool finishFunctionBodies() {
return mg_.finishFuncDefs();
}
SharedModule finish() {
MOZ_ASSERT(env_.funcSigs.empty());
if (!env_.funcSigs.resize(funcImportMap_.count() + funcDefs_.length()))
return nullptr;
for (FuncImportMap::Range r = funcImportMap_.all(); !r.empty(); r.popFront()) {
uint32_t funcIndex = r.front().value();
MOZ_ASSERT(!env_.funcSigs[funcIndex]);
env_.funcSigs[funcIndex] = &env_.sigs[r.front().key().sigIndex()];
}
for (const Func* func : funcDefs_) {
uint32_t funcIndex = funcImportMap_.count() + func->funcDefIndex();
MOZ_ASSERT(!env_.funcSigs[funcIndex]);
env_.funcSigs[funcIndex] = &env_.sigs[func->sigIndex()];
}
if (!env_.funcImportGlobalDataOffsets.resize(funcImportMap_.count()))
return nullptr;
asmJSMetadata_->usesSimd = simdPresent_;
MOZ_ASSERT(asmJSMetadata_->asmJSFuncNames.empty());
if (!asmJSMetadata_->asmJSFuncNames.resize(funcImportMap_.count()))
return nullptr;
for (const Func* func : funcDefs_) {
for (const Func* func : functions_) {
CacheableChars funcName = StringToNewUTF8CharsZ(cx_, *func->name());
if (!funcName || !asmJSMetadata_->asmJSFuncNames.emplaceBack(Move(funcName)))
return nullptr;
@ -2446,47 +2382,13 @@ class MOZ_STACK_CLASS ModuleValidator
uint32_t endAfterCurly = pos.end;
asmJSMetadata_->srcLengthWithRightBrace = endAfterCurly - asmJSMetadata_->srcStart;
ScriptedCaller scriptedCaller;
if (parser_.ss->filename()) {
scriptedCaller.line = scriptedCaller.column = 0; // unused
scriptedCaller.filename = DuplicateString(parser_.ss->filename());
if (!scriptedCaller.filename)
return nullptr;
}
MutableCompileArgs args = cx_->new_<CompileArgs>();
if (!args || !args->initFromContext(cx_, Move(scriptedCaller)))
return nullptr;
uint32_t codeSectionSize = 0;
for (Func* func : funcDefs_)
codeSectionSize += func->bytes().length();
// asm.js does not have any wasm bytecode to save; view-source is
// provided through the ScriptSource.
SharedBytes bytes = cx_->new_<ShareableBytes>();
if (!bytes)
return nullptr;
ModuleGenerator mg(*args, &env_, nullptr, nullptr);
if (!mg.init(codeSectionSize, asmJSMetadata_.get()))
return nullptr;
if (!mg.startFuncDefs())
return nullptr;
for (Func* func : funcDefs_) {
if (!mg.compileFuncDef(funcImportMap_.count() + func->funcDefIndex(), func->line(),
func->bytes().begin(), func->bytes().end(),
Move(func->callSiteLineNums()))) {
return nullptr;
}
}
if (!mg.finishFuncDefs())
return nullptr;
return mg.finishModule(*bytes);
return mg_.finishModule(*bytes);
}
};
@ -3025,13 +2927,13 @@ class MOZ_STACK_CLASS FunctionValidator
continueLabels_.init();
}
void define(ModuleValidator::Func* func, unsigned line) {
bool finish(uint32_t funcIndex, unsigned line) {
MOZ_ASSERT(!blockDepth_);
MOZ_ASSERT(breakableStack_.empty());
MOZ_ASSERT(continuableStack_.empty());
MOZ_ASSERT(breakLabels_.empty());
MOZ_ASSERT(continueLabels_.empty());
func->define(fn_, line, Move(bytes_), Move(callSiteLineNums_));
return m_.mg().compileFuncDef(funcIndex, line, Move(bytes_), Move(callSiteLineNums_));
}
bool fail(ParseNode* pn, const char* str) {
@ -4078,7 +3980,7 @@ CheckVarRef(FunctionValidator& f, ParseNode* varRef, Type* type)
case ModuleValidator::Global::FFI:
case ModuleValidator::Global::MathBuiltinFunction:
case ModuleValidator::Global::AtomicsBuiltinFunction:
case ModuleValidator::Global::Table:
case ModuleValidator::Global::FuncPtrTable:
case ModuleValidator::Global::ArrayView:
case ModuleValidator::Global::ArrayViewCtor:
case ModuleValidator::Global::SimdCtor:
@ -4830,16 +4732,14 @@ static bool
CheckFunctionSignature(ModuleValidator& m, ParseNode* usepn, Sig&& sig, PropertyName* name,
ModuleValidator::Func** func)
{
ModuleValidator::Func* existing = m.lookupFuncDef(name);
ModuleValidator::Func* existing = m.lookupFunction(name);
if (!existing) {
if (!CheckModuleLevelName(m, usepn, name))
return false;
return m.addFuncDef(name, usepn->pn_pos.begin, Move(sig), func);
return m.addFunction(name, usepn->pn_pos.begin, Move(sig), func);
}
const SigWithId& existingSig = m.env().sigs[existing->sigIndex()];
if (!CheckSignatureAgainstExisting(m, usepn, sig, existingSig))
if (!CheckSignatureAgainstExisting(m, usepn, sig, m.mg().funcSig(existing->index())))
return false;
*func = existing;
@ -4873,10 +4773,10 @@ CheckInternalCall(FunctionValidator& f, ParseNode* callNode, PropertyName* calle
if (!CheckFunctionSignature(f.m(), callNode, Move(sig), calleeName, &callee))
return false;
if (!f.writeCall(callNode, MozOp::OldCallDirect))
if (!f.writeCall(callNode, Op::Call))
return false;
if (!f.encoder().writeVarU32(callee->funcDefIndex()))
if (!f.encoder().writeVarU32(callee->index()))
return false;
*type = Type::ret(ret);
@ -4885,27 +4785,27 @@ CheckInternalCall(FunctionValidator& f, ParseNode* callNode, PropertyName* calle
static bool
CheckFuncPtrTableAgainstExisting(ModuleValidator& m, ParseNode* usepn, PropertyName* name,
Sig&& sig, unsigned mask, uint32_t* tableIndex)
Sig&& sig, unsigned mask, uint32_t* funcPtrTableIndex)
{
if (const ModuleValidator::Global* existing = m.lookupGlobal(name)) {
if (existing->which() != ModuleValidator::Global::Table)
if (existing->which() != ModuleValidator::Global::FuncPtrTable)
return m.failName(usepn, "'%s' is not a function-pointer table", name);
ModuleValidator::Table& table = m.table(existing->tableIndex());
ModuleValidator::FuncPtrTable& table = m.funcPtrTable(existing->funcPtrTableIndex());
if (mask != table.mask())
return m.failf(usepn, "mask does not match previous value (%u)", table.mask());
if (!CheckSignatureAgainstExisting(m, usepn, sig, m.env().sigs[table.sigIndex()]))
if (!CheckSignatureAgainstExisting(m, usepn, sig, m.mg().sig(table.sigIndex())))
return false;
*tableIndex = existing->tableIndex();
*funcPtrTableIndex = existing->funcPtrTableIndex();
return true;
}
if (!CheckModuleLevelName(m, usepn, name))
return false;
if (!m.declareFuncPtrTable(Move(sig), name, usepn->pn_pos.begin, mask, tableIndex))
if (!m.declareFuncPtrTable(Move(sig), name, usepn->pn_pos.begin, mask, funcPtrTableIndex))
return false;
return true;
@ -4925,7 +4825,7 @@ CheckFuncPtrCall(FunctionValidator& f, ParseNode* callNode, Type ret, Type* type
PropertyName* name = tableNode->name();
if (const ModuleValidator::Global* existing = f.lookupGlobal(name)) {
if (existing->which() != ModuleValidator::Global::Table)
if (existing->which() != ModuleValidator::Global::FuncPtrTable)
return f.failName(tableNode, "'%s' is not the name of a function-pointer array", name);
}
@ -4960,7 +4860,7 @@ CheckFuncPtrCall(FunctionValidator& f, ParseNode* callNode, Type ret, Type* type
return false;
// Call signature
if (!f.encoder().writeVarU32(f.m().table(tableIndex).sigIndex()))
if (!f.encoder().writeVarU32(f.m().funcPtrTable(tableIndex).sigIndex()))
return false;
*type = Type::ret(ret);
@ -4993,14 +4893,14 @@ CheckFFICall(FunctionValidator& f, ParseNode* callNode, unsigned ffiIndex, Type
Sig sig(Move(args), ret.canonicalToExprType());
uint32_t importIndex;
if (!f.m().declareImport(calleeName, Move(sig), ffiIndex, &importIndex))
uint32_t funcIndex;
if (!f.m().declareImport(calleeName, Move(sig), ffiIndex, &funcIndex))
return false;
if (!f.writeCall(callNode, Op::Call))
return false;
if (!f.encoder().writeVarU32(importIndex))
if (!f.encoder().writeVarU32(funcIndex))
return false;
*type = Type::ret(ret);
@ -5927,7 +5827,7 @@ CheckCoercedCall(FunctionValidator& f, ParseNode* call, Type ret, Type* type)
case ModuleValidator::Global::ConstantLiteral:
case ModuleValidator::Global::ConstantImport:
case ModuleValidator::Global::Variable:
case ModuleValidator::Global::Table:
case ModuleValidator::Global::FuncPtrTable:
case ModuleValidator::Global::ArrayView:
case ModuleValidator::Global::ArrayViewCtor:
return f.failName(callee, "'%s' is not callable function", callee->name());
@ -6537,10 +6437,14 @@ CheckLoopConditionOnEntry(FunctionValidator& f, ParseNode* cond)
if (!condType.isInt())
return f.failf(cond, "%s is not a subtype of int", condType.toChars());
if (!f.encoder().writeOp(Op::I32Eqz))
// TODO change this to i32.eqz
// i32.eq 0 $f
if (!f.writeInt32Lit(0))
return false;
if (!f.encoder().writeOp(Op::I32Eq))
return false;
// brIf (i32.eqz $f) $out
// brIf (i32.eq 0 $f) $out
if (!f.writeBreakIf())
return false;
@ -7222,7 +7126,10 @@ CheckFunction(ModuleValidator& m)
if (func->defined())
return m.failName(fn, "function '%s' already defined", FunctionName(fn));
f.define(func, line);
func->define(fn);
if (!f.finish(func->index(), line))
return m.fail(fn, "internal compiler failure (probably out of memory)");
// Release the parser's lifo memory only after the last use of a parse node.
m.parser().release(mark);
@ -7232,8 +7139,8 @@ CheckFunction(ModuleValidator& m)
static bool
CheckAllFunctionsDefined(ModuleValidator& m)
{
for (unsigned i = 0; i < m.numFuncDefs(); i++) {
const ModuleValidator::Func& f = m.funcDef(i);
for (unsigned i = 0; i < m.numFunctions(); i++) {
ModuleValidator::Func& f = m.function(i);
if (!f.defined())
return m.failNameOffset(f.firstUse(), "missing definition of function %s", f.name());
}
@ -7276,18 +7183,18 @@ CheckFuncPtrTable(ModuleValidator& m, ParseNode* var)
unsigned mask = length - 1;
Uint32Vector elemFuncDefIndices;
Uint32Vector elemFuncIndices;
const Sig* sig = nullptr;
for (ParseNode* elem = ListHead(arrayLiteral); elem; elem = NextNode(elem)) {
if (!elem->isKind(PNK_NAME))
return m.fail(elem, "function-pointer table's elements must be names of functions");
PropertyName* funcName = elem->name();
const ModuleValidator::Func* func = m.lookupFuncDef(funcName);
const ModuleValidator::Func* func = m.lookupFunction(funcName);
if (!func)
return m.fail(elem, "function-pointer table's elements must be names of functions");
const Sig& funcSig = m.env().sigs[func->sigIndex()];
const Sig& funcSig = m.mg().funcSig(func->index());
if (sig) {
if (*sig != funcSig)
return m.fail(elem, "all functions in table must have same signature");
@ -7295,7 +7202,7 @@ CheckFuncPtrTable(ModuleValidator& m, ParseNode* var)
sig = &funcSig;
}
if (!elemFuncDefIndices.append(func->funcDefIndex()))
if (!elemFuncIndices.append(func->index()))
return false;
}
@ -7307,7 +7214,7 @@ CheckFuncPtrTable(ModuleValidator& m, ParseNode* var)
if (!CheckFuncPtrTableAgainstExisting(m, var, var->name(), Move(copy), mask, &tableIndex))
return false;
if (!m.defineFuncPtrTable(tableIndex, Move(elemFuncDefIndices)))
if (!m.defineFuncPtrTable(tableIndex, Move(elemFuncIndices)))
return m.fail(var, "duplicate function-pointer definition");
return true;
@ -7329,11 +7236,11 @@ CheckFuncPtrTables(ModuleValidator& m)
}
for (unsigned i = 0; i < m.numFuncPtrTables(); i++) {
ModuleValidator::Table& table = m.table(i);
if (!table.defined()) {
return m.failNameOffset(table.firstUse(),
ModuleValidator::FuncPtrTable& funcPtrTable = m.funcPtrTable(i);
if (!funcPtrTable.defined()) {
return m.failNameOffset(funcPtrTable.firstUse(),
"function-pointer table %s wasn't defined",
table.name());
funcPtrTable.name());
}
}
@ -7347,7 +7254,7 @@ CheckModuleExportFunction(ModuleValidator& m, ParseNode* pn, PropertyName* maybe
return m.fail(pn, "expected name of exported function");
PropertyName* funcName = pn->name();
const ModuleValidator::Func* func = m.lookupFuncDef(funcName);
const ModuleValidator::Func* func = m.lookupFunction(funcName);
if (!func)
return m.failName(pn, "function '%s' not found", funcName);
@ -7431,7 +7338,19 @@ CheckModule(JSContext* cx, AsmJSParser& parser, ParseNode* stmtList, unsigned* t
ParseNode* moduleFunctionNode = parser.pc->functionBox()->functionNode;
MOZ_ASSERT(moduleFunctionNode);
ModuleValidator m(cx, parser, moduleFunctionNode);
ScriptedCaller scriptedCaller;
if (parser.ss->filename()) {
scriptedCaller.line = scriptedCaller.column = 0; // unused
scriptedCaller.filename = DuplicateString(parser.ss->filename());
if (!scriptedCaller.filename)
return nullptr;
}
MutableCompileArgs args = cx->new_<CompileArgs>();
if (!args || !args->initFromContext(cx, Move(scriptedCaller)))
return nullptr;
ModuleValidator m(cx, *args, parser, moduleFunctionNode);
if (!m.init())
return nullptr;
@ -7456,6 +7375,9 @@ CheckModule(JSContext* cx, AsmJSParser& parser, ParseNode* stmtList, unsigned* t
if (!CheckFunctions(m))
return nullptr;
if (!m.finishFunctionBodies())
return nullptr;
if (!CheckFuncPtrTables(m))
return nullptr;

View file

@ -322,8 +322,7 @@ enum class Op
};
inline bool
IsPrefixByte(uint8_t b)
{
IsPrefixByte(uint8_t b) {
return b >= uint8_t(Op::AtomicPrefix);
}
@ -365,7 +364,6 @@ enum class MozOp
F64Atan2,
// asm.js-style call_indirect with the callee evaluated first.
OldCallDirect,
OldCallIndirect,
// Atomics
@ -506,6 +504,22 @@ static const unsigned MaxMemoryMaximumPages = 65536;
static const unsigned MaxModuleBytes = 1024 * 1024 * 1024;
static const unsigned MaxFunctionBytes = 128 * 1024;
// To be able to assign function indices during compilation while the number of
// imports is still unknown, asm.js sets a maximum number of imports so it can
// immediately start handing out function indices starting at the maximum + 1.
// This means that there is a "hole" between the last import and the first
// definition, but that's fine.
static const unsigned AsmJSMaxTypes = 4 * 1024;
static const unsigned AsmJSMaxFuncs = 512 * 1024;
static const unsigned AsmJSMaxImports = 4 * 1024;
static const unsigned AsmJSMaxTables = 4 * 1024;
static const unsigned AsmJSFirstDefFuncIndex = AsmJSMaxImports + 1;
static_assert(AsmJSMaxTypes <= MaxTypes, "conservative");
static_assert(AsmJSMaxImports <= MaxImports, "conservative");
static_assert(AsmJSFirstDefFuncIndex < MaxFuncs, "conservative");
} // namespace wasm
} // namespace js
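
The partitioning these constants establish is the crux of the patch: imports get indices in [0, AsmJSMaxImports) and definitions start at AsmJSFirstDefFuncIndex, regardless of how many imports are actually declared. A minimal standalone sketch of that arithmetic, using hypothetical lowercase mirrors of the constants (not part of this patch):

#include <cassert>
#include <cstdint>

// Hypothetical mirrors of the constants above, for illustration only.
static const uint32_t kAsmJSMaxImports = 4 * 1024;
static const uint32_t kAsmJSFirstDefFuncIndex = kAsmJSMaxImports + 1;

// A definition's function index is just its definition index biased past
// the reserved import range; cf. Func::index() in AsmJS.cpp.
uint32_t funcIndexOfDef(uint32_t defIndex) {
    return kAsmJSFirstDefFuncIndex + defIndex;
}

// The inverse, as used by getFuncName() and lookupFunction() to recover
// the per-definition slot from a function index.
uint32_t defIndexOfFunc(uint32_t funcIndex) {
    assert(funcIndex >= kAsmJSFirstDefFuncIndex);
    return funcIndex - kAsmJSFirstDefFuncIndex;
}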

View file

@ -390,8 +390,6 @@ wasm::Classify(OpBytes op)
case MozOp::F32x4store2:
case MozOp::F32x4store3:
return OpKind::TeeStore;
case MozOp::OldCallDirect:
return OpKind::OldCallDirect;
case MozOp::OldCallIndirect:
return OpKind::OldCallIndirect;
case MozOp::I32AtomicsLoad:

View file

@ -133,7 +133,6 @@ enum class OpKind {
TeeGlobal,
Call,
CallIndirect,
OldCallDirect,
OldCallIndirect,
Return,
If,
@ -530,8 +529,6 @@ class MOZ_STACK_CLASS OpIter : private Policy
MOZ_MUST_USE bool readB32x4Const(I32x4* i32x4);
MOZ_MUST_USE bool readCall(uint32_t* calleeIndex, ValueVector* argValues);
MOZ_MUST_USE bool readCallIndirect(uint32_t* sigIndex, Value* callee, ValueVector* argValues);
MOZ_MUST_USE bool readOldCallDirect(uint32_t numFuncImports, uint32_t* funcIndex,
ValueVector* argValues);
MOZ_MUST_USE bool readOldCallIndirect(uint32_t* sigIndex, Value* callee, ValueVector* argValues);
MOZ_MUST_USE bool readAtomicLoad(LinearMemoryAddress<Value>* addr,
Scalar::Type* viewType);
@ -1625,33 +1622,6 @@ OpIter<Policy>::readCallIndirect(uint32_t* sigIndex, Value* callee, ValueVector*
return push(sig.ret());
}
template <typename Policy>
inline bool
OpIter<Policy>::readOldCallDirect(uint32_t numFuncImports, uint32_t* funcIndex,
ValueVector* argValues)
{
MOZ_ASSERT(Classify(op_) == OpKind::OldCallDirect);
uint32_t funcDefIndex;
if (!readVarU32(&funcDefIndex))
return fail("unable to read call function index");
if (UINT32_MAX - funcDefIndex < numFuncImports)
return fail("callee index out of range");
*funcIndex = numFuncImports + funcDefIndex;
if (*funcIndex >= env_.funcSigs.length())
return fail("callee index out of range");
const Sig& sig = *env_.funcSigs[*funcIndex];
if (!popCallArgs(sig.args(), argValues))
return false;
return push(sig.ret());
}
template <typename Policy>
inline bool
OpIter<Policy>::readOldCallIndirect(uint32_t* sigIndex, Value* callee, ValueVector* argValues)

View file

@ -359,7 +359,6 @@ typedef UniquePtr<MetadataTier> UniqueMetadataTier;
class Metadata : public ShareableBase<Metadata>, public MetadataCacheablePod
{
protected:
UniqueMetadataTier metadata1_;
mutable UniqueMetadataTier metadata2_; // Access only when hasTier2() is true
mutable Atomic<bool> hasTier2_;

View file

@ -72,6 +72,9 @@ ModuleGenerator::ModuleGenerator(const CompileArgs& args, ModuleEnvironment* env
linkDataTier_(nullptr),
metadataTier_(nullptr),
taskState_(mutexid::WasmCompileTaskState),
numFuncDefs_(0),
numSigs_(0),
numTables_(0),
lifo_(GENERATOR_LIFO_DEFAULT_CHUNK_SIZE),
masmAlloc_(&lifo_),
masm_(MacroAssembler::WasmToken(), masmAlloc_),
@ -136,77 +139,72 @@ ModuleGenerator::~ModuleGenerator()
}
bool
ModuleGenerator::allocateGlobalBytes(uint32_t bytes, uint32_t align, uint32_t* globalDataOffset)
ModuleGenerator::initAsmJS(Metadata* asmJSMetadata)
{
MOZ_ASSERT(!startedFuncDefs_);
MOZ_ASSERT(env_->isAsmJS());
CheckedInt<uint32_t> newGlobalDataLength(metadata_->globalDataLength);
newGlobalDataLength += ComputeByteAlignment(newGlobalDataLength.value(), align);
if (!newGlobalDataLength.isValid())
if (!linkData_.initTier1(Tier::Ion, *asmJSMetadata))
return false;
linkDataTier_ = &linkData_.linkData(Tier::Ion);
*globalDataOffset = newGlobalDataLength.value();
newGlobalDataLength += bytes;
metadataTier_ = &asmJSMetadata->metadata(Tier::Ion);
metadata_ = asmJSMetadata;
MOZ_ASSERT(isAsmJS());
if (!newGlobalDataLength.isValid())
return false;
// For asm.js, the Vectors in ModuleEnvironment are max-sized reservations
// and will be initialized in a linear order via init* functions as the
// module is generated.
MOZ_ASSERT(env_->sigs.length() == AsmJSMaxTypes);
MOZ_ASSERT(env_->tables.length() == AsmJSMaxTables);
MOZ_ASSERT(env_->asmJSSigToTableIndex.length() == AsmJSMaxTypes);
metadata_->globalDataLength = newGlobalDataLength.value();
return true;
}
bool
ModuleGenerator::init(size_t codeSectionSize, Metadata* maybeAsmJSMetadata)
ModuleGenerator::initWasm(size_t codeSectionSize)
{
// Perform fallible metadata, linkdata, assumption allocations.
MOZ_ASSERT(!env_->isAsmJS());
if (maybeAsmJSMetadata) {
MOZ_ASSERT(isAsmJS());
metadataTier_ = &maybeAsmJSMetadata->metadata(tier());
metadata_ = maybeAsmJSMetadata;
} else {
MOZ_ASSERT(!isAsmJS());
auto metadataTier = js::MakeUnique<MetadataTier>(tier());
if (!metadataTier)
return false;
metadataTier_ = metadataTier.get();
metadata_ = js_new<Metadata>(Move(metadataTier));
if (!metadata_)
return false;
}
auto metadataTier = js::MakeUnique<MetadataTier>(tier());
if (!metadataTier)
return false;
if (compileArgs_->scriptedCaller.filename) {
metadata_->filename = DuplicateString(compileArgs_->scriptedCaller.filename.get());
if (!metadata_->filename)
return false;
}
metadata_ = js_new<Metadata>(Move(metadataTier));
if (!metadata_)
return false;
metadataTier_ = &metadata_->metadata(tier());
if (!linkData_.initTier1(tier(), *metadata_))
return false;
linkDataTier_ = &linkData_.linkData(tier());
if (!assumptions_.clone(compileArgs_->assumptions))
return false;
MOZ_ASSERT(!isAsmJS());
// The funcToCodeRange_ maps function indices to code-range indices and all
// elements will be initialized by the time module generation is finished.
// For wasm, the amount of code, functions, signatures, imports, exports,
// etc are known a priori.
if (!funcToCodeRange_.appendN(BAD_CODE_RANGE, env_->funcSigs.length()))
return false;
numSigs_ = env_->sigs.length();
numTables_ = env_->tables.length();
// Pre-reserve space for large Vectors to avoid the significant cost of the
// final reallocs. In particular, the MacroAssembler can be enormous, so be
// extra conservative. Note, podResizeToFit calls at the end will trim off
// unneeded capacity.
// When estimating the MacroAssembler buffer size, be extra conservative
// since the price is low and the cost of an extra resize is high.
if (!masm_.reserve(size_t(1.2 * EstimateCompiledCodeSize(tier(), codeSectionSize))))
return false;
// Although we could compute it more precisely (only the number of far jumps
// is unknown), 2x number of functions is a good conservative estimate and
// podResizeToFit will remove waste at the end.
if (!metadataTier_->codeRanges.reserve(2 * env_->numFuncDefs()))
return false;
// Code can vary a lot, so use a conservative estimate of 1 load/store/call/trap
// per 10 bytes of bytecode and rely on podResizeToFit() to remove waste.
const size_t CallSitesPerByteCode = 10;
if (!metadataTier_->callSites.reserve(codeSectionSize / CallSitesPerByteCode))
return false;
@ -217,19 +215,10 @@ ModuleGenerator::init(size_t codeSectionSize, Metadata* maybeAsmJSMetadata)
// Allocate space in TlsData for declarations that need it.
MOZ_ASSERT(metadata_->globalDataLength == 0);
for (size_t i = 0; i < env_->funcImportGlobalDataOffsets.length(); i++) {
uint32_t globalDataOffset;
if (!allocateGlobalBytes(sizeof(FuncImportTls), sizeof(void*), &globalDataOffset))
return false;
env_->funcImportGlobalDataOffsets[i] = globalDataOffset;
Sig copy;
if (!copy.clone(*env_->funcSigs[i]))
return false;
if (!metadataTier_->funcImports.emplaceBack(Move(copy), globalDataOffset))
env_->funcImportGlobalDataOffsets[i] = metadata_->globalDataLength;
metadata_->globalDataLength += sizeof(FuncImportTls);
if (!addFuncImport(*env_->funcSigs[i], env_->funcImportGlobalDataOffsets[i]))
return false;
}
@ -238,77 +227,72 @@ ModuleGenerator::init(size_t codeSectionSize, Metadata* maybeAsmJSMetadata)
return false;
}
if (!isAsmJS()) {
for (SigWithId& sig : env_->sigs) {
if (SigIdDesc::isGlobal(sig)) {
uint32_t globalDataOffset;
if (!allocateGlobalBytes(sizeof(void*), sizeof(void*), &globalDataOffset))
return false;
for (uint32_t i = 0; i < numSigs_; i++) {
SigWithId& sig = env_->sigs[i];
if (SigIdDesc::isGlobal(sig)) {
uint32_t globalDataOffset;
if (!allocateGlobalBytes(sizeof(void*), sizeof(void*), &globalDataOffset))
return false;
sig.id = SigIdDesc::global(sig, globalDataOffset);
sig.id = SigIdDesc::global(sig, globalDataOffset);
Sig copy;
if (!copy.clone(sig))
return false;
Sig copy;
if (!copy.clone(sig))
return false;
if (!metadata_->sigIds.emplaceBack(Move(copy), sig.id))
return false;
} else {
sig.id = SigIdDesc::immediate(sig);
}
if (!metadata_->sigIds.emplaceBack(Move(copy), sig.id))
return false;
} else {
sig.id = SigIdDesc::immediate(sig);
}
}
for (GlobalDesc& global : env_->globals) {
if (global.isConstant())
continue;
uint32_t width = SizeOf(global.type());
uint32_t globalDataOffset;
if (!allocateGlobalBytes(width, width, &globalDataOffset))
if (!allocateGlobal(&global))
return false;
global.setOffset(globalDataOffset);
}
// Accumulate all exported functions, whether by explicit export or
// implicitly by being an element of an external (imported or exported)
// table or by being the start function. The FuncExportVector stored in
// Metadata needs to be sorted (to allow O(log(n)) lookup at runtime) and
// deduplicated, so use an intermediate vector to sort and de-duplicate.
Uint32Vector exportedFuncs;
// Build a HashSet of all exported functions, whether by explicit export of
// the function, or implicitly by being an element of an external (imported
// or exported) table, or being the start function.
for (const Export& exp : env_->exports) {
if (exp.kind() == DefinitionKind::Function) {
if (!exportedFuncs.append(exp.funcIndex()))
if (!exportedFuncs_.put(exp.funcIndex()))
return false;
}
}
for (ElemSegment& elems : env_->elemSegments) {
if (env_->tables[elems.tableIndex].external) {
if (!exportedFuncs.appendAll(elems.elemFuncIndices))
return false;
}
}
if (env_->startFuncIndex && !exportedFuncs.append(*env_->startFuncIndex))
return false;
std::sort(exportedFuncs.begin(), exportedFuncs.end());
auto* newEnd = std::unique(exportedFuncs.begin(), exportedFuncs.end());
exportedFuncs.erase(newEnd, exportedFuncs.end());
if (!metadataTier_->funcExports.reserve(exportedFuncs.length()))
return false;
for (uint32_t funcIndex : exportedFuncs) {
Sig sig;
if (!sig.clone(*env_->funcSigs[funcIndex]))
if (env_->startFuncIndex) {
metadata_->startFuncIndex.emplace(*env_->startFuncIndex);
if (!exportedFuncs_.put(*env_->startFuncIndex))
return false;
}
return true;
}
bool
ModuleGenerator::init(size_t codeSectionSize, Metadata* maybeAsmJSMetadata)
{
if (!funcToCodeRange_.appendN(BAD_CODE_RANGE, env_->funcSigs.length()))
return false;
if (!assumptions_.clone(compileArgs_->assumptions))
return false;
if (!exportedFuncs_.init())
return false;
if (env_->isAsmJS() ? !initAsmJS(maybeAsmJSMetadata) : !initWasm(codeSectionSize))
return false;
if (compileArgs_->scriptedCaller.filename) {
metadata_->filename = DuplicateString(compileArgs_->scriptedCaller.filename.get());
if (!metadata_->filename)
return false;
metadataTier_->funcExports.infallibleEmplaceBack(Move(sig), funcIndex);
}
return true;
@ -574,6 +558,231 @@ ModuleGenerator::linkCompiledCode(const CompiledCode& code)
return true;
}
bool
ModuleGenerator::finishTask(CompileTask* task)
{
masm_.haltingAlign(CodeAlignment);
// Before merging in the new function's code, if calls in a prior code range
// might go out of range, insert far jumps to extend the range.
if (!InRange(startOfUnpatchedCallsites_, masm_.size() + task->output.bytes.length())) {
startOfUnpatchedCallsites_ = masm_.size();
if (!linkCallSites())
return false;
}
if (!linkCompiledCode(task->output))
return false;
task->output.clear();
MOZ_ASSERT(task->inputs.empty());
MOZ_ASSERT(task->output.empty());
MOZ_ASSERT(task->lifo.isEmpty());
freeTasks_.infallibleAppend(task);
return true;
}
bool
ModuleGenerator::finishFuncExports()
{
// In addition to all the functions that were explicitly exported, any
// element of an exported table is also exported.
for (ElemSegment& elems : env_->elemSegments) {
if (env_->tables[elems.tableIndex].external) {
for (uint32_t funcIndex : elems.elemFuncIndices) {
if (!exportedFuncs_.put(funcIndex))
return false;
}
}
}
// ModuleGenerator::exportedFuncs_ is an unordered HashSet. The
// FuncExportVector stored in Metadata needs to be stored sorted by
// function index to allow O(log(n)) lookup at runtime.
Uint32Vector sorted;
if (!sorted.reserve(exportedFuncs_.count()))
return false;
for (Uint32Set::Range r = exportedFuncs_.all(); !r.empty(); r.popFront())
sorted.infallibleAppend(r.front());
std::sort(sorted.begin(), sorted.end());
MOZ_ASSERT(metadataTier_->funcExports.empty());
if (!metadataTier_->funcExports.reserve(sorted.length()))
return false;
for (uint32_t funcIndex : sorted) {
Sig sig;
if (!sig.clone(funcSig(funcIndex)))
return false;
metadataTier_->funcExports.infallibleEmplaceBack(Move(sig), funcIndex);
}
return true;
}
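Because finishFuncExports() stores the FuncExportVector sorted by function index, the runtime lookup can be a plain binary search. A sketch of that lookup under an assumed stub type (the real FuncExport also carries the cloned Sig):

#include <algorithm>
#include <cstdint>
#include <vector>

// Illustrative stand-in for wasm::FuncExport; only the key field is shown.
struct FuncExportStub {
    uint32_t funcIndex;
};

// O(log n) lookup over the sorted, deduplicated export vector.
const FuncExportStub* lookupFuncExport(const std::vector<FuncExportStub>& exports,
                                       uint32_t funcIndex)
{
    auto it = std::lower_bound(exports.begin(), exports.end(), funcIndex,
                               [](const FuncExportStub& fe, uint32_t target) {
                                   return fe.funcIndex < target;
                               });
    if (it == exports.end() || it->funcIndex != funcIndex)
        return nullptr;
    return &*it;
}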
bool
ModuleGenerator::addFuncImport(const Sig& sig, uint32_t globalDataOffset)
{
MOZ_ASSERT(!finishedFuncDefs_);
Sig copy;
if (!copy.clone(sig))
return false;
return metadataTier_->funcImports.emplaceBack(Move(copy), globalDataOffset);
}
bool
ModuleGenerator::allocateGlobalBytes(uint32_t bytes, uint32_t align, uint32_t* globalDataOffset)
{
CheckedInt<uint32_t> newGlobalDataLength(metadata_->globalDataLength);
newGlobalDataLength += ComputeByteAlignment(newGlobalDataLength.value(), align);
if (!newGlobalDataLength.isValid())
return false;
*globalDataOffset = newGlobalDataLength.value();
newGlobalDataLength += bytes;
if (!newGlobalDataLength.isValid())
return false;
metadata_->globalDataLength = newGlobalDataLength.value();
return true;
}
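allocateGlobalBytes() pads the running globalDataLength up to the requested alignment, hands out the padded value as the new offset, and then bumps the length by the allocation size; CheckedInt guards both additions against uint32_t overflow. The same arithmetic without the overflow guards, as a worked sketch (illustrative only, not part of this patch):

#include <cstdint>

// align must be a power of two, as it is for all callers above.
uint32_t alignUp(uint32_t length, uint32_t align) {
    return (length + align - 1) & ~(align - 1);
}

// E.g. with a running length of 6, allocating 8 bytes at 8-byte alignment
// pads 6 up to 8, returns offset 8, and leaves the running length at 16.
uint32_t allocate(uint32_t* globalDataLength, uint32_t bytes, uint32_t align) {
    uint32_t offset = alignUp(*globalDataLength, align);
    *globalDataLength = offset + bytes;
    return offset;
}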
bool
ModuleGenerator::allocateGlobal(GlobalDesc* global)
{
MOZ_ASSERT(!startedFuncDefs_);
unsigned width = 0;
switch (global->type()) {
case ValType::I32:
case ValType::F32:
width = 4;
break;
case ValType::I64:
case ValType::F64:
width = 8;
break;
case ValType::I8x16:
case ValType::I16x8:
case ValType::I32x4:
case ValType::F32x4:
case ValType::B8x16:
case ValType::B16x8:
case ValType::B32x4:
width = 16;
break;
}
uint32_t offset;
if (!allocateGlobalBytes(width, width, &offset))
return false;
global->setOffset(offset);
return true;
}
bool
ModuleGenerator::addGlobal(ValType type, bool isConst, uint32_t* index)
{
MOZ_ASSERT(isAsmJS());
MOZ_ASSERT(!startedFuncDefs_);
*index = env_->globals.length();
GlobalDesc global(type, !isConst, *index);
if (!allocateGlobal(&global))
return false;
return env_->globals.append(global);
}
bool
ModuleGenerator::addExport(CacheableChars&& fieldName, uint32_t funcIndex)
{
MOZ_ASSERT(isAsmJS());
return env_->exports.emplaceBack(Move(fieldName), funcIndex, DefinitionKind::Function) &&
exportedFuncs_.put(funcIndex);
}
void
ModuleGenerator::initSig(uint32_t sigIndex, Sig&& sig)
{
MOZ_ASSERT(isAsmJS());
MOZ_ASSERT(sigIndex == numSigs_);
numSigs_++;
MOZ_ASSERT(env_->sigs[sigIndex] == Sig());
env_->sigs[sigIndex] = Move(sig);
}
const SigWithId&
ModuleGenerator::sig(uint32_t index) const
{
MOZ_ASSERT(index < numSigs_);
return env_->sigs[index];
}
void
ModuleGenerator::initFuncSig(uint32_t funcIndex, uint32_t sigIndex)
{
MOZ_ASSERT(isAsmJS());
MOZ_ASSERT(!env_->funcSigs[funcIndex]);
env_->funcSigs[funcIndex] = &env_->sigs[sigIndex];
}
void
ModuleGenerator::initMemoryUsage(MemoryUsage memoryUsage)
{
MOZ_ASSERT(isAsmJS());
MOZ_ASSERT(env_->memoryUsage == MemoryUsage::None);
env_->memoryUsage = memoryUsage;
}
void
ModuleGenerator::bumpMinMemoryLength(uint32_t newMinMemoryLength)
{
MOZ_ASSERT(isAsmJS());
MOZ_ASSERT(newMinMemoryLength >= env_->minMemoryLength);
env_->minMemoryLength = newMinMemoryLength;
}
bool
ModuleGenerator::initImport(uint32_t funcIndex, uint32_t sigIndex)
{
MOZ_ASSERT(isAsmJS());
MOZ_ASSERT(!env_->funcSigs[funcIndex]);
env_->funcSigs[funcIndex] = &env_->sigs[sigIndex];
uint32_t globalDataOffset;
if (!allocateGlobalBytes(sizeof(FuncImportTls), sizeof(void*), &globalDataOffset))
return false;
MOZ_ASSERT(!env_->funcImportGlobalDataOffsets[funcIndex]);
env_->funcImportGlobalDataOffsets[funcIndex] = globalDataOffset;
MOZ_ASSERT(funcIndex == metadataTier_->funcImports.length());
return addFuncImport(sig(sigIndex), globalDataOffset);
}
const SigWithId&
ModuleGenerator::funcSig(uint32_t funcIndex) const
{
MOZ_ASSERT(env_->funcSigs[funcIndex]);
return *env_->funcSigs[funcIndex];
}
bool
ModuleGenerator::startFuncDefs()
{
@ -601,21 +810,6 @@ ModuleGenerator::startFuncDefs()
for (size_t i = 0; i < numTasks; i++)
freeTasks_.infallibleAppend(&tasks_[i]);
// Fill in function stubs for each import so that imported functions can be
// used in all the places that normal function definitions can (table
// elements, export calls, etc).
CompiledCode& importCode = tasks_[0].output;
MOZ_ASSERT(importCode.empty());
if (!GenerateImportFunctions(*env_, metadataTier_->funcImports, &importCode))
return false;
if (!linkCompiledCode(importCode))
return false;
importCode.clear();
startedFuncDefs_ = true;
MOZ_ASSERT(!finishedFuncDefs_);
return true;
@ -664,31 +858,6 @@ wasm::ExecuteCompileTaskFromHelperThread(CompileTask* task)
taskState->failedOrFinished.notify_one();
}
bool
ModuleGenerator::finishTask(CompileTask* task)
{
masm_.haltingAlign(CodeAlignment);
// Before merging in the new function's code, if calls in a prior code range
// might go out of range, insert far jumps to extend the range.
if (!InRange(startOfUnpatchedCallsites_, masm_.size() + task->output.bytes.length())) {
startOfUnpatchedCallsites_ = masm_.size();
if (!linkCallSites())
return false;
}
if (!linkCompiledCode(task->output))
return false;
task->output.clear();
MOZ_ASSERT(task->inputs.empty());
MOZ_ASSERT(task->output.empty());
MOZ_ASSERT(task->lifo.isEmpty());
freeTasks_.infallibleAppend(task);
return true;
}
bool
ModuleGenerator::launchBatchCompile()
{
@ -743,13 +912,15 @@ ModuleGenerator::finishOutstandingTask()
bool
ModuleGenerator::compileFuncDef(uint32_t funcIndex, uint32_t lineOrBytecode,
const uint8_t* begin, const uint8_t* end,
Bytes&& bytes, const uint8_t* begin, const uint8_t* end,
Uint32Vector&& lineNums)
{
MOZ_ASSERT(startedFuncDefs_);
MOZ_ASSERT(!finishedFuncDefs_);
MOZ_ASSERT_IF(mode() == CompileMode::Tier1, funcIndex < env_->numFuncs());
numFuncDefs_++;
if (!currentTask_) {
if (freeTasks_.empty() && !finishOutstandingTask())
return false;
@ -759,7 +930,7 @@ ModuleGenerator::compileFuncDef(uint32_t funcIndex, uint32_t lineOrBytecode,
uint32_t funcBytecodeLength = end - begin;
FuncCompileInputVector& inputs = currentTask_->inputs;
if (!inputs.emplaceBack(funcIndex, lineOrBytecode, begin, end, Move(lineNums)))
if (!inputs.emplaceBack(funcIndex, lineOrBytecode, Move(bytes), begin, end, Move(lineNums)))
return false;
uint32_t threshold;
@ -774,6 +945,20 @@ ModuleGenerator::compileFuncDef(uint32_t funcIndex, uint32_t lineOrBytecode,
return batchedBytecode_ <= threshold || launchBatchCompile();
}
bool
ModuleGenerator::compileFuncDef(uint32_t funcIndex, uint32_t lineOrBytecode,
const uint8_t* begin, const uint8_t* end)
{
return compileFuncDef(funcIndex, lineOrBytecode, Bytes(), begin, end, Uint32Vector());
}
bool
ModuleGenerator::compileFuncDef(uint32_t funcIndex, uint32_t lineOrBytecode,
Bytes&& bytes, Uint32Vector&& lineNums)
{
return compileFuncDef(funcIndex, lineOrBytecode, Move(bytes), bytes.begin(), bytes.end(), Move(lineNums));
}
bool
ModuleGenerator::finishFuncDefs()
{
@ -788,22 +973,62 @@ ModuleGenerator::finishFuncDefs()
return false;
}
MOZ_ASSERT_IF(!isAsmJS(), numFuncDefs_ == env_->numFuncDefs());
finishedFuncDefs_ = true;
return true;
}
bool
ModuleGenerator::initSigTableLength(uint32_t sigIndex, uint32_t length)
{
MOZ_ASSERT(isAsmJS());
MOZ_ASSERT(length != 0);
MOZ_ASSERT(length <= MaxTableInitialLength);
MOZ_ASSERT(env_->asmJSSigToTableIndex[sigIndex] == 0);
env_->asmJSSigToTableIndex[sigIndex] = numTables_;
TableDesc& table = env_->tables[numTables_++];
table.kind = TableKind::TypedFunction;
table.limits.initial = length;
table.limits.maximum = Some(length);
return allocateGlobalBytes(sizeof(TableTls), sizeof(void*), &table.globalDataOffset);
}
bool
ModuleGenerator::initSigTableElems(uint32_t sigIndex, Uint32Vector&& elemFuncIndices)
{
MOZ_ASSERT(isAsmJS());
MOZ_ASSERT(finishedFuncDefs_);
uint32_t tableIndex = env_->asmJSSigToTableIndex[sigIndex];
MOZ_ASSERT(env_->tables[tableIndex].limits.initial == elemFuncIndices.length());
InitExpr offset(Val(uint32_t(0)));
return env_->elemSegments.emplaceBack(tableIndex, offset, Move(elemFuncIndices));
}
bool
ModuleGenerator::finishLinking()
{
// All functions and traps CodeRanges should have been processed.
#ifdef DEBUG
for (uint32_t codeRangeIndex : funcToCodeRange_)
MOZ_ASSERT(codeRangeIndex != BAD_CODE_RANGE);
if (isAsmJS()) {
for (uint32_t i = 0; i < AsmJSFirstDefFuncIndex; i++)
MOZ_ASSERT(funcToCodeRange_[i] == BAD_CODE_RANGE);
for (uint32_t i = AsmJSFirstDefFuncIndex; i < AsmJSFirstDefFuncIndex + numFuncDefs_; i++)
MOZ_ASSERT(funcToCodeRange_[i] != BAD_CODE_RANGE);
for (uint32_t i = AsmJSFirstDefFuncIndex + numFuncDefs_; i < funcToCodeRange_.length(); i++)
MOZ_ASSERT(funcToCodeRange_[i] == BAD_CODE_RANGE);
} else {
for (uint32_t codeRangeIndex : funcToCodeRange_)
MOZ_ASSERT(codeRangeIndex != BAD_CODE_RANGE);
}
#endif
// Now that all functions and stubs are generated and their CodeRanges
// known, patch all calls (which can emit far jumps) and far jumps.
if (!linkCallSites())
return false;
@ -856,7 +1081,6 @@ ModuleGenerator::finishMetadata(const ShareableBytes& bytecode)
metadata_->memoryUsage = env_->memoryUsage;
metadata_->minMemoryLength = env_->minMemoryLength;
metadata_->maxMemoryLength = env_->maxMemoryLength;
metadata_->startFuncIndex = env_->startFuncIndex;
metadata_->tables = Move(env_->tables);
metadata_->globals = Move(env_->globals);
metadata_->funcNames = Move(env_->funcNames);
@ -876,6 +1100,12 @@ ModuleGenerator::finishMetadata(const ShareableBytes& bytecode)
metadataTier_->debugTrapFarJumpOffsets.podResizeToFit();
metadataTier_->debugFuncToCodeRange.podResizeToFit();
// For asm.js, the tables vector is over-allocated (to avoid resize during
// parallel compilation). Shrink it back down to fit.
if (isAsmJS() && !metadata_->tables.resize(numTables_))
return false;
// Complete function exports and element segments with code range indices,
// now that every function has a code range.
@ -925,6 +1155,13 @@ ModuleGenerator::finishCodeSegment(const ShareableBytes& bytecode)
{
MOZ_ASSERT(finishedFuncDefs_);
// Because of asm.js, we can only generate the FuncExportVector at the end
// of module generation (after we've seen the end of the exports object at
// the end of the asm.js module).
if (!finishFuncExports())
return nullptr;
// Now that all imports/exports are known, we can generate a special
// CompiledCode containing stubs.

View file

@ -31,6 +31,7 @@ namespace wasm {
struct FuncCompileInput
{
Bytes bytesToDelete;
const uint8_t* begin;
const uint8_t* end;
uint32_t index;
@ -39,10 +40,12 @@ struct FuncCompileInput
FuncCompileInput(uint32_t index,
uint32_t lineOrBytecode,
Bytes&& bytesToDelete,
const uint8_t* begin,
const uint8_t* end,
Uint32Vector&& callSiteLineNums)
: begin(begin),
: bytesToDelete(Move(bytesToDelete)),
begin(begin),
end(end),
index(index),
lineOrBytecode(lineOrBytecode),
@ -142,6 +145,7 @@ struct CompileTask
class MOZ_STACK_CLASS ModuleGenerator
{
typedef HashSet<uint32_t, DefaultHasher<uint32_t>, SystemAllocPolicy> Uint32Set;
typedef Vector<CompileTask, 0, SystemAllocPolicy> CompileTaskVector;
typedef Vector<CompileTask*, 0, SystemAllocPolicy> CompileTaskPtrVector;
typedef EnumeratedArray<Trap, Trap::Limit, uint32_t> Uint32TrapArray;
@ -163,6 +167,9 @@ class MOZ_STACK_CLASS ModuleGenerator
// Data scoped to the ModuleGenerator's lifetime
ExclusiveCompileTaskState taskState_;
uint32_t numFuncDefs_;
uint32_t numSigs_;
uint32_t numTables_;
LifoAlloc lifo_;
jit::JitContext jcx_;
jit::TempAllocator masmAlloc_;
@ -173,6 +180,7 @@ class MOZ_STACK_CLASS ModuleGenerator
TrapFarJumpVector trapFarJumps_;
CallFarJumpVector callFarJumps_;
CallSiteTargetVector callSiteTargets_;
Uint32Set exportedFuncs_;
uint32_t lastPatchedCallSite_;
uint32_t startOfUnpatchedCallsites_;
CodeOffsetVector debugTrapFarJumps_;
@ -189,8 +197,6 @@ class MOZ_STACK_CLASS ModuleGenerator
DebugOnly<bool> startedFuncDefs_;
DebugOnly<bool> finishedFuncDefs_;
bool allocateGlobalBytes(uint32_t bytes, uint32_t align, uint32_t* globalDataOff);
bool funcIsCompiled(uint32_t funcIndex) const;
const CodeRange& funcCodeRange(uint32_t funcIndex) const;
@ -198,13 +204,23 @@ class MOZ_STACK_CLASS ModuleGenerator
void noteCodeRange(uint32_t codeRangeIndex, const CodeRange& codeRange);
bool linkCompiledCode(const CompiledCode& code);
bool finishTask(CompileTask* task);
bool launchBatchCompile();
bool finishOutstandingTask();
bool finishFuncExports();
bool finishLinking();
bool finishMetadata(const ShareableBytes& bytecode);
UniqueConstCodeSegment finishCodeSegment(const ShareableBytes& bytecode);
UniqueJumpTable createJumpTable(const CodeSegment& codeSegment);
bool addFuncImport(const Sig& sig, uint32_t globalDataOffset);
bool allocateGlobalBytes(uint32_t bytes, uint32_t align, uint32_t* globalDataOff);
bool allocateGlobal(GlobalDesc* global);
bool launchBatchCompile();
bool compileFuncDef(uint32_t funcIndex, uint32_t lineOrBytecode,
Bytes&& bytes, const uint8_t* begin, const uint8_t* end,
Uint32Vector&& lineNums);
bool initAsmJS(Metadata* asmJSMetadata);
bool initWasm(size_t codeLength);
bool isAsmJS() const { return env_->isAsmJS(); }
Tier tier() const { return env_->tier(); }
@ -215,22 +231,39 @@ class MOZ_STACK_CLASS ModuleGenerator
ModuleGenerator(const CompileArgs& args, ModuleEnvironment* env,
Atomic<bool>* cancelled, UniqueChars* error);
~ModuleGenerator();
MOZ_MUST_USE bool init(size_t codeSectionSize, Metadata* maybeAsmJSMetadata = nullptr);
// After initialization, startFuncDefs() shall be called before one call to
// compileFuncDef() for each funcIndex in the range [0, env->numFuncDefs),
// followed by finishFuncDefs().
// Function definitions:
MOZ_MUST_USE bool startFuncDefs();
MOZ_MUST_USE bool compileFuncDef(uint32_t funcIndex, uint32_t lineOrBytecode,
const uint8_t* begin, const uint8_t* end,
Uint32Vector&& callSiteLineNums = Uint32Vector());
const uint8_t* begin, const uint8_t* end);
MOZ_MUST_USE bool compileFuncDef(uint32_t funcIndex, uint32_t lineOrBytecode,
Bytes&& bytes, Uint32Vector&& callSiteLineNums);
MOZ_MUST_USE bool finishFuncDefs();
// After finishFuncDefs(), one of the following is called, depending on the
// CompileMode: finishModule for Once or Tier1, finishTier2 for Tier2.
// asm.js accessors:
uint32_t minMemoryLength() const { return env_->minMemoryLength; }
uint32_t numSigs() const { return numSigs_; }
const SigWithId& sig(uint32_t sigIndex) const;
const SigWithId& funcSig(uint32_t funcIndex) const;
// asm.js lazy initialization:
void initSig(uint32_t sigIndex, Sig&& sig);
void initFuncSig(uint32_t funcIndex, uint32_t sigIndex);
MOZ_MUST_USE bool initImport(uint32_t funcIndex, uint32_t sigIndex);
MOZ_MUST_USE bool initSigTableLength(uint32_t sigIndex, uint32_t length);
MOZ_MUST_USE bool initSigTableElems(uint32_t sigIndex, Uint32Vector&& elemFuncIndices);
void initMemoryUsage(MemoryUsage memoryUsage);
void bumpMinMemoryLength(uint32_t newMinMemoryLength);
MOZ_MUST_USE bool addGlobal(ValType type, bool isConst, uint32_t* index);
MOZ_MUST_USE bool addExport(CacheableChars&& fieldChars, uint32_t funcIndex);
// Finish compilation of the given bytecode.
SharedModule finishModule(const ShareableBytes& bytecode);
// Finish compilation of the given bytecode, installing tier-variant parts
// for Tier 2 into module.
MOZ_MUST_USE bool finishTier2(Module& module);
};
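The comments above pin down the generator's driving protocol: init(), then startFuncDefs(), one compileFuncDef() per function definition, finishFuncDefs(), and finally finishModule() for Once/Tier1 or finishTier2() for Tier2. A minimal driver sketch against the declarations above (FuncBody and CompileAllFuncDefs are illustrative names, not part of this patch; error reporting and the Vector allocator policy are elided):

    struct FuncBody
    {
        uint32_t funcIndex;       // index into the module's function index space
        uint32_t lineOrBytecode;  // source line (asm.js) or bytecode offset (wasm)
        const uint8_t* begin;     // first byte of the function body
        const uint8_t* end;       // one past the last byte
    };

    static SharedModule
    CompileAllFuncDefs(ModuleGenerator& mg, const Vector<FuncBody>& bodies,
                       const ShareableBytes& bytecode)
    {
        if (!mg.startFuncDefs())
            return nullptr;

        for (const FuncBody& body : bodies) {
            // One call per funcIndex, as the comment above requires.
            if (!mg.compileFuncDef(body.funcIndex, body.lineOrBytecode,
                                   body.begin, body.end))
                return nullptr;
        }

        if (!mg.finishFuncDefs())
            return nullptr;

        // Once/Tier1 path; a Tier2 driver would call mg.finishTier2(module) instead.
        return mg.finishModule(bytecode);
    }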

View file

@ -2037,19 +2037,14 @@ EmitCallArgs(FunctionCompiler& f, const Sig& sig, const DefVector& args, CallCom
}
static bool
EmitCall(FunctionCompiler& f, bool asmJSFuncDef)
EmitCall(FunctionCompiler& f)
{
uint32_t lineOrBytecode = f.readCallSiteLineOrBytecode();
uint32_t funcIndex;
DefVector args;
if (asmJSFuncDef) {
if (!f.iter().readOldCallDirect(f.env().numFuncImports(), &funcIndex, &args))
return false;
} else {
if (!f.iter().readCall(&funcIndex, &args))
return false;
}
if (!f.iter().readCall(&funcIndex, &args))
return false;
if (f.inDeadCode())
return true;
@ -3332,7 +3327,7 @@ EmitBodyExprs(FunctionCompiler& f)
// Calls
case uint16_t(Op::Call):
CHECK(EmitCall(f, /* asmJSFuncDef = */ false));
CHECK(EmitCall(f));
case uint16_t(Op::CallIndirect):
CHECK(EmitCallIndirect(f, /* oldStyle = */ false));
@ -3728,8 +3723,6 @@ EmitBodyExprs(FunctionCompiler& f)
CHECK_ASMJS(EmitBinaryMathBuiltinCall(f, SymbolicAddress::PowD, ValType::F64));
case uint16_t(MozOp::F64Atan2):
CHECK_ASMJS(EmitBinaryMathBuiltinCall(f, SymbolicAddress::ATan2D, ValType::F64));
case uint16_t(MozOp::OldCallDirect):
CHECK_ASMJS(EmitCall(f, /* asmJSFuncDef = */ true));
case uint16_t(MozOp::OldCallIndirect):
CHECK_ASMJS(EmitCallIndirect(f, /* oldStyle = */ true));
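For context on the deleted branch: MozOp::OldCallDirect is the asm.js direct-call opcode whose immediate indexes function definitions only, which is why readOldCallDirect() received f.env().numFuncImports() — the reader rebases the definition index into the full function index space. A hedged sketch of that rebasing (ReadOldCallDirect is an illustrative name, not the actual reader API):

    static bool
    ReadOldCallDirect(Decoder& d, uint32_t numFuncImports, uint32_t* funcIndex)
    {
        // The immediate counts definitions only; imports occupy the low indices.
        uint32_t funcDefIndex;
        if (!d.readVarU32(&funcDefIndex))
            return false;
        *funcIndex = numFuncImports + funcDefIndex;
        return true;
    }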

View file

@ -542,33 +542,6 @@ GenerateImportFunction(jit::MacroAssembler& masm, const FuncImport& fi, SigIdDes
return FinishOffsets(masm, offsets);
}
static const unsigned STUBS_LIFO_DEFAULT_CHUNK_SIZE = 4 * 1024;
bool
wasm::GenerateImportFunctions(const ModuleEnvironment& env, const FuncImportVector& imports,
CompiledCode* code)
{
LifoAlloc lifo(STUBS_LIFO_DEFAULT_CHUNK_SIZE);
TempAllocator alloc(&lifo);
MacroAssembler masm(MacroAssembler::WasmToken(), alloc);
for (uint32_t funcIndex = 0; funcIndex < imports.length(); funcIndex++) {
const FuncImport& fi = imports[funcIndex];
FuncOffsets offsets;
if (!GenerateImportFunction(masm, fi, env.funcSigs[funcIndex]->id, &offsets))
return false;
if (!code->codeRanges.emplaceBack(funcIndex, /* bytecodeOffset = */ 0, offsets))
return false;
}
masm.finish();
if (masm.oom())
return false;
return code->swap(masm);
}
// Generate a stub that is called via the internal ABI derived from the
// signature of the import and calls into an appropriate callImport C++
// function, having boxed all the ABI arguments into a homogeneous Value array.
@ -1340,6 +1313,8 @@ GenerateDebugTrapStub(MacroAssembler& masm, Label* throwLabel, CallableOffsets*
return FinishOffsets(masm, offsets);
}
static const unsigned STUBS_LIFO_DEFAULT_CHUNK_SIZE = 4 * 1024;
bool
wasm::GenerateStubs(const ModuleEnvironment& env, const FuncImportVector& imports,
const FuncExportVector& exports, CompiledCode* code)
@ -1368,6 +1343,14 @@ wasm::GenerateStubs(const ModuleEnvironment& env, const FuncImportVector& import
return false;
if (!code->codeRanges.emplaceBack(CodeRange::ImportJitExit, funcIndex, offsets))
return false;
if (!env.isAsmJS()) {
FuncOffsets offsets;
if (!GenerateImportFunction(masm, fi, env.funcSigs[funcIndex]->id, &offsets))
return false;
if (!code->codeRanges.emplaceBack(funcIndex, /* bytecodeOffset = */ 0, offsets))
return false;
}
}
for (const FuncExport& fe : exports) {

View file

@ -28,10 +28,6 @@ extern bool
GenerateBuiltinThunk(jit::MacroAssembler& masm, jit::ABIFunctionType abiType, ExitReason exitReason,
void* funcPtr, CallableOffsets* offsets);
extern bool
GenerateImportFunctions(const ModuleEnvironment& env, const FuncImportVector& imports,
CompiledCode* code);
extern bool
GenerateStubs(const ModuleEnvironment& env, const FuncImportVector& imports,
const FuncExportVector& exports, CompiledCode* code);

View file

@ -2804,7 +2804,8 @@ ParseLimits(WasmParseContext& c, Limits* limits)
if (c.ts.getIf(WasmToken::Index, &token))
maximum.emplace(token.index());
*limits = Limits(initial.index(), maximum);
Limits r = { initial.index(), maximum };
*limits = r;
return true;
}
@ -2871,7 +2872,8 @@ ParseMemory(WasmParseContext& c, WasmToken token, AstModule* module)
return false;
}
if (!module->addMemory(name, Limits(pages, Some(pages))))
Limits memory = { uint32_t(pages), Some(uint32_t(pages)) };
if (!module->addMemory(name, memory))
return false;
if (!c.ts.match(WasmToken::CloseParen, c.error))
@ -3163,7 +3165,8 @@ ParseTable(WasmParseContext& c, WasmToken token, AstModule* module)
if (numElements != elems.length())
return false;
if (!module->addTable(name, Limits(numElements, Some(numElements))))
Limits r = { numElements, Some(numElements) };
if (!module->addTable(name, r))
return false;
auto* zero = new(c.lifo) AstConst(Val(uint32_t(0)));

View file

@ -164,29 +164,6 @@ struct ShareableBase : AtomicRefCounted<T>
// ValType utilities
static inline unsigned
SizeOf(ValType vt)
{
switch (vt) {
case ValType::I32:
case ValType::F32:
return 4;
case ValType::I64:
case ValType::F64:
return 8;
case ValType::I8x16:
case ValType::I16x8:
case ValType::I32x4:
case ValType::F32x4:
case ValType::B8x16:
case ValType::B16x8:
case ValType::B32x4:
return 16;
default:
MOZ_CRASH("Invalid ValType");
}
}
static inline bool
IsSimdType(ValType vt)
{
@ -878,8 +855,7 @@ struct SigWithId : Sig
SigIdDesc id;
SigWithId() = default;
explicit SigWithId(Sig&& sig) : Sig(Move(sig)), id() {}
SigWithId(Sig&& sig, SigIdDesc id) : Sig(Move(sig)), id(id) {}
explicit SigWithId(Sig&& sig, SigIdDesc id) : Sig(Move(sig)), id(id) {}
void operator=(Sig&& rhs) { Sig::operator=(Move(rhs)); }
WASM_DECLARE_SERIALIZABLE(SigWithId)
@ -1352,11 +1328,6 @@ struct Limits
{
uint32_t initial;
Maybe<uint32_t> maximum;
Limits() = default;
explicit Limits(uint32_t initial, const Maybe<uint32_t>& maximum = Nothing())
: initial(initial), maximum(maximum)
{}
};
// TableDesc describes a table as well as the offset of the table's base pointer
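With its constructors deleted above, Limits becomes a plain aggregate, which is exactly why the WasmTextToBinary.cpp hunks earlier in this patch switch call sites from Limits(initial, maximum) to brace initialization. A small illustration (PageLimits is a hypothetical helper, not code from this patch):

    static Limits
    PageLimits(uint32_t pages)
    {
        // Aggregate initialization; the removed explicit constructor form,
        // Limits(pages, Some(pages)), no longer compiles.
        Limits limits = { pages, Some(pages) };
        return limits;
    }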

View file

@ -44,10 +44,9 @@ struct ModuleEnvironment
CompileMode mode_;
Tier tier_;
// Module fields decoded from the module environment (or initialized while
// validating an asm.js module) and immutable during compilation:
// Module fields filled out incrementally during decoding:
MemoryUsage memoryUsage;
uint32_t minMemoryLength;
Atomic<uint32_t> minMemoryLength;
Maybe<uint32_t> maxMemoryLength;
SigWithIdVector sigs;
SigWithIdPtrVector funcSigs;
@ -58,8 +57,6 @@ struct ModuleEnvironment
ImportVector imports;
ExportVector exports;
Maybe<uint32_t> startFuncIndex;
// Fields decoded as part of the wasm module tail:
ElemSegmentVector elemSegments;
DataSegmentVector dataSegments;
NameInBytecodeVector funcNames;
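Note the type change of minMemoryLength to Atomic<uint32_t>. A plausible reading — an assumption, not stated in this patch — is that asm.js validation can grow the minimum heap length on the main thread while helper threads compiling function bodies read it concurrently, so bumpMinMemoryLength() (declared in the ModuleGenerator hunk above) reduces to a monotonic atomic store, roughly:

    void
    ModuleGenerator::bumpMinMemoryLength(uint32_t newMinMemoryLength)
    {
        // Sketch under the assumption above; asm.js only, and never shrinking.
        MOZ_ASSERT(isAsmJS());
        MOZ_ASSERT(newMinMemoryLength >= env_->minMemoryLength);
        env_->minMemoryLength = newMinMemoryLength;  // atomic store
    }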
@ -101,14 +98,24 @@ struct ModuleEnvironment
return sigs.length();
}
size_t numFuncs() const {
// asm.js pre-reserves a bunch of function index space which is
// incrementally filled in during function-body validation. Thus, there
// are a few possible interpretations of numFuncs() (total index space
// size vs. exact number of imports/definitions encountered so far) and
// to simplify things we define this quantity only for wasm.
MOZ_ASSERT(!isAsmJS());
return funcSigs.length();
}
size_t numFuncImports() const {
return funcImportGlobalDataOffsets.length();
}
size_t numFuncDefs() const {
// asm.js overallocates the length of funcSigs and in general does not
// know the number of function definitions until it's done compiling.
MOZ_ASSERT(!isAsmJS());
return funcSigs.length() - funcImportGlobalDataOffsets.length();
}
size_t numFuncImports() const {
MOZ_ASSERT(!isAsmJS());
return funcImportGlobalDataOffsets.length();
}
bool usesMemory() const {
return UsesMemory(memoryUsage);
}
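The comments above fix the wasm-only function index space: imports occupy [0, numFuncImports()) and definitions follow, so numFuncs() == numFuncImports() + numFuncDefs(). A worked restatement of that arithmetic (FuncIndexOfDef is an illustrative helper, not part of this patch):

    static uint32_t
    FuncIndexOfDef(const ModuleEnvironment& env, uint32_t funcDefIndex)
    {
        // Only meaningful for wasm; asm.js overallocates funcSigs, as noted above.
        MOZ_ASSERT(!env.isAsmJS());
        MOZ_ASSERT(funcDefIndex < env.numFuncDefs());
        return env.numFuncImports() + funcDefIndex;
    }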