Bug 1395587 - Baldr: shrink the ModuleGenerator interface (r=lth)

MozReview-Commit-ID: DdVvMrtpEIl

--HG--
extra : rebase_source : 6768dbd541689345c120991524de62ff89f6d93c
Luke Wagner 2017-09-06 08:30:35 -05:00
Parent 64db05d9c1
Commit 4cc7b26afd
13 changed files with 207 additions and 381 deletions
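For orientation: after this patch, the caller constructs the ModuleEnvironment itself and lends it to ModuleGenerator by pointer, instead of ModuleGenerator owning a UniqueModuleEnvironment. A minimal sketch of the new wasm driving sequence, condensed from the CompileInitialTier hunks later in this diff (identifiers are taken from the diff; error handling is elided):

    // Constant compile parameters (mode/tier/debug) now live in the environment.
    ModuleEnvironment env(mode, tier, debug);
    Decoder d(bytecode.bytes, error);
    if (!DecodeModuleEnvironment(d, &env))          // fills the mutable module fields
        return nullptr;
    ModuleGenerator mg(args, &env, /* cancelled = */ nullptr, error);
    if (!mg.init())
        return nullptr;
    if (!DecodeCodeSection(d, mg, &env))            // env is now passed explicitly
        return nullptr;
    if (!DecodeModuleTail(d, &env))
        return nullptr;
    return mg.finishModule(bytecode);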

View file

@ -1652,6 +1652,7 @@ class MOZ_STACK_CLASS ModuleValidator
bool simdPresent_;
// State used to build the AsmJSModule in finish():
ModuleEnvironment env_;
ModuleGenerator mg_;
MutableAsmJSMetadata asmJSMetadata_;
@ -1709,7 +1710,8 @@ class MOZ_STACK_CLASS ModuleValidator
}
public:
ModuleValidator(JSContext* cx, AsmJSParser& parser, ParseNode* moduleFunctionNode)
ModuleValidator(JSContext* cx, const CompileArgs& args, AsmJSParser& parser,
ParseNode* moduleFunctionNode)
: cx_(cx),
parser_(parser),
moduleFunctionNode_(moduleFunctionNode),
@ -1730,7 +1732,8 @@ class MOZ_STACK_CLASS ModuleValidator
arrayViews_(cx),
atomicsPresent_(false),
simdPresent_(false),
mg_(nullptr, nullptr),
env_(CompileMode::Once, Tier::Ion, DebugEnabled::False, ModuleKind::AsmJS),
mg_(args, &env_, nullptr, nullptr),
errorString_(nullptr),
errorOffset_(UINT32_MAX),
errorOverRecursed_(false)
@ -1855,35 +1858,17 @@ class MOZ_STACK_CLASS ModuleValidator
if (!dummyFunction_)
return false;
ScriptedCaller scriptedCaller;
if (parser_.ss->filename()) {
scriptedCaller.line = scriptedCaller.column = 0; // unused
scriptedCaller.filename = DuplicateString(parser_.ss->filename());
if (!scriptedCaller.filename)
return false;
}
MutableCompileArgs args = cx_->new_<CompileArgs>();
if (!args || !args->initFromContext(cx_, Move(scriptedCaller)))
return false;
auto env = MakeUnique<ModuleEnvironment>(ModuleKind::AsmJS);
if (!env ||
!env->sigs.resize(AsmJSMaxTypes) ||
!env->funcSigs.resize(AsmJSMaxFuncs) ||
!env->funcImportGlobalDataOffsets.resize(AsmJSMaxImports) ||
!env->tables.resize(AsmJSMaxTables) ||
!env->asmJSSigToTableIndex.resize(AsmJSMaxTypes))
env_.minMemoryLength = RoundUpToNextValidAsmJSHeapLength(0);
if (!env_.sigs.resize(AsmJSMaxTypes) ||
!env_.funcSigs.resize(AsmJSMaxFuncs) ||
!env_.funcImportGlobalDataOffsets.resize(AsmJSMaxImports) ||
!env_.tables.resize(AsmJSMaxTables) ||
!env_.asmJSSigToTableIndex.resize(AsmJSMaxTypes))
{
return false;
}
env->minMemoryLength = RoundUpToNextValidAsmJSHeapLength(0);
if (!mg_.init(Move(env), *args, CompileMode::Once, asmJSMetadata_.get()))
return false;
return true;
return mg_.init(asmJSMetadata_.get());
}
JSContext* cx() const { return cx_; }
@ -2493,7 +2478,7 @@ IsSimdTuple(ModuleValidator& m, ParseNode* pn, SimdType* type)
}
static bool
IsNumericLiteral(ModuleValidator& m, ParseNode* pn, bool* isSimd = nullptr);
IsNumericLiteral(ModuleValidator& m, ParseNode* pn);
static NumLit
ExtractNumericLiteral(ModuleValidator& m, ParseNode* pn);
@ -2544,16 +2529,9 @@ IsSimdLiteral(ModuleValidator& m, ParseNode* pn)
}
static bool
IsNumericLiteral(ModuleValidator& m, ParseNode* pn, bool* isSimd)
IsNumericLiteral(ModuleValidator& m, ParseNode* pn)
{
if (IsNumericNonFloatLiteral(pn) || IsFloatLiteral(m, pn))
return true;
if (IsSimdLiteral(m, pn)) {
if (isSimd)
*isSimd = true;
return true;
}
return false;
return IsNumericNonFloatLiteral(pn) || IsFloatLiteral(m, pn) || IsSimdLiteral(m, pn);
}
// The JS grammar treats -42 as -(42) (i.e., with separate grammar
@ -2963,13 +2941,6 @@ class MOZ_STACK_CLASS FunctionValidator
MOZ_ASSERT(continuableStack_.empty());
MOZ_ASSERT(breakLabels_.empty());
MOZ_ASSERT(continueLabels_.empty());
for (auto iter = locals_.all(); !iter.empty(); iter.popFront()) {
if (iter.front().value().type.isSimd()) {
setUsesSimd();
break;
}
}
return m_.mg().finishFuncDef(funcIndex, &fg_);
}
@ -2989,16 +2960,6 @@ class MOZ_STACK_CLASS FunctionValidator
return m_.failName(pn, fmt, name);
}
/***************************************************** Attributes */
void setUsesSimd() {
fg_.setUsesSimd();
}
void setUsesAtomics() {
fg_.setUsesAtomics();
}
/***************************************************** Local scope setup */
bool addLocal(ParseNode* pn, PropertyName* name, Type type) {
@ -3894,13 +3855,9 @@ IsLiteralOrConst(FunctionValidator& f, ParseNode* pn, NumLit* lit)
return true;
}
bool isSimd = false;
if (!IsNumericLiteral(f.m(), pn, &isSimd))
if (!IsNumericLiteral(f.m(), pn))
return false;
if (isSimd)
f.setUsesSimd();
*lit = ExtractNumericLiteral(f.m(), pn);
return true;
}
@ -4709,8 +4666,6 @@ static bool
CheckAtomicsBuiltinCall(FunctionValidator& f, ParseNode* callNode, AsmJSAtomicsBuiltinFunction func,
Type* type)
{
f.setUsesAtomics();
switch (func) {
case AsmJSAtomicsBuiltin_compareExchange:
return CheckAtomicsCompareExchange(f, callNode, type);
@ -5623,8 +5578,6 @@ static bool
CheckSimdOperationCall(FunctionValidator& f, ParseNode* call, const ModuleValidator::Global* global,
Type* type)
{
f.setUsesSimd();
MOZ_ASSERT(global->isSimdOperation());
SimdType opType = global->simdOperationType();
@ -5719,8 +5672,6 @@ static bool
CheckSimdCtorCall(FunctionValidator& f, ParseNode* call, const ModuleValidator::Global* global,
Type* type)
{
f.setUsesSimd();
MOZ_ASSERT(call->isKind(PNK_CALL));
SimdType simdType = global->simdCtorType();
@ -5858,10 +5809,7 @@ CheckCoercedCall(FunctionValidator& f, ParseNode* call, Type ret, Type* type)
if (!CheckRecursionLimitDontReport(f.cx()))
return f.m().failOverRecursed();
bool isSimd = false;
if (IsNumericLiteral(f.m(), call, &isSimd)) {
if (isSimd)
f.setUsesSimd();
if (IsNumericLiteral(f.m(), call)) {
NumLit lit = ExtractNumericLiteral(f.m(), call);
if (!f.writeConstExpr(lit))
return false;
@ -6411,12 +6359,8 @@ CheckExpr(FunctionValidator& f, ParseNode* expr, Type* type)
if (!CheckRecursionLimitDontReport(f.cx()))
return f.m().failOverRecursed();
bool isSimd = false;
if (IsNumericLiteral(f.m(), expr, &isSimd)) {
if (isSimd)
f.setUsesSimd();
if (IsNumericLiteral(f.m(), expr))
return CheckNumericLiteral(f, expr, type);
}
switch (expr->getKind()) {
case PNK_NAME: return CheckVarRef(f, expr, type);
@ -7404,7 +7348,19 @@ CheckModule(JSContext* cx, AsmJSParser& parser, ParseNode* stmtList, unsigned* t
ParseNode* moduleFunctionNode = parser.pc->functionBox()->functionNode;
MOZ_ASSERT(moduleFunctionNode);
ModuleValidator m(cx, parser, moduleFunctionNode);
ScriptedCaller scriptedCaller;
if (parser.ss->filename()) {
scriptedCaller.line = scriptedCaller.column = 0; // unused
scriptedCaller.filename = DuplicateString(parser.ss->filename());
if (!scriptedCaller.filename)
return nullptr;
}
MutableCompileArgs args = cx->new_<CompileArgs>();
if (!args || !args->initFromContext(cx, Move(scriptedCaller)))
return nullptr;
ModuleValidator m(cx, *args, parser, moduleFunctionNode);
if (!m.init())
return nullptr;
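The asm.js side follows the same shape: CompileArgs are created once, up front, in CheckModule, and the ModuleEnvironment becomes a plain member of ModuleValidator rather than a heap allocation handed off to ModuleGenerator. A condensed sketch stitched together from the ModuleValidator hunks above (member lists and error handling abbreviated; illustrative only, not the full class):

    class ModuleValidator {
        ModuleEnvironment    env_;            // asm.js environment, owned here
        ModuleGenerator      mg_;             // borrows env_ by pointer
        MutableAsmJSMetadata asmJSMetadata_;
      public:
        ModuleValidator(JSContext* cx, const CompileArgs& args, AsmJSParser& parser,
                        ParseNode* moduleFunctionNode)
          : env_(CompileMode::Once, Tier::Ion, DebugEnabled::False, ModuleKind::AsmJS),
            mg_(args, &env_, /* cancelled = */ nullptr, /* error = */ nullptr)
        { /* ... */ }

        bool init() {
            env_.minMemoryLength = RoundUpToNextValidAsmJSHeapLength(0);
            // Max-sized reservations, later filled in linearly via initSig()/initFuncSig().
            if (!env_.sigs.resize(AsmJSMaxTypes) || !env_.funcSigs.resize(AsmJSMaxFuncs))
                return false;
            return mg_.init(asmJSMetadata_.get());
        }
    };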

View file

@ -364,6 +364,7 @@ class BaseCompiler
#endif
typedef Vector<NonAssertingLabel, 8, SystemAllocPolicy> LabelVector;
typedef Vector<MIRType, 8, SystemAllocPolicy> MIRTypeVector;
// The strongly typed register wrappers have saved my bacon a few
// times; though they are largely redundant they stay, for now.

View file

@ -378,7 +378,8 @@ class Metadata : public ShareableBase<Metadata>, public MetadataCacheablePod
public:
explicit Metadata(UniqueMetadataTier tier, ModuleKind kind = ModuleKind::Wasm)
: MetadataCacheablePod(kind),
metadata1_(Move(tier))
metadata1_(Move(tier)),
debugEnabled(false)
{}
virtual ~Metadata() {}

View file

@ -60,17 +60,17 @@ DecodeFunctionBody(Decoder& d, ModuleGenerator& mg, uint32_t funcIndex)
}
static bool
DecodeCodeSection(Decoder& d, ModuleGenerator& mg)
DecodeCodeSection(Decoder& d, ModuleGenerator& mg, ModuleEnvironment* env)
{
uint32_t sectionStart, sectionSize;
if (!d.startSection(SectionId::Code, &mg.mutableEnv(), &sectionStart, &sectionSize, "code"))
if (!d.startSection(SectionId::Code, env, &sectionStart, &sectionSize, "code"))
return false;
if (!mg.startFuncDefs())
return false;
if (sectionStart == Decoder::NotStarted) {
if (mg.env().numFuncDefs() != 0)
if (env->numFuncDefs() != 0)
return d.fail("expected function bodies");
return mg.finishFuncDefs();
@ -80,11 +80,11 @@ DecodeCodeSection(Decoder& d, ModuleGenerator& mg)
if (!d.readVarU32(&numFuncDefs))
return d.fail("expected function body count");
if (numFuncDefs != mg.env().numFuncDefs())
if (numFuncDefs != env->numFuncDefs())
return d.fail("function body count does not match function signature count");
for (uint32_t funcDefIndex = 0; funcDefIndex < numFuncDefs; funcDefIndex++) {
if (!DecodeFunctionBody(d, mg, mg.env().numFuncImports() + funcDefIndex))
if (!DecodeFunctionBody(d, mg, env->numFuncImports() + funcDefIndex))
return false;
}
@ -112,107 +112,48 @@ CompileArgs::initFromContext(JSContext* cx, ScriptedCaller&& scriptedCaller)
return assumptions.initBuildIdFromContext(cx);
}
static void
CompilerAvailability(ModuleKind kind, const CompileArgs& args, bool* baselineEnabled,
bool* debugEnabled, bool* ionEnabled)
{
bool baselinePossible = kind == ModuleKind::Wasm && BaselineCanCompile();
*baselineEnabled = baselinePossible && args.baselineEnabled;
*debugEnabled = baselinePossible && args.debugEnabled;
*ionEnabled = args.ionEnabled;
// Default to Ion if necessary: We will never get to this point on platforms
// that don't have Ion at all, so this can happen if the user has disabled
// both compilers or if she has disabled Ion but baseline can't compile the
// code.
if (!(*baselineEnabled || *ionEnabled))
*ionEnabled = true;
}
static bool
BackgroundWorkPossible()
{
return CanUseExtraThreads() && HelperThreadState().cpuCount > 1;
}
bool
wasm::GetDebugEnabled(const CompileArgs& args, ModuleKind kind)
{
bool baselineEnabled, debugEnabled, ionEnabled;
CompilerAvailability(kind, args, &baselineEnabled, &debugEnabled, &ionEnabled);
return debugEnabled;
}
wasm::CompileMode
wasm::GetInitialCompileMode(const CompileArgs& args, ModuleKind kind)
{
bool baselineEnabled, debugEnabled, ionEnabled;
CompilerAvailability(kind, args, &baselineEnabled, &debugEnabled, &ionEnabled);
return BackgroundWorkPossible() && baselineEnabled && ionEnabled && !debugEnabled
? CompileMode::Tier1
: CompileMode::Once;
}
wasm::Tier
wasm::GetTier(const CompileArgs& args, CompileMode compileMode, ModuleKind kind)
{
bool baselineEnabled, debugEnabled, ionEnabled;
CompilerAvailability(kind, args, &baselineEnabled, &debugEnabled, &ionEnabled);
switch (compileMode) {
case CompileMode::Tier1:
MOZ_ASSERT(baselineEnabled);
return Tier::Baseline;
case CompileMode::Tier2:
MOZ_ASSERT(ionEnabled);
return Tier::Ion;
case CompileMode::Once:
return (debugEnabled || !ionEnabled) ? Tier::Baseline : Tier::Ion;
default:
MOZ_CRASH("Bad mode");
}
}
static bool
Compile(ModuleGenerator& mg, const ShareableBytes& bytecode, const CompileArgs& args,
UniqueChars* error, CompileMode compileMode)
{
MOZ_RELEASE_ASSERT(wasm::HaveSignalHandlers());
auto env = js::MakeUnique<ModuleEnvironment>();
if (!env)
return false;
Decoder d(bytecode.bytes, error);
if (!DecodeModuleEnvironment(d, env.get()))
return false;
if (!mg.init(Move(env), args, compileMode))
return false;
if (!DecodeCodeSection(d, mg))
return false;
if (!DecodeModuleTail(d, &mg.mutableEnv()))
return false;
MOZ_ASSERT(!*error, "unreported error");
return true;
}
SharedModule
wasm::CompileInitialTier(const ShareableBytes& bytecode, const CompileArgs& args, UniqueChars* error)
{
ModuleGenerator mg(error, nullptr);
MOZ_RELEASE_ASSERT(wasm::HaveSignalHandlers());
CompileMode mode = GetInitialCompileMode(args);
if (!Compile(mg, bytecode, args, error, mode))
bool baselineEnabled = BaselineCanCompile() && args.baselineEnabled;
bool debugEnabled = BaselineCanCompile() && args.debugEnabled;
bool ionEnabled = args.ionEnabled || !baselineEnabled;
CompileMode mode;
Tier tier;
DebugEnabled debug;
if (BackgroundWorkPossible() && baselineEnabled && ionEnabled && !debugEnabled) {
mode = CompileMode::Tier1;
tier = Tier::Baseline;
debug = DebugEnabled::False;
} else {
mode = CompileMode::Once;
tier = debugEnabled || !ionEnabled ? Tier::Baseline : Tier::Ion;
debug = debugEnabled ? DebugEnabled::True : DebugEnabled::False;
}
ModuleEnvironment env(mode, tier, debug);
Decoder d(bytecode.bytes, error);
if (!DecodeModuleEnvironment(d, &env))
return nullptr;
ModuleGenerator mg(args, &env, nullptr, error);
if (!mg.init())
return nullptr;
if (!DecodeCodeSection(d, mg, &env))
return nullptr;
if (!DecodeModuleTail(d, &env))
return nullptr;
return mg.finishModule(bytecode);
@ -221,10 +162,23 @@ wasm::CompileInitialTier(const ShareableBytes& bytecode, const CompileArgs& args
bool
wasm::CompileTier2(Module& module, const CompileArgs& args, Atomic<bool>* cancelled)
{
UniqueChars error;
ModuleGenerator mg(&error, cancelled);
MOZ_RELEASE_ASSERT(wasm::HaveSignalHandlers());
if (!Compile(mg, module.bytecode(), args, &error, CompileMode::Tier2))
UniqueChars error;
Decoder d(module.bytecode().bytes, &error);
ModuleEnvironment env(CompileMode::Tier2, Tier::Ion, DebugEnabled::False);
if (!DecodeModuleEnvironment(d, &env))
return false;
ModuleGenerator mg(args, &env, cancelled, &error);
if (!mg.init())
return false;
if (!DecodeCodeSection(d, mg, &env))
return false;
if (!DecodeModuleTail(d, &env))
return false;
return mg.finishTier2(module);

View file

@ -75,31 +75,6 @@ CompileInitialTier(const ShareableBytes& bytecode, const CompileArgs& args, Uniq
bool
CompileTier2(Module& module, const CompileArgs& args, Atomic<bool>* cancelled);
// Select whether debugging is available based on the available compilers, the
// configuration options, and the nature of the module. Note debugging can be
// unavailable even if selected, if Rabaldr is unavailable or the module is not
// compilable by Rabaldr.
bool
GetDebugEnabled(const CompileArgs& args, ModuleKind kind = ModuleKind::Wasm);
// Select the mode for the initial compilation of a module. The mode is "Tier1"
// precisely if both compilers are available, we're not debugging, and it is
// possible to compile in the background, and in that case, we'll compile twice,
// with the mode set to "Tier2" during the second (background) compilation.
// Otherwise, the tier is "Once" and we'll compile once, with the appropriate
// compiler.
CompileMode
GetInitialCompileMode(const CompileArgs& args, ModuleKind kind = ModuleKind::Wasm);
// Select the tier for a compilation. The tier is Tier::Baseline if we're
// debugging, if Baldr is not available, or if both compilers are available
// and the compileMode is Tier1; otherwise the tier is Tier::Ion.
Tier
GetTier(const CompileArgs& args, CompileMode compileMode, ModuleKind kind = ModuleKind::Wasm);
} // namespace wasm
} // namespace js
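These three helpers disappear; their policy is inlined into CompileInitialTier and CompileTier2 earlier in this diff, with the result recorded once in the ModuleEnvironment. The outcomes are intended to be unchanged; roughly, assuming BaselineCanCompile() and BackgroundWorkPossible() behave as the removed comments describe:

    // Sketch of the resulting configurations after the inline selection:
    //   multi-core, baseline+Ion enabled, no debug -> CompileMode::Tier1, Tier::Baseline, DebugEnabled::False
    //   debugging (and baseline available)         -> CompileMode::Once,  Tier::Baseline, DebugEnabled::True
    //   Ion only, or single-core with both enabled -> CompileMode::Once,  Tier::Ion,      DebugEnabled::False
    //   baseline only                              -> CompileMode::Once,  Tier::Baseline, DebugEnabled::False
    //
    // CompileTier2 always constructs ModuleEnvironment(CompileMode::Tier2, Tier::Ion, DebugEnabled::False).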

View file

@ -45,11 +45,12 @@ static const unsigned GENERATOR_LIFO_DEFAULT_CHUNK_SIZE = 4 * 1024;
static const unsigned COMPILATION_LIFO_DEFAULT_CHUNK_SIZE = 64 * 1024;
static const uint32_t BAD_CODE_RANGE = UINT32_MAX;
ModuleGenerator::ModuleGenerator(UniqueChars* error, mozilla::Atomic<bool>* cancelled)
: compileMode_(CompileMode(-1)),
tier_(Tier(-1)),
ModuleGenerator::ModuleGenerator(const CompileArgs& args, ModuleEnvironment* env,
Atomic<bool>* cancelled, UniqueChars* error)
: compileArgs_(&args),
error_(error),
cancelled_(cancelled),
env_(env),
linkDataTier_(nullptr),
metadataTier_(nullptr),
numSigs_(0),
@ -78,17 +79,17 @@ ModuleGenerator::~ModuleGenerator()
if (outstanding_) {
AutoLockHelperThreadState lock;
while (true) {
CompileTaskPtrVector& worklist = HelperThreadState().wasmWorklist(lock, compileMode_);
CompileTaskPtrVector& worklist = HelperThreadState().wasmWorklist(lock, mode());
MOZ_ASSERT(outstanding_ >= worklist.length());
outstanding_ -= worklist.length();
worklist.clear();
CompileTaskPtrVector& finished = HelperThreadState().wasmFinishedList(lock, compileMode_);
CompileTaskPtrVector& finished = HelperThreadState().wasmFinishedList(lock, mode());
MOZ_ASSERT(outstanding_ >= finished.length());
outstanding_ -= finished.length();
finished.clear();
uint32_t numFailed = HelperThreadState().harvestFailedWasmJobs(lock, compileMode_);
uint32_t numFailed = HelperThreadState().harvestFailedWasmJobs(lock, mode());
MOZ_ASSERT(outstanding_ >= numFailed);
outstanding_ -= numFailed;
@ -99,8 +100,8 @@ ModuleGenerator::~ModuleGenerator()
}
}
MOZ_ASSERT(HelperThreadState().wasmCompilationInProgress(compileMode_));
HelperThreadState().wasmCompilationInProgress(compileMode_) = false;
MOZ_ASSERT(HelperThreadState().wasmCompilationInProgress(mode()));
HelperThreadState().wasmCompilationInProgress(mode()) = false;
} else {
MOZ_ASSERT(!outstanding_);
}
@ -121,12 +122,6 @@ ModuleGenerator::initAsmJS(Metadata* asmJSMetadata)
metadata_ = asmJSMetadata;
MOZ_ASSERT(isAsmJS());
// Enabling debugging requires baseline and baseline is only enabled for
// wasm (since the baseline does not currently support Atomics or SIMD).
metadata_->debugEnabled = false;
tier_ = Tier::Ion;
// For asm.js, the Vectors in ModuleEnvironment are max-sized reservations
// and will be initialized in a linear order via init* functions as the
// module is generated.
@ -139,13 +134,11 @@ ModuleGenerator::initAsmJS(Metadata* asmJSMetadata)
}
bool
ModuleGenerator::initWasm(const CompileArgs& args)
ModuleGenerator::initWasm()
{
MOZ_ASSERT(!env_->isAsmJS());
tier_ = GetTier(args, compileMode_);
auto metadataTier = js::MakeUnique<MetadataTier>(tier_);
auto metadataTier = js::MakeUnique<MetadataTier>(tier());
if (!metadataTier)
return false;
@ -153,16 +146,14 @@ ModuleGenerator::initWasm(const CompileArgs& args)
if (!metadata_)
return false;
metadataTier_ = &metadata_->metadata(tier_);
metadataTier_ = &metadata_->metadata(tier());
if (!linkData_.initTier1(tier_, *metadata_))
if (!linkData_.initTier1(tier(), *metadata_))
return false;
linkDataTier_ = &linkData_.linkData(tier_);
linkDataTier_ = &linkData_.linkData(tier());
MOZ_ASSERT(!isAsmJS());
metadata_->debugEnabled = GetDebugEnabled(args);
// For wasm, the Vectors are correctly-sized and already initialized.
numSigs_ = env_->sigs.length();
@ -220,43 +211,26 @@ ModuleGenerator::initWasm(const CompileArgs& args)
return false;
}
if (metadata_->debugEnabled) {
if (!debugFuncArgTypes_.resize(env_->funcSigs.length()))
return false;
if (!debugFuncReturnTypes_.resize(env_->funcSigs.length()))
return false;
for (size_t i = 0; i < debugFuncArgTypes_.length(); i++) {
if (!debugFuncArgTypes_[i].appendAll(env_->funcSigs[i]->args()))
return false;
debugFuncReturnTypes_[i] = env_->funcSigs[i]->ret();
}
}
return true;
}
bool
ModuleGenerator::init(UniqueModuleEnvironment env, const CompileArgs& args,
CompileMode compileMode, Metadata* maybeAsmJSMetadata)
ModuleGenerator::init(Metadata* maybeAsmJSMetadata)
{
compileArgs_ = &args;
compileMode_ = compileMode;
env_ = Move(env);
if (!funcToCodeRange_.appendN(BAD_CODE_RANGE, env_->funcSigs.length()))
return false;
if (!assumptions_.clone(args.assumptions))
if (!assumptions_.clone(compileArgs_->assumptions))
return false;
if (!exportedFuncs_.init())
return false;
if (env_->isAsmJS() ? !initAsmJS(maybeAsmJSMetadata) : !initWasm(args))
if (env_->isAsmJS() ? !initAsmJS(maybeAsmJSMetadata) : !initWasm())
return false;
if (args.scriptedCaller.filename) {
metadata_->filename = DuplicateString(args.scriptedCaller.filename.get());
if (compileArgs_->scriptedCaller.filename) {
metadata_->filename = DuplicateString(compileArgs_->scriptedCaller.filename.get());
if (!metadata_->filename)
return false;
}
@ -264,14 +238,6 @@ ModuleGenerator::init(UniqueModuleEnvironment env, const CompileArgs& args,
return true;
}
ModuleEnvironment&
ModuleGenerator::mutableEnv()
{
// Mutation is not safe during parallel compilation.
MOZ_ASSERT(!startedFuncDefs_ || finishedFuncDefs_);
return *env_;
}
bool
ModuleGenerator::finishOutstandingTask()
{
@ -283,17 +249,17 @@ ModuleGenerator::finishOutstandingTask()
while (true) {
MOZ_ASSERT(outstanding_ > 0);
if (HelperThreadState().wasmFailed(lock, compileMode_)) {
if (HelperThreadState().wasmFailed(lock, mode())) {
if (error_) {
MOZ_ASSERT(!*error_, "Should have stopped earlier");
*error_ = Move(HelperThreadState().harvestWasmError(lock, compileMode_));
*error_ = Move(HelperThreadState().harvestWasmError(lock, mode()));
}
return false;
}
if (!HelperThreadState().wasmFinishedList(lock, compileMode_).empty()) {
if (!HelperThreadState().wasmFinishedList(lock, mode()).empty()) {
outstanding_--;
task = HelperThreadState().wasmFinishedList(lock, compileMode_).popCopy();
task = HelperThreadState().wasmFinishedList(lock, mode()).popCopy();
break;
}
@ -888,14 +854,14 @@ ModuleGenerator::startFuncDefs()
uint32_t numTasks;
if (CanUseExtraThreads() &&
threads.cpuCount > 1 &&
threads.wasmCompilationInProgress(compileMode_).compareExchange(false, true))
threads.wasmCompilationInProgress(mode()).compareExchange(false, true))
{
#ifdef DEBUG
{
AutoLockHelperThreadState lock;
MOZ_ASSERT(!HelperThreadState().wasmFailed(lock, compileMode_));
MOZ_ASSERT(HelperThreadState().wasmWorklist(lock, compileMode_).empty());
MOZ_ASSERT(HelperThreadState().wasmFinishedList(lock, compileMode_).empty());
MOZ_ASSERT(!HelperThreadState().wasmFailed(lock, mode()));
MOZ_ASSERT(HelperThreadState().wasmWorklist(lock, mode()).empty());
MOZ_ASSERT(HelperThreadState().wasmFinishedList(lock, mode()).empty());
}
#endif
parallel_ = true;
@ -906,12 +872,8 @@ ModuleGenerator::startFuncDefs()
if (!tasks_.initCapacity(numTasks))
return false;
for (size_t i = 0; i < numTasks; i++) {
tasks_.infallibleEmplaceBack(*env_,
tier_,
compileMode_,
COMPILATION_LIFO_DEFAULT_CHUNK_SIZE);
}
for (size_t i = 0; i < numTasks; i++)
tasks_.infallibleEmplaceBack(*env_, COMPILATION_LIFO_DEFAULT_CHUNK_SIZE);
if (!freeTasks_.reserve(numTasks))
return false;
@ -959,13 +921,11 @@ ModuleGenerator::launchBatchCompile()
if (cancelled_ && *cancelled_)
return false;
currentTask_->setDebugEnabled(metadata_->debugEnabled);
size_t numBatchedFuncs = currentTask_->units().length();
MOZ_ASSERT(numBatchedFuncs);
if (parallel_) {
if (!StartOffThreadWasmCompile(currentTask_, compileMode_))
if (!StartOffThreadWasmCompile(currentTask_, mode()))
return false;
outstanding_++;
} else {
@ -986,7 +946,7 @@ bool
ModuleGenerator::finishFuncDef(uint32_t funcIndex, FunctionGenerator* fg)
{
MOZ_ASSERT(activeFuncDef_ == fg);
MOZ_ASSERT_IF(compileMode_ == CompileMode::Tier1, funcIndex < env_->numFuncs());
MOZ_ASSERT_IF(mode() == CompileMode::Tier1, funcIndex < env_->numFuncs());
UniqueFuncBytes func = Move(fg->funcBytes_);
func->setFunc(funcIndex, &funcSig(funcIndex));
@ -995,7 +955,7 @@ ModuleGenerator::finishFuncDef(uint32_t funcIndex, FunctionGenerator* fg)
return false;
uint32_t threshold;
switch (tier_) {
switch (tier()) {
case Tier::Baseline: threshold = JitOptions.wasmBatchBaselineThreshold; break;
case Tier::Ion: threshold = JitOptions.wasmBatchIonThreshold; break;
default: MOZ_CRASH("Invalid tier value"); break;
@ -1072,7 +1032,7 @@ ModuleGenerator::finishFuncDefs()
// that all functions have been compiled.
for (ElemSegment& elems : env_->elemSegments) {
Uint32Vector& codeRangeIndices = elems.elemCodeRangeIndices(tier_);
Uint32Vector& codeRangeIndices = elems.elemCodeRangeIndices(tier());
MOZ_ASSERT(codeRangeIndices.empty());
if (!codeRangeIndices.reserve(elems.elemFuncIndices.length()))
@ -1121,7 +1081,7 @@ ModuleGenerator::initSigTableElems(uint32_t sigIndex, Uint32Vector&& elemFuncInd
if (!env_->elemSegments.emplaceBack(tableIndex, offset, Move(elemFuncIndices)))
return false;
env_->elemSegments.back().elemCodeRangeIndices(tier_) = Move(codeRangeIndices);
env_->elemSegments.back().elemCodeRangeIndices(tier()) = Move(codeRangeIndices);
return true;
}
@ -1158,11 +1118,21 @@ ModuleGenerator::finishMetadata(const ShareableBytes& bytecode)
metadata_->funcNames = Move(env_->funcNames);
metadata_->customSections = Move(env_->customSections);
// Additional debug information to copy.
metadata_->debugFuncArgTypes = Move(debugFuncArgTypes_);
metadata_->debugFuncReturnTypes = Move(debugFuncReturnTypes_);
if (metadata_->debugEnabled)
// Copy over additional debug information.
if (env_->debugEnabled()) {
metadata_->debugEnabled = true;
const size_t numSigs = env_->funcSigs.length();
if (!metadata_->debugFuncArgTypes.resize(numSigs))
return false;
if (!metadata_->debugFuncReturnTypes.resize(numSigs))
return false;
for (size_t i = 0; i < numSigs; i++) {
if (!metadata_->debugFuncArgTypes[i].appendAll(env_->funcSigs[i]->args()))
return false;
metadata_->debugFuncReturnTypes[i] = env_->funcSigs[i]->ret();
}
metadataTier_->debugFuncToCodeRange = Move(funcToCodeRange_);
}
// These Vectors can get large and the excess capacity can be significant,
// so realloc them down to size.
@ -1218,13 +1188,13 @@ ModuleGenerator::finishCodeSegment(const ShareableBytes& bytecode)
if (!finishLinkData())
return nullptr;
return CodeSegment::create(tier_, masm_, bytecode, *linkDataTier_, *metadata_);
return CodeSegment::create(tier(), masm_, bytecode, *linkDataTier_, *metadata_);
}
UniqueJumpTable
ModuleGenerator::createJumpTable(const CodeSegment& codeSegment)
{
MOZ_ASSERT(compileMode_ == CompileMode::Tier1);
MOZ_ASSERT(mode() == CompileMode::Tier1);
MOZ_ASSERT(!isAsmJS());
uint32_t tableSize = env_->numFuncImports() + env_->numFuncDefs();
@ -1244,22 +1214,22 @@ ModuleGenerator::createJumpTable(const CodeSegment& codeSegment)
SharedModule
ModuleGenerator::finishModule(const ShareableBytes& bytecode)
{
MOZ_ASSERT(compileMode_ == CompileMode::Once || compileMode_ == CompileMode::Tier1);
MOZ_ASSERT(mode() == CompileMode::Once || mode() == CompileMode::Tier1);
UniqueConstCodeSegment codeSegment = finishCodeSegment(bytecode);
if (!codeSegment)
return nullptr;
UniqueJumpTable maybeJumpTable;
if (compileMode_ == CompileMode::Tier1) {
if (mode() == CompileMode::Tier1) {
maybeJumpTable = createJumpTable(*codeSegment);
if (!maybeJumpTable)
return nullptr;
}
UniqueConstBytes maybeDebuggingBytes;
if (metadata_->debugEnabled) {
MOZ_ASSERT(compileMode_ == CompileMode::Once);
if (env_->debugEnabled()) {
MOZ_ASSERT(mode() == CompileMode::Once);
Bytes bytes;
if (!bytes.resize(masm_.bytesNeeded()))
return nullptr;
@ -1285,7 +1255,7 @@ ModuleGenerator::finishModule(const ShareableBytes& bytecode)
if (!module)
return nullptr;
if (compileMode_ == CompileMode::Tier1)
if (mode() == CompileMode::Tier1)
module->startTier2(*compileArgs_);
return module;
@ -1294,9 +1264,9 @@ ModuleGenerator::finishModule(const ShareableBytes& bytecode)
bool
ModuleGenerator::finishTier2(Module& module)
{
MOZ_ASSERT(compileMode_ == CompileMode::Tier2);
MOZ_ASSERT(tier_ == Tier::Ion);
MOZ_ASSERT(!metadata_->debugEnabled);
MOZ_ASSERT(mode() == CompileMode::Tier2);
MOZ_ASSERT(tier() == Tier::Ion);
MOZ_ASSERT(!env_->debugEnabled());
if (cancelled_ && *cancelled_)
return false;
@ -1305,10 +1275,10 @@ ModuleGenerator::finishTier2(Module& module)
if (!codeSegment)
return false;
module.finishTier2(linkData_.takeLinkData(tier_),
metadata_->takeMetadata(tier_),
module.finishTier2(linkData_.takeLinkData(tier()),
metadata_->takeMetadata(tier()),
Move(codeSegment),
Move(env_));
env_);
return true;
}

View file

@ -27,14 +27,8 @@
namespace js {
namespace wasm {
struct ModuleEnvironment;
typedef Vector<jit::MIRType, 8, SystemAllocPolicy> MIRTypeVector;
typedef jit::ABIArgIter<MIRTypeVector> ABIArgMIRTypeIter;
typedef jit::ABIArgIter<ValTypeVector> ABIArgValTypeIter;
struct CompileArgs;
struct ModuleEnvironment;
class FunctionGenerator;
// The FuncBytes class represents a single, concurrently-compilable function.
@ -132,13 +126,10 @@ typedef Vector<FuncCompileUnit, 8, SystemAllocPolicy> FuncCompileUnitVector;
class CompileTask
{
const ModuleEnvironment& env_;
Tier tier_;
CompileMode mode_;
LifoAlloc lifo_;
Maybe<jit::TempAllocator> alloc_;
Maybe<jit::MacroAssembler> masm_;
FuncCompileUnitVector units_;
bool debugEnabled_;
CompileTask(const CompileTask&) = delete;
CompileTask& operator=(const CompileTask&) = delete;
@ -146,14 +137,11 @@ class CompileTask
void init() {
alloc_.emplace(&lifo_);
masm_.emplace(jit::MacroAssembler::WasmToken(), *alloc_);
debugEnabled_ = false;
}
public:
CompileTask(const ModuleEnvironment& env, Tier tier, CompileMode mode, size_t defaultChunkSize)
CompileTask(const ModuleEnvironment& env, size_t defaultChunkSize)
: env_(env),
tier_(tier),
mode_(mode),
lifo_(defaultChunkSize)
{
init();
@ -174,16 +162,13 @@ class CompileTask
return units_;
}
Tier tier() const {
return tier_;
return env_.tier;
}
CompileMode mode() const {
return mode_;
return env_.mode;
}
bool debugEnabled() const {
return debugEnabled_;
}
void setDebugEnabled(bool enabled) {
debugEnabled_ = enabled;
return env_.debug == DebugEnabled::True;
}
bool reset(UniqueFuncBytesVector* freeFuncBytes) {
for (FuncCompileUnit& unit : units_) {
@ -217,11 +202,10 @@ class MOZ_STACK_CLASS ModuleGenerator
typedef EnumeratedArray<Trap, Trap::Limit, CallableOffsets> TrapExitOffsetArray;
// Constant parameters
SharedCompileArgs compileArgs_;
CompileMode compileMode_;
Tier tier_;
UniqueChars* error_;
Atomic<bool>* cancelled_;
SharedCompileArgs const compileArgs_;
UniqueChars* const error_;
Atomic<bool>* const cancelled_;
ModuleEnvironment* const env_;
// Data that is moved into the result of finish()
Assumptions assumptions_;
@ -232,7 +216,6 @@ class MOZ_STACK_CLASS ModuleGenerator
UniqueJumpTable jumpTable_;
// Data scoped to the ModuleGenerator's lifetime
UniqueModuleEnvironment env_;
uint32_t numSigs_;
uint32_t numTables_;
LifoAlloc lifo_;
@ -244,8 +227,6 @@ class MOZ_STACK_CLASS ModuleGenerator
uint32_t lastPatchedCallsite_;
uint32_t startOfUnpatchedCallsites_;
Uint32Vector debugTrapFarJumps_;
FuncArgTypesVector debugFuncArgTypes_;
FuncReturnTypesVector debugFuncReturnTypes_;
// Parallel compilation
bool parallel_;
@ -283,40 +264,19 @@ class MOZ_STACK_CLASS ModuleGenerator
MOZ_MUST_USE bool launchBatchCompile();
MOZ_MUST_USE bool initAsmJS(Metadata* asmJSMetadata);
MOZ_MUST_USE bool initWasm(const CompileArgs& args);
MOZ_MUST_USE bool initWasm();
bool isAsmJS() const { return env_->isAsmJS(); }
Tier tier() const { return env_->tier; }
CompileMode mode() const { return env_->mode; }
bool debugEnabled() const { return env_->debugEnabled(); }
public:
explicit ModuleGenerator(UniqueChars* error, Atomic<bool>* cancelled);
ModuleGenerator(const CompileArgs& args, ModuleEnvironment* env,
Atomic<bool>* cancelled, UniqueChars* error);
~ModuleGenerator();
MOZ_MUST_USE bool init(UniqueModuleEnvironment env, const CompileArgs& args,
CompileMode compileMode = CompileMode::Once,
Metadata* maybeAsmJSMetadata = nullptr);
const ModuleEnvironment& env() const { return *env_; }
ModuleEnvironment& mutableEnv();
bool isAsmJS() const { return metadata_->kind == ModuleKind::AsmJS; }
CompileMode mode() const { return compileMode_; }
Tier tier() const { return tier_; }
jit::MacroAssembler& masm() { return masm_; }
// Memory:
bool usesMemory() const { return env_->usesMemory(); }
uint32_t minMemoryLength() const { return env_->minMemoryLength; }
// Tables:
uint32_t numTables() const { return numTables_; }
const TableDescVector& tables() const { return env_->tables; }
// Signatures:
uint32_t numSigs() const { return numSigs_; }
const SigWithId& sig(uint32_t sigIndex) const;
const SigWithId& funcSig(uint32_t funcIndex) const;
const SigWithIdPtrVector& funcSigs() const { return env_->funcSigs; }
// Globals:
const GlobalDescVector& globals() const { return env_->globals; }
MOZ_MUST_USE bool init(Metadata* maybeAsmJSMetadata = nullptr);
// Function definitions:
MOZ_MUST_USE bool startFuncDefs();
@ -324,6 +284,12 @@ class MOZ_STACK_CLASS ModuleGenerator
MOZ_MUST_USE bool finishFuncDef(uint32_t funcIndex, FunctionGenerator* fg);
MOZ_MUST_USE bool finishFuncDefs();
// asm.js accessors:
uint32_t minMemoryLength() const { return env_->minMemoryLength; }
uint32_t numSigs() const { return numSigs_; }
const SigWithId& sig(uint32_t sigIndex) const;
const SigWithId& funcSig(uint32_t funcIndex) const;
// asm.js lazy initialization:
void initSig(uint32_t sigIndex, Sig&& sig);
void initFuncSig(uint32_t funcIndex, uint32_t sigIndex);
@ -355,34 +321,13 @@ class MOZ_STACK_CLASS FunctionGenerator
friend class ModuleGenerator;
ModuleGenerator* m_;
bool usesSimd_;
bool usesAtomics_;
UniqueFuncBytes funcBytes_;
public:
FunctionGenerator()
: m_(nullptr), usesSimd_(false), usesAtomics_(false), funcBytes_(nullptr)
: m_(nullptr), funcBytes_(nullptr)
{}
bool usesSimd() const {
return usesSimd_;
}
void setUsesSimd() {
usesSimd_ = true;
}
bool usesAtomics() const {
return usesAtomics_;
}
void setUsesAtomics() {
usesAtomics_ = true;
}
bool isAsmJS() const {
return m_->isAsmJS();
}
Bytes& bytes() {
return funcBytes_->bytes();
}

View file

@ -191,7 +191,7 @@ class FunctionCompiler
if (!newBlock(/* prev */ nullptr, &curBlock_))
return false;
for (ABIArgValTypeIter i(args); !i.done(); i++) {
for (ABIArgIter<ValTypeVector> i(args); !i.done(); i++) {
MWasmParameter* ins = MWasmParameter::New(alloc(), *i, i.mirType());
curBlock_->add(ins);
curBlock_->initSlot(info().localSlot(i.index()), ins);

View file

@ -305,7 +305,7 @@ Module::notifyCompilationListeners()
void
Module::finishTier2(UniqueLinkDataTier linkData2, UniqueMetadataTier metadata2,
UniqueConstCodeSegment code2, UniqueModuleEnvironment env2)
UniqueConstCodeSegment code2, ModuleEnvironment* env2)
{
// Install the data in the data structures. They will not be visible yet.

View file

@ -226,7 +226,7 @@ class Module : public JS::WasmModule
void startTier2(const CompileArgs& args);
void finishTier2(UniqueLinkDataTier linkData2, UniqueMetadataTier metadata2,
UniqueConstCodeSegment code2, UniqueModuleEnvironment env2);
UniqueConstCodeSegment code2, ModuleEnvironment* env2);
// Wait until Ion-compiled code is available, which will be true either
// immediately (first-level compile was Ion and is already done), not at all

View file

@ -31,6 +31,10 @@ using namespace js::wasm;
using mozilla::ArrayLength;
typedef Vector<jit::MIRType, 8, SystemAllocPolicy> MIRTypeVector;
typedef jit::ABIArgIter<MIRTypeVector> ABIArgMIRTypeIter;
typedef jit::ABIArgIter<ValTypeVector> ABIArgValTypeIter;
static void
FinishOffsets(MacroAssembler& masm, Offsets* offsets)
{

View file

@ -361,7 +361,7 @@ enum class Tier
Baseline,
Debug = Baseline,
Ion,
Serialized = Ion,
Serialized = Ion
};
// The CompileMode controls how compilation of a module is performed (notably,
@ -374,6 +374,14 @@ enum class CompileMode
Tier2
};
// Typed enum for whether debugging is enabled.
enum class DebugEnabled
{
False,
True
};
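These enums are now carried as constant parameters on ModuleEnvironment (see the ModuleEnvironment hunks at the end of this diff), so ModuleGenerator, CompileTask and the compilers read mode, tier and debug from the same place instead of receiving them piecemeal. A minimal usage sketch, assuming only the constructor and accessors shown in this diff:

    // Constant compile parameters travel with the environment.
    ModuleEnvironment env(CompileMode::Once, Tier::Baseline, DebugEnabled::True);

    MOZ_ASSERT(env.mode == CompileMode::Once);
    MOZ_ASSERT(env.tier == Tier::Baseline);
    MOZ_ASSERT(env.debugEnabled());   // DebugEnabled::True -> true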
// Iterator over tiers present in a tiered data structure.
class Tiers

View file

@ -35,11 +35,16 @@ namespace wasm {
struct ModuleEnvironment
{
ModuleKind kind;
// Constant parameters for the entire compilation:
const CompileMode mode;
const Tier tier;
const DebugEnabled debug;
const ModuleKind kind;
// Module fields filled out incrementally during decoding:
MemoryUsage memoryUsage;
Atomic<uint32_t> minMemoryLength;
Maybe<uint32_t> maxMemoryLength;
SigWithIdVector sigs;
SigWithIdPtrVector funcSigs;
Uint32Vector funcImportGlobalDataOffsets;
@ -54,8 +59,14 @@ struct ModuleEnvironment
NameInBytecodeVector funcNames;
CustomSectionVector customSections;
explicit ModuleEnvironment(ModuleKind kind = ModuleKind::Wasm)
: kind(kind),
explicit ModuleEnvironment(CompileMode mode = CompileMode::Once,
Tier tier = Tier::Ion,
DebugEnabled debug = DebugEnabled::False,
ModuleKind kind = ModuleKind::Wasm)
: mode(mode),
tier(tier),
debug(debug),
kind(kind),
memoryUsage(MemoryUsage::None),
minMemoryLength(0)
{}
@ -91,6 +102,9 @@ struct ModuleEnvironment
bool isAsmJS() const {
return kind == ModuleKind::AsmJS;
}
bool debugEnabled() const {
return debug == DebugEnabled::True;
}
bool funcIsImport(uint32_t funcIndex) const {
return funcIndex < funcImportGlobalDataOffsets.length();
}
@ -99,8 +113,6 @@ struct ModuleEnvironment
}
};
typedef UniquePtr<ModuleEnvironment> UniqueModuleEnvironment;
// The Encoder class appends bytes to the Bytes object it is given during
// construction. The client is responsible for the Bytes's lifetime and must
// keep the Bytes alive as long as the Encoder is used.