Mirror of https://github.com/mozilla/gecko-dev.git

Bug 1276028 - Baldr: split out CodeSegment and Metadata from Module (r=bbouvier)

MozReview-Commit-ID: AnITPZYpgp1
--HG--
extra : rebase_source : d3164565407ff3ddcb7da967ba32319f318c4f5d

Parent: 57323900e3
Commit: e0962c91f1
@@ -296,19 +296,28 @@ class AsmJSExport
 
 typedef Vector<AsmJSExport, 0, SystemAllocPolicy> AsmJSExportVector;
 
-// Holds the trivially-memcpy()able, serializable portion of AsmJSModuleData.
-struct AsmJSModuleCacheablePod
+enum class CacheResult
+{
+    Hit,
+    Miss
+};
+
+// Holds the immutable guts of an AsmJSModule.
+//
+// AsmJSMetadata is built incrementally by ModuleValidator and then shared
+// immutably between AsmJSModules.
+
+struct AsmJSMetadataCacheablePod
 {
     uint32_t minHeapLength;
     uint32_t numFFIs;
     uint32_t srcLength;
     uint32_t srcLengthWithRightBrace;
 
+    AsmJSMetadataCacheablePod() { PodZero(this); }
 };
 
-// Holds the immutable guts of an AsmJSModule. This struct is mutably built up
-// by ModuleValidator and then handed over to the AsmJSModule constructor in
-// finish().
-struct AsmJSModuleData : AsmJSModuleCacheablePod
+struct AsmJSMetadata : RefCounted<AsmJSMetadata>, AsmJSMetadataCacheablePod
 {
     AsmJSGlobalVector globals;
     AsmJSImportVector imports;
@@ -317,6 +326,8 @@ struct AsmJSModuleData : AsmJSModuleCacheablePod
     PropertyName* importArgumentName;
     PropertyName* bufferArgumentName;
 
+    CacheResult cacheResult;
+
     // These values are not serialized since they are relative to the
     // containing script which can be different between serialization and
     // deserialization contexts. Thus, they must be set explicitly using the
@@ -327,19 +338,18 @@ struct AsmJSModuleData : AsmJSModuleCacheablePod
     bool strict;
     ScriptSourceHolder scriptSource;
 
-    AsmJSModuleData()
+    AsmJSMetadata()
       : globalArgumentName(nullptr),
         importArgumentName(nullptr),
         bufferArgumentName(nullptr),
+        cacheResult(CacheResult::Miss),
         srcStart(0),
         srcBodyStart(0),
         strict(false)
-    {
-        PodZero(&pod());
-    }
+    {}
 
-    AsmJSModuleCacheablePod& pod() { return *this; }
-    const AsmJSModuleCacheablePod& pod() const { return *this; }
+    AsmJSMetadataCacheablePod& pod() { return *this; }
+    const AsmJSMetadataCacheablePod& pod() const { return *this; }
 
     void trace(JSTracer* trc) const {
         for (const AsmJSGlobal& global : globals)
@@ -349,42 +359,41 @@ struct AsmJSModuleData : AsmJSModuleCacheablePod
         TraceNameField(trc, &bufferArgumentName, "asm.js buffer argument name");
     }
 
-    WASM_DECLARE_SERIALIZABLE(AsmJSModuleData)
+    WASM_DECLARE_SERIALIZABLE(AsmJSMetadata)
 };
 
-typedef UniquePtr<AsmJSModuleData> UniqueAsmJSModuleData;
+typedef RefPtr<AsmJSMetadata> MutableAsmJSMetadata;
+typedef RefPtr<const AsmJSMetadata> SharedAsmJSMetadata;
 
 // An AsmJSModule is-a Module with the extra persistent state necessary to
 // represent a compiled asm.js module.
 class js::AsmJSModule final : public Module
 {
-    typedef UniquePtr<const AsmJSModuleData> UniqueConstAsmJSModuleData;
-    typedef UniquePtr<const StaticLinkData> UniqueConstStaticLinkData;
-
-    const UniqueConstStaticLinkData link_;
-    const UniqueExportMap exportMap_;
-    const UniqueConstAsmJSModuleData module_;
+    const SharedStaticLinkData staticLinkData_;
+    const SharedExportMap exportMap_;
+    const SharedAsmJSMetadata asmJSMetadata_;
 
   public:
-    AsmJSModule(UniqueModuleData base,
-                UniqueStaticLinkData link,
-                UniqueExportMap exportMap,
-                UniqueAsmJSModuleData module)
-      : Module(Move(base)),
-        link_(Move(link)),
-        exportMap_(Move(exportMap)),
-        module_(Move(module))
+    AsmJSModule(UniqueCodeSegment code,
+                const Metadata& metadata,
+                const StaticLinkData& staticLinkData,
+                const ExportMap& exportMap,
+                const AsmJSMetadata& asmJSMetadata)
+      : Module(Move(code), metadata),
+        staticLinkData_(&staticLinkData),
+        exportMap_(&exportMap),
+        asmJSMetadata_(&asmJSMetadata)
     {}
 
     virtual void trace(JSTracer* trc) override {
         Module::trace(trc);
-        module_->trace(trc);
+        asmJSMetadata_->trace(trc);
     }
     virtual void addSizeOfMisc(MallocSizeOf mallocSizeOf, size_t* code, size_t* data) override {
         Module::addSizeOfMisc(mallocSizeOf, code, data);
-        *data += mallocSizeOf(link_.get()) + link_->sizeOfExcludingThis(mallocSizeOf);
+        *data += mallocSizeOf(staticLinkData_.get()) + staticLinkData_->sizeOfExcludingThis(mallocSizeOf);
         *data += mallocSizeOf(exportMap_.get()) + exportMap_->sizeOfExcludingThis(mallocSizeOf);
-        *data += mallocSizeOf(module_.get()) + module_->sizeOfExcludingThis(mallocSizeOf);
+        *data += mallocSizeOf(asmJSMetadata_.get()) + asmJSMetadata_->sizeOfExcludingThis(mallocSizeOf);
     }
     virtual bool mutedErrors() const override {
         return scriptSource()->mutedErrors();
@@ -396,16 +405,17 @@ class js::AsmJSModule final : public Module
         return scriptSource();
     }
 
-    uint32_t minHeapLength() const { return module_->minHeapLength; }
-    uint32_t numFFIs() const { return module_->numFFIs; }
-    bool strict() const { return module_->strict; }
-    ScriptSource* scriptSource() const { return module_->scriptSource.get(); }
-    const AsmJSGlobalVector& asmJSGlobals() const { return module_->globals; }
-    const AsmJSImportVector& asmJSImports() const { return module_->imports; }
-    const AsmJSExportVector& asmJSExports() const { return module_->exports; }
-    PropertyName* globalArgumentName() const { return module_->globalArgumentName; }
-    PropertyName* importArgumentName() const { return module_->importArgumentName; }
-    PropertyName* bufferArgumentName() const { return module_->bufferArgumentName; }
+    uint32_t minHeapLength() const { return asmJSMetadata_->minHeapLength; }
+    uint32_t numFFIs() const { return asmJSMetadata_->numFFIs; }
+    bool strict() const { return asmJSMetadata_->strict; }
+    ScriptSource* scriptSource() const { return asmJSMetadata_->scriptSource.get(); }
+    const AsmJSGlobalVector& asmJSGlobals() const { return asmJSMetadata_->globals; }
+    const AsmJSImportVector& asmJSImports() const { return asmJSMetadata_->imports; }
+    const AsmJSExportVector& asmJSExports() const { return asmJSMetadata_->exports; }
+    PropertyName* globalArgumentName() const { return asmJSMetadata_->globalArgumentName; }
+    PropertyName* importArgumentName() const { return asmJSMetadata_->importArgumentName; }
+    PropertyName* bufferArgumentName() const { return asmJSMetadata_->bufferArgumentName; }
+    bool loadedFromCache() const { return asmJSMetadata_->cacheResult == CacheResult::Hit; }
 
     // srcStart() refers to the offset in the ScriptSource to the beginning of
     // the asm.js module function. If the function has been created with the
@@ -413,23 +423,23 @@ class js::AsmJSModule final : public Module
     // source. Otherwise, it will be the opening parenthesis of the arguments
     // list.
     uint32_t srcStart() const {
-        return module_->srcStart;
+        return asmJSMetadata_->srcStart;
    }
    uint32_t srcEndBeforeCurly() const {
-        return module_->srcStart + module_->srcLength;
+        return asmJSMetadata_->srcStart + asmJSMetadata_->srcLength;
    }
    uint32_t srcEndAfterCurly() const {
-        return module_->srcStart + module_->srcLengthWithRightBrace;
+        return asmJSMetadata_->srcStart + asmJSMetadata_->srcLengthWithRightBrace;
    }
 
    // srcBodyStart() refers to the offset in the ScriptSource to the end
    // of the 'use asm' string-literal token.
    uint32_t srcBodyStart() const {
-        return module_->srcBodyStart;
+        return asmJSMetadata_->srcBodyStart;
    }
 
    bool staticallyLink(ExclusiveContext* cx) {
-        return Module::staticallyLink(cx, *link_);
+        return Module::staticallyLink(cx, *staticLinkData_);
    }
    bool dynamicallyLink(JSContext* cx,
                         Handle<WasmModuleObject*> moduleObj,
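Note on the ownership change above: before this patch an AsmJSModule uniquely owned its AsmJSModuleData; afterwards it holds a UniqueCodeSegment plus ref-counted pointers (SharedStaticLinkData, SharedExportMap, SharedAsmJSMetadata), so the immutable guts can be shared instead of cloned. A minimal standalone sketch of the same pattern, using std::unique_ptr/std::shared_ptr as stand-ins for UniquePtr/RefPtr (the *Like names are illustrative, not the SpiderMonkey API):

    #include <cstdint>
    #include <memory>
    #include <vector>

    // Built mutably by the validator, then only ever read.
    struct MetadataLike {
        uint32_t minHeapLength = 0;
    };

    struct CodeSegmentLike {
        std::vector<uint8_t> bytes;
    };

    class ModuleLike {
        std::unique_ptr<CodeSegmentLike> code_;        // exclusively owned, like UniqueCodeSegment
        std::shared_ptr<const MetadataLike> metadata_; // shared immutably, like SharedAsmJSMetadata
      public:
        ModuleLike(std::unique_ptr<CodeSegmentLike> code, std::shared_ptr<const MetadataLike> metadata)
          : code_(std::move(code)), metadata_(std::move(metadata)) {}
        uint32_t minHeapLength() const { return metadata_->minHeapLength; }
    };

    int main() {
        auto metadata = std::make_shared<MetadataLike>();            // mutable while being built
        metadata->minHeapLength = 64 * 1024;
        ModuleLike a(std::make_unique<CodeSegmentLike>(), metadata);
        ModuleLike b(std::make_unique<CodeSegmentLike>(), metadata); // a clone shares the same metadata
        return a.minHeapLength() == b.minHeapLength() ? 0 : 1;
    }

Because the metadata is only mutated while the validator builds it and is const afterwards, sharing it by reference count is safe; as the AsmJSModule::clone() hunk further down shows, only the CodeSegment still needs a deep copy.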
@@ -1685,7 +1695,7 @@ class MOZ_STACK_CLASS ModuleValidator
 
     // State used to build the AsmJSModule in finish():
     ModuleGenerator mg_;
-    UniqueAsmJSModuleData module_;
+    MutableAsmJSMetadata asmJSMetadata_;
 
     // Error reporting:
     UniqueChars errorString_;
@@ -1776,15 +1786,15 @@ class MOZ_STACK_CLASS ModuleValidator
     }
 
     bool init() {
-        module_ = cx_->make_unique<AsmJSModuleData>();
-        if (!module_)
+        asmJSMetadata_ = cx_->new_<AsmJSMetadata>();
+        if (!asmJSMetadata_)
             return false;
 
-        module_->minHeapLength = RoundUpToNextValidAsmJSHeapLength(0);
-        module_->srcStart = moduleFunctionNode_->pn_body->pn_pos.begin;
-        module_->srcBodyStart = parser_.tokenStream.currentToken().pos.end;
-        module_->strict = parser_.pc->sc->strict() && !parser_.pc->sc->hasExplicitUseStrict();
-        module_->scriptSource.reset(parser_.ss);
+        asmJSMetadata_->minHeapLength = RoundUpToNextValidAsmJSHeapLength(0);
+        asmJSMetadata_->srcStart = moduleFunctionNode_->pn_body->pn_pos.begin;
+        asmJSMetadata_->srcBodyStart = parser_.tokenStream.currentToken().pos.end;
+        asmJSMetadata_->strict = parser_.pc->sc->strict() && !parser_.pc->sc->hasExplicitUseStrict();
+        asmJSMetadata_->scriptSource.reset(parser_.ss);
 
         if (!globalMap_.init() || !sigMap_.init() || !importMap_.init())
             return false;
@ -1871,23 +1881,23 @@ class MOZ_STACK_CLASS ModuleValidator
|
||||||
if (!mg_.init(Move(genData), Move(filename)))
|
if (!mg_.init(Move(genData), Move(filename)))
|
||||||
return false;
|
return false;
|
||||||
|
|
||||||
mg_.bumpMinHeapLength(module_->minHeapLength);
|
mg_.bumpMinHeapLength(asmJSMetadata_->minHeapLength);
|
||||||
|
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
ExclusiveContext* cx() const { return cx_; }
|
ExclusiveContext* cx() const { return cx_; }
|
||||||
PropertyName* moduleFunctionName() const { return moduleFunctionName_; }
|
PropertyName* moduleFunctionName() const { return moduleFunctionName_; }
|
||||||
PropertyName* globalArgumentName() const { return module_->globalArgumentName; }
|
PropertyName* globalArgumentName() const { return asmJSMetadata_->globalArgumentName; }
|
||||||
PropertyName* importArgumentName() const { return module_->importArgumentName; }
|
PropertyName* importArgumentName() const { return asmJSMetadata_->importArgumentName; }
|
||||||
PropertyName* bufferArgumentName() const { return module_->bufferArgumentName; }
|
PropertyName* bufferArgumentName() const { return asmJSMetadata_->bufferArgumentName; }
|
||||||
ModuleGenerator& mg() { return mg_; }
|
ModuleGenerator& mg() { return mg_; }
|
||||||
AsmJSParser& parser() const { return parser_; }
|
AsmJSParser& parser() const { return parser_; }
|
||||||
TokenStream& tokenStream() const { return parser_.tokenStream; }
|
TokenStream& tokenStream() const { return parser_.tokenStream; }
|
||||||
RootedFunction& dummyFunction() { return dummyFunction_; }
|
RootedFunction& dummyFunction() { return dummyFunction_; }
|
||||||
bool supportsSimd() const { return cx_->jitSupportsSimd(); }
|
bool supportsSimd() const { return cx_->jitSupportsSimd(); }
|
||||||
bool atomicsPresent() const { return atomicsPresent_; }
|
bool atomicsPresent() const { return atomicsPresent_; }
|
||||||
uint32_t minHeapLength() const { return module_->minHeapLength; }
|
uint32_t minHeapLength() const { return asmJSMetadata_->minHeapLength; }
|
||||||
|
|
||||||
void initModuleFunctionName(PropertyName* name) {
|
void initModuleFunctionName(PropertyName* name) {
|
||||||
MOZ_ASSERT(!moduleFunctionName_);
|
MOZ_ASSERT(!moduleFunctionName_);
|
||||||
|
@ -1895,15 +1905,15 @@ class MOZ_STACK_CLASS ModuleValidator
|
||||||
}
|
}
|
||||||
void initGlobalArgumentName(PropertyName* n) {
|
void initGlobalArgumentName(PropertyName* n) {
|
||||||
MOZ_ASSERT(n->isTenured());
|
MOZ_ASSERT(n->isTenured());
|
||||||
module_->globalArgumentName = n;
|
asmJSMetadata_->globalArgumentName = n;
|
||||||
}
|
}
|
||||||
void initImportArgumentName(PropertyName* n) {
|
void initImportArgumentName(PropertyName* n) {
|
||||||
MOZ_ASSERT(n->isTenured());
|
MOZ_ASSERT(n->isTenured());
|
||||||
module_->importArgumentName = n;
|
asmJSMetadata_->importArgumentName = n;
|
||||||
}
|
}
|
||||||
void initBufferArgumentName(PropertyName* n) {
|
void initBufferArgumentName(PropertyName* n) {
|
||||||
MOZ_ASSERT(n->isTenured());
|
MOZ_ASSERT(n->isTenured());
|
||||||
module_->bufferArgumentName = n;
|
asmJSMetadata_->bufferArgumentName = n;
|
||||||
}
|
}
|
||||||
bool addGlobalVarInit(PropertyName* var, const NumLit& lit, Type type, bool isConst)
|
bool addGlobalVarInit(PropertyName* var, const NumLit& lit, Type type, bool isConst)
|
||||||
{
|
{
|
||||||
|
@ -1929,7 +1939,7 @@ class MOZ_STACK_CLASS ModuleValidator
|
||||||
g.pod.u.var.initKind_ = AsmJSGlobal::InitConstant;
|
g.pod.u.var.initKind_ = AsmJSGlobal::InitConstant;
|
||||||
g.pod.u.var.u.val_ = lit.value();
|
g.pod.u.var.u.val_ = lit.value();
|
||||||
g.pod.u.var.globalDataOffset_ = mg_.global(index).globalDataOffset;
|
g.pod.u.var.globalDataOffset_ = mg_.global(index).globalDataOffset;
|
||||||
return module_->globals.append(g);
|
return asmJSMetadata_->globals.append(g);
|
||||||
}
|
}
|
||||||
bool addGlobalVarImport(PropertyName* var, PropertyName* field, Type type, bool isConst) {
|
bool addGlobalVarImport(PropertyName* var, PropertyName* field, Type type, bool isConst) {
|
||||||
MOZ_ASSERT(type.isGlobalVarType());
|
MOZ_ASSERT(type.isGlobalVarType());
|
||||||
|
@ -1952,7 +1962,7 @@ class MOZ_STACK_CLASS ModuleValidator
|
||||||
g.pod.u.var.initKind_ = AsmJSGlobal::InitImport;
|
g.pod.u.var.initKind_ = AsmJSGlobal::InitImport;
|
||||||
g.pod.u.var.u.importType_ = valType;
|
g.pod.u.var.u.importType_ = valType;
|
||||||
g.pod.u.var.globalDataOffset_ = mg_.global(index).globalDataOffset;
|
g.pod.u.var.globalDataOffset_ = mg_.global(index).globalDataOffset;
|
||||||
return module_->globals.append(g);
|
return asmJSMetadata_->globals.append(g);
|
||||||
}
|
}
|
||||||
bool addArrayView(PropertyName* var, Scalar::Type vt, PropertyName* maybeField) {
|
bool addArrayView(PropertyName* var, Scalar::Type vt, PropertyName* maybeField) {
|
||||||
if (!arrayViews_.append(ArrayView(var, vt)))
|
if (!arrayViews_.append(ArrayView(var, vt)))
|
||||||
|
@ -1967,7 +1977,7 @@ class MOZ_STACK_CLASS ModuleValidator
|
||||||
|
|
||||||
AsmJSGlobal g(AsmJSGlobal::ArrayView, maybeField);
|
AsmJSGlobal g(AsmJSGlobal::ArrayView, maybeField);
|
||||||
g.pod.u.viewType_ = vt;
|
g.pod.u.viewType_ = vt;
|
||||||
return module_->globals.append(g);
|
return asmJSMetadata_->globals.append(g);
|
||||||
}
|
}
|
||||||
bool addMathBuiltinFunction(PropertyName* var, AsmJSMathBuiltinFunction func,
|
bool addMathBuiltinFunction(PropertyName* var, AsmJSMathBuiltinFunction func,
|
||||||
PropertyName* field)
|
PropertyName* field)
|
||||||
|
@ -1981,7 +1991,7 @@ class MOZ_STACK_CLASS ModuleValidator
|
||||||
|
|
||||||
AsmJSGlobal g(AsmJSGlobal::MathBuiltinFunction, field);
|
AsmJSGlobal g(AsmJSGlobal::MathBuiltinFunction, field);
|
||||||
g.pod.u.mathBuiltinFunc_ = func;
|
g.pod.u.mathBuiltinFunc_ = func;
|
||||||
return module_->globals.append(g);
|
return asmJSMetadata_->globals.append(g);
|
||||||
}
|
}
|
||||||
private:
|
private:
|
||||||
bool addGlobalDoubleConstant(PropertyName* var, double constant) {
|
bool addGlobalDoubleConstant(PropertyName* var, double constant) {
|
||||||
|
@ -2000,7 +2010,7 @@ class MOZ_STACK_CLASS ModuleValidator
|
||||||
AsmJSGlobal g(AsmJSGlobal::Constant, field);
|
AsmJSGlobal g(AsmJSGlobal::Constant, field);
|
||||||
g.pod.u.constant.value_ = constant;
|
g.pod.u.constant.value_ = constant;
|
||||||
g.pod.u.constant.kind_ = AsmJSGlobal::MathConstant;
|
g.pod.u.constant.kind_ = AsmJSGlobal::MathConstant;
|
||||||
return module_->globals.append(g);
|
return asmJSMetadata_->globals.append(g);
|
||||||
}
|
}
|
||||||
bool addGlobalConstant(PropertyName* var, double constant, PropertyName* field) {
|
bool addGlobalConstant(PropertyName* var, double constant, PropertyName* field) {
|
||||||
if (!addGlobalDoubleConstant(var, constant))
|
if (!addGlobalDoubleConstant(var, constant))
|
||||||
|
@ -2009,7 +2019,7 @@ class MOZ_STACK_CLASS ModuleValidator
|
||||||
AsmJSGlobal g(AsmJSGlobal::Constant, field);
|
AsmJSGlobal g(AsmJSGlobal::Constant, field);
|
||||||
g.pod.u.constant.value_ = constant;
|
g.pod.u.constant.value_ = constant;
|
||||||
g.pod.u.constant.kind_ = AsmJSGlobal::GlobalConstant;
|
g.pod.u.constant.kind_ = AsmJSGlobal::GlobalConstant;
|
||||||
return module_->globals.append(g);
|
return asmJSMetadata_->globals.append(g);
|
||||||
}
|
}
|
||||||
bool addAtomicsBuiltinFunction(PropertyName* var, AsmJSAtomicsBuiltinFunction func,
|
bool addAtomicsBuiltinFunction(PropertyName* var, AsmJSAtomicsBuiltinFunction func,
|
||||||
PropertyName* field)
|
PropertyName* field)
|
||||||
|
@ -2025,7 +2035,7 @@ class MOZ_STACK_CLASS ModuleValidator
|
||||||
|
|
||||||
AsmJSGlobal g(AsmJSGlobal::AtomicsBuiltinFunction, field);
|
AsmJSGlobal g(AsmJSGlobal::AtomicsBuiltinFunction, field);
|
||||||
g.pod.u.atomicsBuiltinFunc_ = func;
|
g.pod.u.atomicsBuiltinFunc_ = func;
|
||||||
return module_->globals.append(g);
|
return asmJSMetadata_->globals.append(g);
|
||||||
}
|
}
|
||||||
bool addSimdCtor(PropertyName* var, SimdType type, PropertyName* field) {
|
bool addSimdCtor(PropertyName* var, SimdType type, PropertyName* field) {
|
||||||
Global* global = validationLifo_.new_<Global>(Global::SimdCtor);
|
Global* global = validationLifo_.new_<Global>(Global::SimdCtor);
|
||||||
|
@ -2037,7 +2047,7 @@ class MOZ_STACK_CLASS ModuleValidator
|
||||||
|
|
||||||
AsmJSGlobal g(AsmJSGlobal::SimdCtor, field);
|
AsmJSGlobal g(AsmJSGlobal::SimdCtor, field);
|
||||||
g.pod.u.simdCtorType_ = type;
|
g.pod.u.simdCtorType_ = type;
|
||||||
return module_->globals.append(g);
|
return asmJSMetadata_->globals.append(g);
|
||||||
}
|
}
|
||||||
bool addSimdOperation(PropertyName* var, SimdType type, SimdOperation op, PropertyName* opName)
|
bool addSimdOperation(PropertyName* var, SimdType type, SimdOperation op, PropertyName* opName)
|
||||||
{
|
{
|
||||||
|
@ -2052,7 +2062,7 @@ class MOZ_STACK_CLASS ModuleValidator
|
||||||
AsmJSGlobal g(AsmJSGlobal::SimdOp, opName);
|
AsmJSGlobal g(AsmJSGlobal::SimdOp, opName);
|
||||||
g.pod.u.simdOp.type_ = type;
|
g.pod.u.simdOp.type_ = type;
|
||||||
g.pod.u.simdOp.which_ = op;
|
g.pod.u.simdOp.which_ = op;
|
||||||
return module_->globals.append(g);
|
return asmJSMetadata_->globals.append(g);
|
||||||
}
|
}
|
||||||
bool addArrayViewCtor(PropertyName* var, Scalar::Type vt, PropertyName* field) {
|
bool addArrayViewCtor(PropertyName* var, Scalar::Type vt, PropertyName* field) {
|
||||||
Global* global = validationLifo_.new_<Global>(Global::ArrayViewCtor);
|
Global* global = validationLifo_.new_<Global>(Global::ArrayViewCtor);
|
||||||
|
@ -2064,12 +2074,12 @@ class MOZ_STACK_CLASS ModuleValidator
|
||||||
|
|
||||||
AsmJSGlobal g(AsmJSGlobal::ArrayViewCtor, field);
|
AsmJSGlobal g(AsmJSGlobal::ArrayViewCtor, field);
|
||||||
g.pod.u.viewType_ = vt;
|
g.pod.u.viewType_ = vt;
|
||||||
return module_->globals.append(g);
|
return asmJSMetadata_->globals.append(g);
|
||||||
}
|
}
|
||||||
bool addFFI(PropertyName* var, PropertyName* field) {
|
bool addFFI(PropertyName* var, PropertyName* field) {
|
||||||
if (module_->numFFIs == UINT32_MAX)
|
if (asmJSMetadata_->numFFIs == UINT32_MAX)
|
||||||
return false;
|
return false;
|
||||||
uint32_t ffiIndex = module_->numFFIs++;
|
uint32_t ffiIndex = asmJSMetadata_->numFFIs++;
|
||||||
|
|
||||||
Global* global = validationLifo_.new_<Global>(Global::FFI);
|
Global* global = validationLifo_.new_<Global>(Global::FFI);
|
||||||
if (!global)
|
if (!global)
|
||||||
|
@ -2080,7 +2090,7 @@ class MOZ_STACK_CLASS ModuleValidator
|
||||||
|
|
||||||
AsmJSGlobal g(AsmJSGlobal::FFI, field);
|
AsmJSGlobal g(AsmJSGlobal::FFI, field);
|
||||||
g.pod.u.ffiIndex_ = ffiIndex;
|
g.pod.u.ffiIndex_ = ffiIndex;
|
||||||
return module_->globals.append(g);
|
return asmJSMetadata_->globals.append(g);
|
||||||
}
|
}
|
||||||
bool addExportField(ParseNode* pn, const Func& func, PropertyName* maybeFieldName) {
|
bool addExportField(ParseNode* pn, const Func& func, PropertyName* maybeFieldName) {
|
||||||
// Record the field name of this export.
|
// Record the field name of this export.
|
||||||
|
@ -2100,10 +2110,10 @@ class MOZ_STACK_CLASS ModuleValidator
|
||||||
|
|
||||||
// The exported function might have already been exported in which case
|
// The exported function might have already been exported in which case
|
||||||
// the index will refer into the range of AsmJSExports.
|
// the index will refer into the range of AsmJSExports.
|
||||||
MOZ_ASSERT(exportIndex <= module_->exports.length());
|
MOZ_ASSERT(exportIndex <= asmJSMetadata_->exports.length());
|
||||||
return exportIndex < module_->exports.length() ||
|
return exportIndex < asmJSMetadata_->exports.length() ||
|
||||||
module_->exports.emplaceBack(func.srcBegin() - module_->srcStart,
|
asmJSMetadata_->exports.emplaceBack(func.srcBegin() - asmJSMetadata_->srcStart,
|
||||||
func.srcEnd() - module_->srcStart);
|
func.srcEnd() - asmJSMetadata_->srcStart);
|
||||||
}
|
}
|
||||||
bool addFunction(PropertyName* name, uint32_t firstUse, Sig&& sig, Func** func) {
|
bool addFunction(PropertyName* name, uint32_t firstUse, Sig&& sig, Func** func) {
|
||||||
uint32_t sigIndex;
|
uint32_t sigIndex;
|
||||||
|
@ -2155,10 +2165,10 @@ class MOZ_STACK_CLASS ModuleValidator
|
||||||
*importIndex = p->value();
|
*importIndex = p->value();
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
*importIndex = module_->imports.length();
|
*importIndex = asmJSMetadata_->imports.length();
|
||||||
if (*importIndex >= MaxImports)
|
if (*importIndex >= MaxImports)
|
||||||
return failCurrentOffset("too many imports");
|
return failCurrentOffset("too many imports");
|
||||||
if (!module_->imports.emplaceBack(ffiIndex))
|
if (!asmJSMetadata_->imports.emplaceBack(ffiIndex))
|
||||||
return false;
|
return false;
|
||||||
uint32_t sigIndex;
|
uint32_t sigIndex;
|
||||||
if (!declareSig(Move(sig), &sigIndex))
|
if (!declareSig(Move(sig), &sigIndex))
|
||||||
|
@ -2174,8 +2184,8 @@ class MOZ_STACK_CLASS ModuleValidator
|
||||||
if (len > uint64_t(INT32_MAX) + 1)
|
if (len > uint64_t(INT32_MAX) + 1)
|
||||||
return false;
|
return false;
|
||||||
len = RoundUpToNextValidAsmJSHeapLength(len);
|
len = RoundUpToNextValidAsmJSHeapLength(len);
|
||||||
if (len > module_->minHeapLength) {
|
if (len > asmJSMetadata_->minHeapLength) {
|
||||||
module_->minHeapLength = len;
|
asmJSMetadata_->minHeapLength = len;
|
||||||
mg_.bumpMinHeapLength(len);
|
mg_.bumpMinHeapLength(len);
|
||||||
}
|
}
|
||||||
return true;
|
return true;
|
||||||
|
@ -2320,24 +2330,31 @@ class MOZ_STACK_CLASS ModuleValidator
|
||||||
}
|
}
|
||||||
|
|
||||||
uint32_t endBeforeCurly = tokenStream().currentToken().pos.end;
|
uint32_t endBeforeCurly = tokenStream().currentToken().pos.end;
|
||||||
module_->srcLength = endBeforeCurly - module_->srcStart;
|
asmJSMetadata_->srcLength = endBeforeCurly - asmJSMetadata_->srcStart;
|
||||||
|
|
||||||
TokenPos pos;
|
TokenPos pos;
|
||||||
JS_ALWAYS_TRUE(tokenStream().peekTokenPos(&pos, TokenStream::Operand));
|
JS_ALWAYS_TRUE(tokenStream().peekTokenPos(&pos, TokenStream::Operand));
|
||||||
uint32_t endAfterCurly = pos.end;
|
uint32_t endAfterCurly = pos.end;
|
||||||
module_->srcLengthWithRightBrace = endAfterCurly - module_->srcStart;
|
asmJSMetadata_->srcLengthWithRightBrace = endAfterCurly - asmJSMetadata_->srcStart;
|
||||||
|
|
||||||
UniqueModuleData base;
|
UniqueCodeSegment code;
|
||||||
UniqueStaticLinkData link;
|
SharedMetadata metadata;
|
||||||
UniqueExportMap exportMap;
|
SharedStaticLinkData staticLinkData;
|
||||||
if (!mg_.finish(Move(funcNames), &base, &link, &exportMap, slowFuncs))
|
SharedExportMap exportMap;
|
||||||
|
if (!mg_.finish(Move(funcNames), &code, &metadata, &staticLinkData, &exportMap, slowFuncs))
|
||||||
return false;
|
return false;
|
||||||
|
|
||||||
moduleObj.set(WasmModuleObject::create(cx_));
|
moduleObj.set(WasmModuleObject::create(cx_));
|
||||||
if (!moduleObj)
|
if (!moduleObj)
|
||||||
return false;
|
return false;
|
||||||
|
|
||||||
return moduleObj->init(js_new<AsmJSModule>(Move(base), Move(link), Move(exportMap), Move(module_)));
|
auto* module = js_new<AsmJSModule>(Move(code), *metadata, *staticLinkData, *exportMap,
|
||||||
|
*asmJSMetadata_);
|
||||||
|
if (!module)
|
||||||
|
return false;
|
||||||
|
|
||||||
|
moduleObj->init(*module);
|
||||||
|
return true;
|
||||||
}
|
}
|
||||||
};
|
};
|
||||||
|
|
||||||
|
@ -8049,15 +8066,8 @@ AsmJSGlobal::deserialize(ExclusiveContext* cx, const uint8_t* cursor)
|
||||||
return cursor;
|
return cursor;
|
||||||
}
|
}
|
||||||
|
|
||||||
bool
|
|
||||||
AsmJSGlobal::clone(JSContext* cx, AsmJSGlobal* out) const
|
|
||||||
{
|
|
||||||
*out = *this;
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
size_t
|
size_t
|
||||||
AsmJSModuleData::serializedSize() const
|
AsmJSMetadata::serializedSize() const
|
||||||
{
|
{
|
||||||
return sizeof(pod()) +
|
return sizeof(pod()) +
|
||||||
SerializedVectorSize(globals) +
|
SerializedVectorSize(globals) +
|
||||||
|
@ -8069,7 +8079,7 @@ AsmJSModuleData::serializedSize() const
|
||||||
}
|
}
|
||||||
|
|
||||||
uint8_t*
|
uint8_t*
|
||||||
AsmJSModuleData::serialize(uint8_t* cursor) const
|
AsmJSMetadata::serialize(uint8_t* cursor) const
|
||||||
{
|
{
|
||||||
cursor = WriteBytes(cursor, &pod(), sizeof(pod()));
|
cursor = WriteBytes(cursor, &pod(), sizeof(pod()));
|
||||||
cursor = SerializeVector(cursor, globals);
|
cursor = SerializeVector(cursor, globals);
|
||||||
|
@ -8082,7 +8092,7 @@ AsmJSModuleData::serialize(uint8_t* cursor) const
|
||||||
}
|
}
|
||||||
|
|
||||||
const uint8_t*
|
const uint8_t*
|
||||||
AsmJSModuleData::deserialize(ExclusiveContext* cx, const uint8_t* cursor)
|
AsmJSMetadata::deserialize(ExclusiveContext* cx, const uint8_t* cursor)
|
||||||
{
|
{
|
||||||
(cursor = ReadBytes(cursor, &pod(), sizeof(pod()))) &&
|
(cursor = ReadBytes(cursor, &pod(), sizeof(pod()))) &&
|
||||||
(cursor = DeserializeVector(cx, cursor, &globals)) &&
|
(cursor = DeserializeVector(cx, cursor, &globals)) &&
|
||||||
|
@ -8091,27 +8101,12 @@ AsmJSModuleData::deserialize(ExclusiveContext* cx, const uint8_t* cursor)
|
||||||
(cursor = DeserializeName(cx, cursor, &globalArgumentName)) &&
|
(cursor = DeserializeName(cx, cursor, &globalArgumentName)) &&
|
||||||
(cursor = DeserializeName(cx, cursor, &importArgumentName)) &&
|
(cursor = DeserializeName(cx, cursor, &importArgumentName)) &&
|
||||||
(cursor = DeserializeName(cx, cursor, &bufferArgumentName));
|
(cursor = DeserializeName(cx, cursor, &bufferArgumentName));
|
||||||
|
cacheResult = CacheResult::Hit;
|
||||||
return cursor;
|
return cursor;
|
||||||
}
|
}
|
||||||
|
|
||||||
bool
|
|
||||||
AsmJSModuleData::clone(JSContext* cx, AsmJSModuleData* out) const
|
|
||||||
{
|
|
||||||
out->pod() = pod();
|
|
||||||
out->globalArgumentName = globalArgumentName;
|
|
||||||
out->importArgumentName = importArgumentName;
|
|
||||||
out->bufferArgumentName = bufferArgumentName;
|
|
||||||
out->srcStart = srcStart;
|
|
||||||
out->srcBodyStart = srcBodyStart;
|
|
||||||
out->strict = strict;
|
|
||||||
out->scriptSource.reset(scriptSource.get());
|
|
||||||
return CloneVector(cx, globals, &out->globals) &&
|
|
||||||
ClonePodVector(cx, imports, &out->imports) &&
|
|
||||||
ClonePodVector(cx, exports, &out->exports);
|
|
||||||
}
|
|
||||||
|
|
||||||
size_t
|
size_t
|
||||||
AsmJSModuleData::sizeOfExcludingThis(MallocSizeOf mallocSizeOf) const
|
AsmJSMetadata::sizeOfExcludingThis(MallocSizeOf mallocSizeOf) const
|
||||||
{
|
{
|
||||||
return globals.sizeOfExcludingThis(mallocSizeOf) +
|
return globals.sizeOfExcludingThis(mallocSizeOf) +
|
||||||
imports.sizeOfExcludingThis(mallocSizeOf) +
|
imports.sizeOfExcludingThis(mallocSizeOf) +
|
||||||
|
@ -8121,19 +8116,21 @@ AsmJSModuleData::sizeOfExcludingThis(MallocSizeOf mallocSizeOf) const
|
||||||
size_t
|
size_t
|
||||||
AsmJSModule::serializedSize() const
|
AsmJSModule::serializedSize() const
|
||||||
{
|
{
|
||||||
return base().serializedSize() +
|
return codeSegment().serializedSize() +
|
||||||
link_->serializedSize() +
|
metadata().serializedSize() +
|
||||||
|
staticLinkData_->serializedSize() +
|
||||||
exportMap_->serializedSize() +
|
exportMap_->serializedSize() +
|
||||||
module_->serializedSize();
|
asmJSMetadata_->serializedSize();
|
||||||
}
|
}
|
||||||
|
|
||||||
uint8_t*
|
uint8_t*
|
||||||
AsmJSModule::serialize(uint8_t* cursor) const
|
AsmJSModule::serialize(uint8_t* cursor) const
|
||||||
{
|
{
|
||||||
cursor = base().serialize(cursor);
|
cursor = codeSegment().serialize(cursor);
|
||||||
cursor = link_->serialize(cursor);
|
cursor = metadata().serialize(cursor);
|
||||||
|
cursor = staticLinkData_->serialize(cursor);
|
||||||
cursor = exportMap_->serialize(cursor);
|
cursor = exportMap_->serialize(cursor);
|
||||||
cursor = module_->serialize(cursor);
|
cursor = asmJSMetadata_->serialize(cursor);
|
||||||
return cursor;
|
return cursor;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -8149,46 +8146,53 @@ AsmJSModule::deserialize(ExclusiveContext* cx, const uint8_t* cursor, AsmJSParse
|
||||||
// Vectors so, for simplicity, inhibit GC of the atoms zone.
|
// Vectors so, for simplicity, inhibit GC of the atoms zone.
|
||||||
AutoKeepAtoms aka(cx->perThreadData);
|
AutoKeepAtoms aka(cx->perThreadData);
|
||||||
|
|
||||||
UniqueModuleData base = cx->make_unique<ModuleData>();
|
UniqueCodeSegment code = MakeUnique<CodeSegment>();
|
||||||
if (!base)
|
if (!code)
|
||||||
return nullptr;
|
return nullptr;
|
||||||
cursor = base->deserialize(cx, cursor);
|
cursor = code->deserialize(cx, cursor);
|
||||||
if (!cursor)
|
if (!cursor)
|
||||||
return nullptr;
|
return nullptr;
|
||||||
|
|
||||||
MOZ_ASSERT(!base->loadedFromCache);
|
MutableMetadata metadata = js_new<Metadata>();
|
||||||
base->loadedFromCache = true;
|
if (!metadata)
|
||||||
|
|
||||||
UniqueStaticLinkData link = cx->make_unique<StaticLinkData>();
|
|
||||||
if (!link)
|
|
||||||
return nullptr;
|
return nullptr;
|
||||||
cursor = link->deserialize(cx, cursor);
|
cursor = metadata->deserialize(cx, cursor);
|
||||||
if (!cursor)
|
if (!cursor)
|
||||||
return nullptr;
|
return nullptr;
|
||||||
|
|
||||||
UniqueExportMap exportMap = cx->make_unique<ExportMap>();
|
MutableStaticLinkData staticLinkData = cx->new_<StaticLinkData>();
|
||||||
|
if (!staticLinkData)
|
||||||
|
return nullptr;
|
||||||
|
cursor = staticLinkData->deserialize(cx, cursor);
|
||||||
|
if (!cursor)
|
||||||
|
return nullptr;
|
||||||
|
|
||||||
|
MutableExportMap exportMap = cx->new_<ExportMap>();
|
||||||
if (!exportMap)
|
if (!exportMap)
|
||||||
return nullptr;
|
return nullptr;
|
||||||
cursor = exportMap->deserialize(cx, cursor);
|
cursor = exportMap->deserialize(cx, cursor);
|
||||||
if (!cursor)
|
if (!cursor)
|
||||||
return nullptr;
|
return nullptr;
|
||||||
|
|
||||||
UniqueAsmJSModuleData module = cx->make_unique<AsmJSModuleData>();
|
MutableAsmJSMetadata asmJSMetadata = cx->new_<AsmJSMetadata>();
|
||||||
if (!module)
|
if (!asmJSMetadata)
|
||||||
return nullptr;
|
return nullptr;
|
||||||
cursor = module->deserialize(cx, cursor);
|
cursor = asmJSMetadata->deserialize(cx, cursor);
|
||||||
if (!cursor)
|
if (!cursor)
|
||||||
return nullptr;
|
return nullptr;
|
||||||
|
|
||||||
// See AsmJSModuleData comment as well as ModuleValidator::init().
|
// See AsmJSMetadata comment as well as ModuleValidator::init().
|
||||||
module->srcStart = parser.pc->maybeFunction->pn_body->pn_pos.begin;
|
asmJSMetadata->srcStart = parser.pc->maybeFunction->pn_body->pn_pos.begin;
|
||||||
module->srcBodyStart = parser.tokenStream.currentToken().pos.end;
|
asmJSMetadata->srcBodyStart = parser.tokenStream.currentToken().pos.end;
|
||||||
module->strict = parser.pc->sc->strict() && !parser.pc->sc->hasExplicitUseStrict();
|
asmJSMetadata->strict = parser.pc->sc->strict() && !parser.pc->sc->hasExplicitUseStrict();
|
||||||
module->scriptSource.reset(parser.ss);
|
asmJSMetadata->scriptSource.reset(parser.ss);
|
||||||
|
|
||||||
if (!moduleObj->init(js_new<AsmJSModule>(Move(base), Move(link), Move(exportMap), Move(module))))
|
auto* module = js_new<AsmJSModule>(Move(code), *metadata, *staticLinkData, *exportMap,
|
||||||
|
*asmJSMetadata);
|
||||||
|
if (!module)
|
||||||
return nullptr;
|
return nullptr;
|
||||||
|
|
||||||
|
moduleObj->init(*module);
|
||||||
return cursor;
|
return cursor;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -8202,26 +8206,18 @@ AsmJSModule::clone(JSContext* cx, MutableHandle<WasmModuleObject*> moduleObj) co
|
||||||
// Prevent any GC that may move the temporarily-unrooted atoms being cloned.
|
// Prevent any GC that may move the temporarily-unrooted atoms being cloned.
|
||||||
AutoKeepAtoms aka(cx->perThreadData);
|
AutoKeepAtoms aka(cx->perThreadData);
|
||||||
|
|
||||||
UniqueModuleData base = cx->make_unique<ModuleData>();
|
UniqueCodeSegment code = CodeSegment::clone(cx, codeSegment());
|
||||||
if (!base || !this->base().clone(cx, base.get()))
|
if (!code)
|
||||||
return false;
|
return false;
|
||||||
|
|
||||||
UniqueStaticLinkData link = cx->make_unique<StaticLinkData>();
|
auto* module = js_new<AsmJSModule>(Move(code), metadata(), *staticLinkData_, *exportMap_,
|
||||||
if (!link || !link_->clone(cx, link.get()))
|
*asmJSMetadata_);
|
||||||
|
if (!module)
|
||||||
return false;
|
return false;
|
||||||
|
|
||||||
UniqueExportMap exportMap = cx->make_unique<ExportMap>();
|
moduleObj->init(*module);
|
||||||
if (!exportMap || !exportMap_->clone(cx, exportMap.get()))
|
|
||||||
return false;
|
|
||||||
|
|
||||||
UniqueAsmJSModuleData module = cx->make_unique<AsmJSModuleData>();
|
return Module::clone(cx, *staticLinkData_, &moduleObj->module());
|
||||||
if (!module || !module_->clone(cx, module.get()))
|
|
||||||
return false;
|
|
||||||
|
|
||||||
if (!moduleObj->init(js_new<AsmJSModule>(Move(base), Move(link), Move(exportMap), Move(module))))
|
|
||||||
return false;
|
|
||||||
|
|
||||||
return Module::clone(cx, *link_, &moduleObj->module());
|
|
||||||
}
|
}
|
||||||
|
|
||||||
namespace {
|
namespace {
|
||||||
|
@@ -8782,7 +8778,7 @@ js::IsAsmJSModuleLoadedFromCache(JSContext* cx, unsigned argc, Value* vp)
         return false;
     }
 
-    bool loadedFromCache = AsmJSModuleToModuleObject(fun)->module().loadedFromCache();
+    bool loadedFromCache = AsmJSModuleToModuleObject(fun)->module().asAsmJS().loadedFromCache();
 
     args.rval().set(BooleanValue(loadedFromCache));
     return true;
@@ -1102,7 +1102,7 @@ DecodeNameSection(JSContext* cx, Decoder& d, CacheableCharsVector* funcNames)
 
 static bool
 DecodeModule(JSContext* cx, UniqueChars file, const uint8_t* bytes, uint32_t length,
-             ImportNameVector* importNames, UniqueExportMap* exportMap,
+             ImportNameVector* importNames, SharedExportMap* exportMap,
              MutableHandle<ArrayBufferObject*> heap, MutableHandle<WasmModuleObject*> moduleObj)
 {
     Decoder d(bytes, bytes + length);
@@ -1155,20 +1155,24 @@ DecodeModule(JSContext* cx, UniqueChars file, const uint8_t* bytes, uint32_t len
             return Fail(cx, d, "failed to skip unknown section at end");
     }
 
-    UniqueModuleData module;
-    UniqueStaticLinkData staticLink;
+    UniqueCodeSegment code;
+    SharedMetadata metadata;
+    SharedStaticLinkData staticLinkData;
     SlowFunctionVector slowFuncs(cx);
-    if (!mg.finish(Move(funcNames), &module, &staticLink, exportMap, &slowFuncs))
+    if (!mg.finish(Move(funcNames), &code, &metadata, &staticLinkData, exportMap, &slowFuncs))
         return false;
 
     moduleObj.set(WasmModuleObject::create(cx));
     if (!moduleObj)
         return false;
 
-    if (!moduleObj->init(cx->new_<Module>(Move(module))))
+    auto module = cx->new_<Module>(Move(code), *metadata);
+    if (!module)
         return false;
 
-    return moduleObj->module().staticallyLink(cx, *staticLink);
+    moduleObj->init(*module);
+
+    return moduleObj->module().staticallyLink(cx, *staticLinkData);
 }
@@ -1295,7 +1299,7 @@ wasm::Eval(JSContext* cx, Handle<TypedArrayObject*> code, HandleObject importObj
         return false;
 
     ImportNameVector importNames;
-    UniqueExportMap exportMap;
+    SharedExportMap exportMap;
     Rooted<ArrayBufferObject*> heap(cx);
     Rooted<WasmModuleObject*> moduleObj(cx);
 
@ -0,0 +1,381 @@
|
||||||
|
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
|
||||||
|
* vim: set ts=8 sts=4 et sw=4 tw=99:
|
||||||
|
*
|
||||||
|
* Copyright 2016 Mozilla Foundation
|
||||||
|
*
|
||||||
|
* Licensed under the Apache License, Version 2.0 (the "License");
|
||||||
|
* you may not use this file except in compliance with the License.
|
||||||
|
* You may obtain a copy of the License at
|
||||||
|
*
|
||||||
|
* http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
*
|
||||||
|
* Unless required by applicable law or agreed to in writing, software
|
||||||
|
* distributed under the License is distributed on an "AS IS" BASIS,
|
||||||
|
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
|
* See the License for the specific language governing permissions and
|
||||||
|
* limitations under the License.
|
||||||
|
*/
|
||||||
|
|
||||||
|
#include "asmjs/WasmCode.h"
|
||||||
|
|
||||||
|
#include "mozilla/Atomics.h"
|
||||||
|
|
||||||
|
#include "asmjs/WasmSerialize.h"
|
||||||
|
#include "jit/ExecutableAllocator.h"
|
||||||
|
|
||||||
|
using namespace js;
|
||||||
|
using namespace js::jit;
|
||||||
|
using namespace js::wasm;
|
||||||
|
using mozilla::Atomic;
|
||||||
|
|
||||||
|
// Limit the number of concurrent wasm code allocations per process. Note that
|
||||||
|
// on Linux, the real maximum is ~32k, as each module requires 2 maps (RW/RX),
|
||||||
|
// and the kernel's default max_map_count is ~65k.
|
||||||
|
//
|
||||||
|
// Note: this can be removed once writable/non-executable global data stops
|
||||||
|
// being stored in the code segment.
|
||||||
|
static Atomic<uint32_t> wasmCodeAllocations(0);
|
||||||
|
static const uint32_t MaxWasmCodeAllocations = 16384;
|
||||||
|
|
||||||
|
static uint8_t*
|
||||||
|
AllocateCodeSegment(ExclusiveContext* cx, uint32_t totalLength)
|
||||||
|
{
|
||||||
|
if (wasmCodeAllocations >= MaxWasmCodeAllocations)
|
||||||
|
return nullptr;
|
||||||
|
|
||||||
|
// Allocate RW memory. DynamicallyLinkModule will reprotect the code as RX.
|
||||||
|
unsigned permissions =
|
||||||
|
ExecutableAllocator::initialProtectionFlags(ExecutableAllocator::Writable);
|
||||||
|
|
||||||
|
void* p = AllocateExecutableMemory(nullptr, totalLength, permissions,
|
||||||
|
"wasm-code-segment", gc::SystemPageSize());
|
||||||
|
if (!p) {
|
||||||
|
ReportOutOfMemory(cx);
|
||||||
|
return nullptr;
|
||||||
|
}
|
||||||
|
|
||||||
|
wasmCodeAllocations++;
|
||||||
|
return (uint8_t*)p;
|
||||||
|
}
|
||||||
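The comment above notes that AllocateCodeSegment hands back writable memory and that the code is only flipped to executable later ("DynamicallyLinkModule will reprotect the code as RX"). A standalone POSIX illustration of that allocate-writable, fill, then reprotect-executable sequence; mmap/mprotect here are generic stand-ins for AllocateExecutableMemory and the ExecutableAllocator protection flags:

    #include <sys/mman.h>
    #include <cstddef>

    // Allocate read/write pages for freshly generated code.
    void* AllocRwCode(size_t len) {
        void* p = mmap(nullptr, len, PROT_READ | PROT_WRITE,
                       MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
        return p == MAP_FAILED ? nullptr : p;
    }

    // Later, once linking has patched the code in place, flip the pages to read/execute.
    bool MakeExecutable(void* p, size_t len) {
        return mprotect(p, len, PROT_READ | PROT_EXEC) == 0;
    }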
|
|
||||||
|
/* static */ UniqueCodeSegment
|
||||||
|
CodeSegment::allocate(ExclusiveContext* cx, uint32_t codeLength, uint32_t globalDataLength)
|
||||||
|
{
|
||||||
|
UniqueCodeSegment code = cx->make_unique<CodeSegment>();
|
||||||
|
if (!code)
|
||||||
|
return nullptr;
|
||||||
|
|
||||||
|
uint8_t* bytes = AllocateCodeSegment(cx, codeLength + globalDataLength);
|
||||||
|
if (!bytes)
|
||||||
|
return nullptr;
|
||||||
|
|
||||||
|
code->bytes_ = bytes;
|
||||||
|
code->codeLength_ = codeLength;
|
||||||
|
code->globalDataLength_ = globalDataLength;
|
||||||
|
return code;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* static */ UniqueCodeSegment
|
||||||
|
CodeSegment::clone(ExclusiveContext* cx, const CodeSegment& src)
|
||||||
|
{
|
||||||
|
UniqueCodeSegment dst = allocate(cx, src.codeLength_, src.globalDataLength_);
|
||||||
|
if (!dst)
|
||||||
|
return nullptr;
|
||||||
|
|
||||||
|
memcpy(dst->code(), src.code(), src.codeLength());
|
||||||
|
return dst;
|
||||||
|
}
|
||||||
|
|
||||||
|
CodeSegment::~CodeSegment()
|
||||||
|
{
|
||||||
|
if (!bytes_) {
|
||||||
|
MOZ_ASSERT(!totalLength());
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
MOZ_ASSERT(wasmCodeAllocations > 0);
|
||||||
|
wasmCodeAllocations--;
|
||||||
|
|
||||||
|
MOZ_ASSERT(totalLength() > 0);
|
||||||
|
DeallocateExecutableMemory(bytes_, totalLength(), gc::SystemPageSize());
|
||||||
|
}
|
||||||
|
|
||||||
|
size_t
|
||||||
|
CodeSegment::serializedSize() const
|
||||||
|
{
|
||||||
|
return sizeof(uint32_t) +
|
||||||
|
sizeof(uint32_t) +
|
||||||
|
codeLength_;
|
||||||
|
}
|
||||||
|
|
||||||
|
uint8_t*
|
||||||
|
CodeSegment::serialize(uint8_t* cursor) const
|
||||||
|
{
|
||||||
|
cursor = WriteScalar<uint32_t>(cursor, codeLength_);
|
||||||
|
cursor = WriteScalar<uint32_t>(cursor, globalDataLength_);
|
||||||
|
cursor = WriteBytes(cursor, bytes_, codeLength_);
|
||||||
|
return cursor;
|
||||||
|
}
|
||||||
|
|
||||||
|
const uint8_t*
|
||||||
|
CodeSegment::deserialize(ExclusiveContext* cx, const uint8_t* cursor)
|
||||||
|
{
|
||||||
|
cursor = ReadScalar<uint32_t>(cursor, &codeLength_);
|
||||||
|
cursor = ReadScalar<uint32_t>(cursor, &globalDataLength_);
|
||||||
|
|
||||||
|
bytes_ = AllocateCodeSegment(cx, codeLength_ + globalDataLength_);
|
||||||
|
if (!bytes_)
|
||||||
|
return nullptr;
|
||||||
|
|
||||||
|
cursor = ReadBytes(cursor, bytes_, codeLength_);
|
||||||
|
return cursor;
|
||||||
|
}
|
||||||
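CodeSegment::serialize()/deserialize() above follow the cursor convention used throughout these files: serializedSize() sizes a flat buffer up front, serialize() writes and returns the advanced cursor, and deserialize() reads and returns nullptr on failure. A self-contained sketch of that convention; WriteBytesEx/ReadBytesEx are local stand-ins for the WriteBytes/ReadBytes helpers from WasmSerialize.h:

    #include <cstddef>
    #include <cstdint>
    #include <cstring>
    #include <vector>

    struct Pod { uint32_t a; uint32_t b; };

    static uint8_t* WriteBytesEx(uint8_t* cursor, const void* src, size_t n) {
        memcpy(cursor, src, n);
        return cursor + n;
    }
    static const uint8_t* ReadBytesEx(const uint8_t* cursor, void* dst, size_t n) {
        memcpy(dst, cursor, n);
        return cursor + n;
    }

    int main() {
        Pod in = {1, 2};
        Pod out = {0, 0};
        std::vector<uint8_t> buf(sizeof(Pod));                          // serializedSize()
        uint8_t* w = WriteBytesEx(buf.data(), &in, sizeof(in));         // serialize(): returns advanced cursor
        const uint8_t* r = ReadBytesEx(buf.data(), &out, sizeof(out));  // deserialize(): nullptr would mean failure
        return (w == buf.data() + buf.size() && r && out.a == 1 && out.b == 2) ? 0 : 1;
    }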
|
|
||||||
|
static size_t
|
||||||
|
SerializedSigSize(const Sig& sig)
|
||||||
|
{
|
||||||
|
return sizeof(ExprType) +
|
||||||
|
SerializedPodVectorSize(sig.args());
|
||||||
|
}
|
||||||
|
|
||||||
|
static uint8_t*
|
||||||
|
SerializeSig(uint8_t* cursor, const Sig& sig)
|
||||||
|
{
|
||||||
|
cursor = WriteScalar<ExprType>(cursor, sig.ret());
|
||||||
|
cursor = SerializePodVector(cursor, sig.args());
|
||||||
|
return cursor;
|
||||||
|
}
|
||||||
|
|
||||||
|
static const uint8_t*
|
||||||
|
DeserializeSig(ExclusiveContext* cx, const uint8_t* cursor, Sig* sig)
|
||||||
|
{
|
||||||
|
ExprType ret;
|
||||||
|
cursor = ReadScalar<ExprType>(cursor, &ret);
|
||||||
|
|
||||||
|
ValTypeVector args;
|
||||||
|
cursor = DeserializePodVector(cx, cursor, &args);
|
||||||
|
if (!cursor)
|
||||||
|
return nullptr;
|
||||||
|
|
||||||
|
*sig = Sig(Move(args), ret);
|
||||||
|
return cursor;
|
||||||
|
}
|
||||||
|
|
||||||
|
static size_t
|
||||||
|
SizeOfSigExcludingThis(const Sig& sig, MallocSizeOf mallocSizeOf)
|
||||||
|
{
|
||||||
|
return sig.args().sizeOfExcludingThis(mallocSizeOf);
|
||||||
|
}
|
||||||
|
|
||||||
|
size_t
|
||||||
|
Export::serializedSize() const
|
||||||
|
{
|
||||||
|
return SerializedSigSize(sig_) +
|
||||||
|
sizeof(pod);
|
||||||
|
}
|
||||||
|
|
||||||
|
uint8_t*
|
||||||
|
Export::serialize(uint8_t* cursor) const
|
||||||
|
{
|
||||||
|
cursor = SerializeSig(cursor, sig_);
|
||||||
|
cursor = WriteBytes(cursor, &pod, sizeof(pod));
|
||||||
|
return cursor;
|
||||||
|
}
|
||||||
|
|
||||||
|
const uint8_t*
|
||||||
|
Export::deserialize(ExclusiveContext* cx, const uint8_t* cursor)
|
||||||
|
{
|
||||||
|
(cursor = DeserializeSig(cx, cursor, &sig_)) &&
|
||||||
|
(cursor = ReadBytes(cursor, &pod, sizeof(pod)));
|
||||||
|
return cursor;
|
||||||
|
}
|
||||||
|
|
||||||
|
size_t
|
||||||
|
Export::sizeOfExcludingThis(MallocSizeOf mallocSizeOf) const
|
||||||
|
{
|
||||||
|
return SizeOfSigExcludingThis(sig_, mallocSizeOf);
|
||||||
|
}
|
||||||
|
|
||||||
|
size_t
|
||||||
|
Import::serializedSize() const
|
||||||
|
{
|
||||||
|
return SerializedSigSize(sig_) +
|
||||||
|
sizeof(pod);
|
||||||
|
}
|
||||||
|
|
||||||
|
uint8_t*
|
||||||
|
Import::serialize(uint8_t* cursor) const
|
||||||
|
{
|
||||||
|
cursor = SerializeSig(cursor, sig_);
|
||||||
|
cursor = WriteBytes(cursor, &pod, sizeof(pod));
|
||||||
|
return cursor;
|
||||||
|
}
|
||||||
|
|
||||||
|
const uint8_t*
|
||||||
|
Import::deserialize(ExclusiveContext* cx, const uint8_t* cursor)
|
||||||
|
{
|
||||||
|
(cursor = DeserializeSig(cx, cursor, &sig_)) &&
|
||||||
|
(cursor = ReadBytes(cursor, &pod, sizeof(pod)));
|
||||||
|
return cursor;
|
||||||
|
}
|
||||||
|
|
||||||
|
size_t
|
||||||
|
Import::sizeOfExcludingThis(MallocSizeOf mallocSizeOf) const
|
||||||
|
{
|
||||||
|
return SizeOfSigExcludingThis(sig_, mallocSizeOf);
|
||||||
|
}
|
||||||
|
|
||||||
|
CodeRange::CodeRange(Kind kind, Offsets offsets)
|
||||||
|
: begin_(offsets.begin),
|
||||||
|
profilingReturn_(0),
|
||||||
|
end_(offsets.end),
|
||||||
|
funcIndex_(0),
|
||||||
|
funcLineOrBytecode_(0),
|
||||||
|
funcBeginToTableEntry_(0),
|
||||||
|
funcBeginToTableProfilingJump_(0),
|
||||||
|
funcBeginToNonProfilingEntry_(0),
|
||||||
|
funcProfilingJumpToProfilingReturn_(0),
|
||||||
|
funcProfilingEpilogueToProfilingReturn_(0),
|
||||||
|
kind_(kind)
|
||||||
|
{
|
||||||
|
MOZ_ASSERT(begin_ <= end_);
|
||||||
|
MOZ_ASSERT(kind_ == Entry || kind_ == Inline || kind_ == CallThunk);
|
||||||
|
}
|
||||||
|
|
||||||
|
CodeRange::CodeRange(Kind kind, ProfilingOffsets offsets)
|
||||||
|
: begin_(offsets.begin),
|
||||||
|
profilingReturn_(offsets.profilingReturn),
|
||||||
|
end_(offsets.end),
|
||||||
|
funcIndex_(0),
|
||||||
|
funcLineOrBytecode_(0),
|
||||||
|
funcBeginToTableEntry_(0),
|
||||||
|
funcBeginToTableProfilingJump_(0),
|
||||||
|
funcBeginToNonProfilingEntry_(0),
|
||||||
|
funcProfilingJumpToProfilingReturn_(0),
|
||||||
|
funcProfilingEpilogueToProfilingReturn_(0),
|
||||||
|
kind_(kind)
|
||||||
|
{
|
||||||
|
MOZ_ASSERT(begin_ < profilingReturn_);
|
||||||
|
MOZ_ASSERT(profilingReturn_ < end_);
|
||||||
|
MOZ_ASSERT(kind_ == ImportJitExit || kind_ == ImportInterpExit);
|
||||||
|
}
|
||||||
|
|
||||||
|
CodeRange::CodeRange(uint32_t funcIndex, uint32_t funcLineOrBytecode, FuncOffsets offsets)
|
||||||
|
: begin_(offsets.begin),
|
||||||
|
profilingReturn_(offsets.profilingReturn),
|
||||||
|
end_(offsets.end),
|
||||||
|
funcIndex_(funcIndex),
|
||||||
|
funcLineOrBytecode_(funcLineOrBytecode),
|
||||||
|
funcBeginToTableEntry_(offsets.tableEntry - begin_),
|
||||||
|
funcBeginToTableProfilingJump_(offsets.tableProfilingJump - begin_),
|
||||||
|
funcBeginToNonProfilingEntry_(offsets.nonProfilingEntry - begin_),
|
||||||
|
funcProfilingJumpToProfilingReturn_(profilingReturn_ - offsets.profilingJump),
|
||||||
|
funcProfilingEpilogueToProfilingReturn_(profilingReturn_ - offsets.profilingEpilogue),
|
||||||
|
kind_(Function)
|
||||||
|
{
|
||||||
|
MOZ_ASSERT(begin_ < profilingReturn_);
|
||||||
|
MOZ_ASSERT(profilingReturn_ < end_);
|
||||||
|
MOZ_ASSERT(funcBeginToTableEntry_ == offsets.tableEntry - begin_);
|
||||||
|
MOZ_ASSERT(funcBeginToTableProfilingJump_ == offsets.tableProfilingJump - begin_);
|
||||||
|
MOZ_ASSERT(funcBeginToNonProfilingEntry_ == offsets.nonProfilingEntry - begin_);
|
||||||
|
MOZ_ASSERT(funcProfilingJumpToProfilingReturn_ == profilingReturn_ - offsets.profilingJump);
|
||||||
|
MOZ_ASSERT(funcProfilingEpilogueToProfilingReturn_ == profilingReturn_ - offsets.profilingEpilogue);
|
||||||
|
}
|
||||||
|
|
||||||
|
static size_t
|
||||||
|
NullableStringLength(const char* chars)
|
||||||
|
{
|
||||||
|
return chars ? strlen(chars) : 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
size_t
|
||||||
|
CacheableChars::serializedSize() const
|
||||||
|
{
|
||||||
|
return sizeof(uint32_t) + NullableStringLength(get());
|
||||||
|
}
|
||||||
|
|
||||||
|
uint8_t*
|
||||||
|
CacheableChars::serialize(uint8_t* cursor) const
|
||||||
|
{
|
||||||
|
uint32_t length = NullableStringLength(get());
|
||||||
|
cursor = WriteBytes(cursor, &length, sizeof(uint32_t));
|
||||||
|
cursor = WriteBytes(cursor, get(), length);
|
||||||
|
return cursor;
|
||||||
|
}
|
||||||
|
|
||||||
|
const uint8_t*
|
||||||
|
CacheableChars::deserialize(ExclusiveContext* cx, const uint8_t* cursor)
|
||||||
|
{
|
||||||
|
uint32_t length;
|
||||||
|
cursor = ReadBytes(cursor, &length, sizeof(uint32_t));
|
||||||
|
|
||||||
|
reset(cx->pod_calloc<char>(length + 1));
|
||||||
|
if (!get())
|
||||||
|
return nullptr;
|
||||||
|
|
||||||
|
cursor = ReadBytes(cursor, get(), length);
|
||||||
|
return cursor;
|
||||||
|
}
|
||||||
|
|
||||||
|
size_t
|
||||||
|
CacheableChars::sizeOfExcludingThis(MallocSizeOf mallocSizeOf) const
|
||||||
|
{
|
||||||
|
return mallocSizeOf(get());
|
||||||
|
}
|
||||||
|
|
||||||
|
size_t
|
||||||
|
Metadata::serializedSize() const
|
||||||
|
{
|
||||||
|
return sizeof(pod()) +
|
||||||
|
SerializedVectorSize(imports) +
|
||||||
|
SerializedVectorSize(exports) +
|
||||||
|
SerializedPodVectorSize(heapAccesses) +
|
||||||
|
SerializedPodVectorSize(codeRanges) +
|
||||||
|
SerializedPodVectorSize(callSites) +
|
||||||
|
SerializedPodVectorSize(callThunks) +
|
||||||
|
SerializedVectorSize(prettyFuncNames) +
|
||||||
|
filename.serializedSize();
|
||||||
|
}
|
||||||
|
|
||||||
|
uint8_t*
|
||||||
|
Metadata::serialize(uint8_t* cursor) const
|
||||||
|
{
|
||||||
|
cursor = WriteBytes(cursor, &pod(), sizeof(pod()));
|
||||||
|
cursor = SerializeVector(cursor, imports);
|
||||||
|
cursor = SerializeVector(cursor, exports);
|
||||||
|
cursor = SerializePodVector(cursor, heapAccesses);
|
||||||
|
cursor = SerializePodVector(cursor, codeRanges);
|
||||||
|
cursor = SerializePodVector(cursor, callSites);
|
||||||
|
cursor = SerializePodVector(cursor, callThunks);
|
||||||
|
cursor = SerializeVector(cursor, prettyFuncNames);
|
||||||
|
cursor = filename.serialize(cursor);
|
||||||
|
return cursor;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* static */ const uint8_t*
|
||||||
|
Metadata::deserialize(ExclusiveContext* cx, const uint8_t* cursor)
|
||||||
|
{
|
||||||
|
(cursor = ReadBytes(cursor, &pod(), sizeof(pod()))) &&
|
||||||
|
(cursor = DeserializeVector(cx, cursor, &imports)) &&
|
||||||
|
(cursor = DeserializeVector(cx, cursor, &exports)) &&
|
||||||
|
(cursor = DeserializePodVector(cx, cursor, &heapAccesses)) &&
|
||||||
|
(cursor = DeserializePodVector(cx, cursor, &codeRanges)) &&
|
||||||
|
(cursor = DeserializePodVector(cx, cursor, &callSites)) &&
|
||||||
|
(cursor = DeserializePodVector(cx, cursor, &callThunks)) &&
|
||||||
|
(cursor = DeserializeVector(cx, cursor, &prettyFuncNames)) &&
|
||||||
|
(cursor = filename.deserialize(cx, cursor));
|
||||||
|
return cursor;
|
||||||
|
}
|
||||||
|
|
||||||
|
size_t
|
||||||
|
Metadata::sizeOfExcludingThis(MallocSizeOf mallocSizeOf) const
|
||||||
|
{
|
||||||
|
return SizeOfVectorExcludingThis(imports, mallocSizeOf) +
|
||||||
|
SizeOfVectorExcludingThis(exports, mallocSizeOf) +
|
||||||
|
heapAccesses.sizeOfExcludingThis(mallocSizeOf) +
|
||||||
|
codeRanges.sizeOfExcludingThis(mallocSizeOf) +
|
||||||
|
callSites.sizeOfExcludingThis(mallocSizeOf) +
|
||||||
|
callThunks.sizeOfExcludingThis(mallocSizeOf) +
|
||||||
|
SizeOfVectorExcludingThis(prettyFuncNames, mallocSizeOf) +
|
||||||
|
filename.sizeOfExcludingThis(mallocSizeOf);
|
||||||
|
}

@@ -0,0 +1,342 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sts=4 et sw=4 tw=99:
 *
 * Copyright 2016 Mozilla Foundation
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef wasm_code_h
#define wasm_code_h

#include "asmjs/WasmTypes.h"

namespace js {
namespace wasm {

// A wasm CodeSegment owns the allocated executable code for a wasm module. A
// CodeSegment passed to the Module constructor must be allocated via allocate().

class CodeSegment;
typedef UniquePtr<CodeSegment> UniqueCodeSegment;

class CodeSegment
{
    uint8_t* bytes_;
    uint32_t codeLength_;
    uint32_t globalDataLength_;

    CodeSegment(const CodeSegment&) = delete;
    void operator=(const CodeSegment&) = delete;

  public:
    static UniqueCodeSegment allocate(ExclusiveContext* cx, uint32_t codeLength, uint32_t dataLength);
    static UniqueCodeSegment clone(ExclusiveContext* cx, const CodeSegment& code);
    CodeSegment() : bytes_(nullptr), codeLength_(0), globalDataLength_(0) {}
    ~CodeSegment();

    uint8_t* code() const { return bytes_; }
    uint8_t* globalData() const { return bytes_ + codeLength_; }
    uint32_t codeLength() const { return codeLength_; }
    uint32_t globalDataLength() const { return globalDataLength_; }
    uint32_t totalLength() const { return codeLength_ + globalDataLength_; }

    WASM_DECLARE_SERIALIZABLE(CodeSegment)
};
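
The accessors above encode a single layout invariant: one allocation holds the machine code first and the global data area immediately after it. A small, purely illustrative check of what that implies for callers:

static void
CheckCodeSegmentLayout(const CodeSegment& cs)
{
    // globalData() is defined as bytes_ + codeLength_, so the global data area
    // begins exactly where the code ends, and the two lengths sum to the total.
    MOZ_ASSERT(cs.globalData() == cs.code() + cs.codeLength());
    MOZ_ASSERT(cs.totalLength() == cs.codeLength() + cs.globalDataLength());
}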

// An Export represents a single function inside a wasm Module that has been
// exported one or more times.

class Export
{
    Sig sig_;
    struct CacheablePod {
        uint32_t stubOffset_;
    } pod;

  public:
    Export() = default;
    explicit Export(Sig&& sig)
      : sig_(Move(sig))
    {
        pod.stubOffset_ = UINT32_MAX;
    }
    void initStubOffset(uint32_t stubOffset) {
        MOZ_ASSERT(pod.stubOffset_ == UINT32_MAX);
        pod.stubOffset_ = stubOffset;
    }

    uint32_t stubOffset() const {
        return pod.stubOffset_;
    }
    const Sig& sig() const {
        return sig_;
    }

    WASM_DECLARE_SERIALIZABLE(Export)
};

typedef Vector<Export, 0, SystemAllocPolicy> ExportVector;

// An Import describes a wasm module import. Currently, only functions can be
// imported in wasm. A function import includes the signature used within the
// module to call it.

class Import
{
    Sig sig_;
    struct CacheablePod {
        uint32_t exitGlobalDataOffset_;
        uint32_t interpExitCodeOffset_;
        uint32_t jitExitCodeOffset_;
    } pod;

  public:
    Import() = default;
    Import(Sig&& sig, uint32_t exitGlobalDataOffset)
      : sig_(Move(sig))
    {
        pod.exitGlobalDataOffset_ = exitGlobalDataOffset;
        pod.interpExitCodeOffset_ = 0;
        pod.jitExitCodeOffset_ = 0;
    }

    void initInterpExitOffset(uint32_t off) {
        MOZ_ASSERT(!pod.interpExitCodeOffset_);
        pod.interpExitCodeOffset_ = off;
    }
    void initJitExitOffset(uint32_t off) {
        MOZ_ASSERT(!pod.jitExitCodeOffset_);
        pod.jitExitCodeOffset_ = off;
    }

    const Sig& sig() const {
        return sig_;
    }
    uint32_t exitGlobalDataOffset() const {
        return pod.exitGlobalDataOffset_;
    }
    uint32_t interpExitCodeOffset() const {
        return pod.interpExitCodeOffset_;
    }
    uint32_t jitExitCodeOffset() const {
        return pod.jitExitCodeOffset_;
    }

    WASM_DECLARE_SERIALIZABLE(Import)
};

typedef Vector<Import, 0, SystemAllocPolicy> ImportVector;

// A CodeRange describes a single contiguous range of code within a wasm
// module's code segment. A CodeRange describes what the code does and, for
// function bodies, the name and source coordinates of the function.

class CodeRange
{
  public:
    enum Kind { Function, Entry, ImportJitExit, ImportInterpExit, Inline, CallThunk };

  private:
    // All fields are treated as cacheable POD:
    uint32_t begin_;
    uint32_t profilingReturn_;
    uint32_t end_;
    uint32_t funcIndex_;
    uint32_t funcLineOrBytecode_;
    uint8_t funcBeginToTableEntry_;
    uint8_t funcBeginToTableProfilingJump_;
    uint8_t funcBeginToNonProfilingEntry_;
    uint8_t funcProfilingJumpToProfilingReturn_;
    uint8_t funcProfilingEpilogueToProfilingReturn_;
    Kind kind_ : 8;

  public:
    CodeRange() = default;
    CodeRange(Kind kind, Offsets offsets);
    CodeRange(Kind kind, ProfilingOffsets offsets);
    CodeRange(uint32_t funcIndex, uint32_t lineOrBytecode, FuncOffsets offsets);

    // All CodeRanges have a begin and end.

    uint32_t begin() const {
        return begin_;
    }
    uint32_t end() const {
        return end_;
    }

    // Other fields are only available for certain CodeRange::Kinds.

    Kind kind() const {
        return kind_;
    }

    bool isFunction() const {
        return kind() == Function;
    }
    bool isImportExit() const {
        return kind() == ImportJitExit || kind() == ImportInterpExit;
    }
    bool isInline() const {
        return kind() == Inline;
    }

    // Every CodeRange except entry and inline stubs has a profiling return
    // which is used for asynchronous profiling to determine the frame pointer.

    uint32_t profilingReturn() const {
        MOZ_ASSERT(isFunction() || isImportExit());
        return profilingReturn_;
    }

    // Functions have offsets which allow patching to selectively execute
    // profiling prologues/epilogues.

    uint32_t funcProfilingEntry() const {
        MOZ_ASSERT(isFunction());
        return begin();
    }
    uint32_t funcTableEntry() const {
        MOZ_ASSERT(isFunction());
        return begin_ + funcBeginToTableEntry_;
    }
    uint32_t funcTableProfilingJump() const {
        MOZ_ASSERT(isFunction());
        return begin_ + funcBeginToTableProfilingJump_;
    }
    uint32_t funcNonProfilingEntry() const {
        MOZ_ASSERT(isFunction());
        return begin_ + funcBeginToNonProfilingEntry_;
    }
    uint32_t funcProfilingJump() const {
        MOZ_ASSERT(isFunction());
        return profilingReturn_ - funcProfilingJumpToProfilingReturn_;
    }
    uint32_t funcProfilingEpilogue() const {
        MOZ_ASSERT(isFunction());
        return profilingReturn_ - funcProfilingEpilogueToProfilingReturn_;
    }
    uint32_t funcIndex() const {
        MOZ_ASSERT(isFunction());
        return funcIndex_;
    }
    uint32_t funcLineOrBytecode() const {
        MOZ_ASSERT(isFunction());
        return funcLineOrBytecode_;
    }

    // A sorted array of CodeRanges can be looked up via BinarySearch and PC.

    struct PC {
        size_t offset;
        explicit PC(size_t offset) : offset(offset) {}
        bool operator==(const CodeRange& rhs) const {
            return offset >= rhs.begin() && offset < rhs.end();
        }
        bool operator<(const CodeRange& rhs) const {
            return offset < rhs.begin();
        }
    };
};

WASM_DECLARE_POD_VECTOR(CodeRange, CodeRangeVector)
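
The PC helper above exists so a sorted CodeRangeVector can be probed with mozilla::BinarySearch: operator== reports containment and operator< orders a raw offset against a range. A hedged sketch of such a lookup (illustrative name; the in-tree caller would first translate a raw pc into an offset within the code segment):

static const CodeRange*
LookupCodeRange(const CodeRangeVector& codeRanges, size_t pcOffset)
{
    size_t match;
    if (!mozilla::BinarySearch(codeRanges, 0, codeRanges.length(), CodeRange::PC(pcOffset), &match))
        return nullptr;   // pcOffset is not inside any emitted range
    return &codeRanges[match];
}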

// A CallThunk describes the offset and target of thunks so that they may be
// patched at runtime when profiling is toggled. Thunks are emitted to connect
// callsites that are too far away from callees to fit in a single call
// instruction's relative offset.

struct CallThunk
{
    uint32_t offset;
    union {
        uint32_t funcIndex;
        uint32_t codeRangeIndex;
    } u;

    CallThunk(uint32_t offset, uint32_t funcIndex) : offset(offset) { u.funcIndex = funcIndex; }
    CallThunk() = default;
};

WASM_DECLARE_POD_VECTOR(CallThunk, CallThunkVector)
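
Once code ranges are final, u.funcIndex is rewritten to u.codeRangeIndex and the thunk's jump is patched through the callee's CodeRange (see the masm_.patchThunk call in the generator changes below). A minimal sketch of the target computation the patching relies on, assuming only the CodeRange accessors above (the helper name is illustrative):

static uint32_t
ThunkTarget(const CallThunk& thunk, const CodeRangeVector& codeRanges, bool profilingEnabled)
{
    const CodeRange& codeRange = codeRanges[thunk.u.codeRangeIndex];
    // The thunk jumps to the profiling entry while profiling is on and to the
    // plain entry otherwise.
    return profilingEnabled ? codeRange.funcProfilingEntry()
                            : codeRange.funcNonProfilingEntry();
}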

// CacheableChars is used to cacheably store UniqueChars.

struct CacheableChars : UniqueChars
{
    CacheableChars() = default;
    explicit CacheableChars(char* ptr) : UniqueChars(ptr) {}
    MOZ_IMPLICIT CacheableChars(UniqueChars&& rhs) : UniqueChars(Move(rhs)) {}
    WASM_DECLARE_SERIALIZABLE(CacheableChars)
};

typedef Vector<CacheableChars, 0, SystemAllocPolicy> CacheableCharsVector;

// A wasm module can either use no heap, an unshared heap (ArrayBuffer) or a
// shared heap (SharedArrayBuffer).

enum class HeapUsage
{
    None = false,
    Unshared = 1,
    Shared = 2
};

static inline bool
UsesHeap(HeapUsage heapUsage)
{
    return bool(heapUsage);
}

// Metadata holds all the data that is needed to describe compiled wasm code
// at runtime (as opposed to data that is only used to statically link or
// instantiate a module).
//
// Metadata is built incrementally by ModuleGenerator and then shared immutably
// between modules.

struct MetadataCacheablePod
{
    uint32_t functionLength;
    ModuleKind kind;
    HeapUsage heapUsage;
    CompileArgs compileArgs;

    MetadataCacheablePod() { mozilla::PodZero(this); }
};

struct Metadata : RefCounted<Metadata>, MetadataCacheablePod
{
    MetadataCacheablePod& pod() { return *this; }
    const MetadataCacheablePod& pod() const { return *this; }

    ImportVector imports;
    ExportVector exports;
    HeapAccessVector heapAccesses;
    CodeRangeVector codeRanges;
    CallSiteVector callSites;
    CallThunkVector callThunks;
    CacheableCharsVector prettyFuncNames;
    CacheableChars filename;

    WASM_DECLARE_SERIALIZABLE(Metadata);
};

typedef RefPtr<Metadata> MutableMetadata;
typedef RefPtr<const Metadata> SharedMetadata;

} // namespace wasm
} // namespace js

#endif // wasm_code_h
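
Metadata is reference-counted so one immutable instance can back many modules. A sketch of the intended hand-off, mirroring the ModuleGenerator changes below (illustrative only, not literal patch code):

MutableMetadata metadata = js_new<Metadata>();
if (!metadata)
    return false;
metadata->kind = ModuleKind::Wasm;           // POD fields filled while compiling
// ... populate imports, exports, codeRanges, callSites during codegen ...
SharedMetadata shared = metadata.forget();   // from here on, only const access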
@ -40,6 +40,7 @@ static const unsigned COMPILATION_LIFO_DEFAULT_CHUNK_SIZE = 64 * 1024;
|
||||||
ModuleGenerator::ModuleGenerator(ExclusiveContext* cx)
|
ModuleGenerator::ModuleGenerator(ExclusiveContext* cx)
|
||||||
: cx_(cx),
|
: cx_(cx),
|
||||||
jcx_(CompileRuntime::get(cx->compartment()->runtimeFromAnyThread())),
|
jcx_(CompileRuntime::get(cx->compartment()->runtimeFromAnyThread())),
|
||||||
|
globalDataLength_(InitialGlobalDataBytes),
|
||||||
slowFuncs_(cx),
|
slowFuncs_(cx),
|
||||||
numSigs_(0),
|
numSigs_(0),
|
||||||
lifo_(GENERATOR_LIFO_DEFAULT_CHUNK_SIZE),
|
lifo_(GENERATOR_LIFO_DEFAULT_CHUNK_SIZE),
|
||||||
|
@ -115,17 +116,16 @@ ModuleGenerator::init(UniqueModuleGeneratorData shared, UniqueChars filename)
|
||||||
if (!funcIndexToExport_.init())
|
if (!funcIndexToExport_.init())
|
||||||
return false;
|
return false;
|
||||||
|
|
||||||
module_ = MakeUnique<ModuleData>();
|
metadata_ = js_new<Metadata>();
|
||||||
if (!module_)
|
if (!metadata_)
|
||||||
return false;
|
return false;
|
||||||
|
|
||||||
module_->globalBytes = InitialGlobalDataBytes;
|
metadata_->compileArgs = shared->args;
|
||||||
module_->compileArgs = shared->args;
|
metadata_->kind = shared->kind;
|
||||||
module_->kind = shared->kind;
|
metadata_->heapUsage = HeapUsage::None;
|
||||||
module_->heapUsage = HeapUsage::None;
|
metadata_->filename = Move(filename);
|
||||||
module_->filename = Move(filename);
|
|
||||||
|
|
||||||
exportMap_ = MakeUnique<ExportMap>();
|
exportMap_ = js_new<ExportMap>();
|
||||||
if (!exportMap_)
|
if (!exportMap_)
|
||||||
return false;
|
return false;
|
||||||
|
|
||||||
|
@ -136,24 +136,24 @@ ModuleGenerator::init(UniqueModuleGeneratorData shared, UniqueChars filename)
|
||||||
// module is generated. For wasm, the Vectors are correctly-sized and
|
// module is generated. For wasm, the Vectors are correctly-sized and
|
||||||
// already initialized.
|
// already initialized.
|
||||||
|
|
||||||
if (module_->kind == ModuleKind::Wasm) {
|
if (metadata_->kind == ModuleKind::Wasm) {
|
||||||
numSigs_ = shared_->sigs.length();
|
numSigs_ = shared_->sigs.length();
|
||||||
module_->globalBytes = AlignBytes(module_->globalBytes, sizeof(void*));
|
globalDataLength_ = AlignBytes(globalDataLength_, sizeof(void*));
|
||||||
|
|
||||||
for (ImportModuleGeneratorData& import : shared_->imports) {
|
for (ImportModuleGeneratorData& import : shared_->imports) {
|
||||||
MOZ_ASSERT(!import.globalDataOffset);
|
MOZ_ASSERT(!import.globalDataOffset);
|
||||||
import.globalDataOffset = module_->globalBytes;
|
import.globalDataOffset = globalDataLength_;
|
||||||
module_->globalBytes += Module::SizeOfImportExit;
|
globalDataLength_ += Module::SizeOfImportExit;
|
||||||
if (!addImport(*import.sig, import.globalDataOffset))
|
if (!addImport(*import.sig, import.globalDataOffset))
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
MOZ_ASSERT(module_->globalBytes % sizeof(void*) == 0);
|
MOZ_ASSERT(globalDataLength_ % sizeof(void*) == 0);
|
||||||
MOZ_ASSERT(shared_->asmJSSigToTable.empty());
|
MOZ_ASSERT(shared_->asmJSSigToTable.empty());
|
||||||
MOZ_ASSERT(shared_->wasmTable.numElems == shared_->wasmTable.elemFuncIndices.length());
|
MOZ_ASSERT(shared_->wasmTable.numElems == shared_->wasmTable.elemFuncIndices.length());
|
||||||
MOZ_ASSERT(!shared_->wasmTable.globalDataOffset);
|
MOZ_ASSERT(!shared_->wasmTable.globalDataOffset);
|
||||||
shared_->wasmTable.globalDataOffset = module_->globalBytes;
|
shared_->wasmTable.globalDataOffset = globalDataLength_;
|
||||||
module_->globalBytes += shared_->wasmTable.numElems * sizeof(void*);
|
globalDataLength_ += shared_->wasmTable.numElems * sizeof(void*);
|
||||||
}
|
}
|
||||||
|
|
||||||
return true;
|
return true;
|
||||||
|
@ -199,7 +199,7 @@ const CodeRange&
|
||||||
ModuleGenerator::funcCodeRange(uint32_t funcIndex) const
|
ModuleGenerator::funcCodeRange(uint32_t funcIndex) const
|
||||||
{
|
{
|
||||||
MOZ_ASSERT(funcIsDefined(funcIndex));
|
MOZ_ASSERT(funcIsDefined(funcIndex));
|
||||||
const CodeRange& cr = module_->codeRanges[funcIndexToCodeRange_[funcIndex]];
|
const CodeRange& cr = metadata_->codeRanges[funcIndexToCodeRange_[funcIndex]];
|
||||||
MOZ_ASSERT(cr.isFunction());
|
MOZ_ASSERT(cr.isFunction());
|
||||||
return cr;
|
return cr;
|
||||||
}
|
}
|
||||||
|
@ -251,9 +251,9 @@ ModuleGenerator::convertOutOfRangeBranchesToThunks()
|
||||||
return false;
|
return false;
|
||||||
offsets.end = masm_.currentOffset();
|
offsets.end = masm_.currentOffset();
|
||||||
|
|
||||||
if (!module_->codeRanges.emplaceBack(CodeRange::CallThunk, offsets))
|
if (!metadata_->codeRanges.emplaceBack(CodeRange::CallThunk, offsets))
|
||||||
return false;
|
return false;
|
||||||
if (!module_->callThunks.emplaceBack(thunkOffset, cs.targetIndex()))
|
if (!metadata_->callThunks.emplaceBack(thunkOffset, cs.targetIndex()))
|
||||||
return false;
|
return false;
|
||||||
if (!alreadyThunked.add(p, cs.targetIndex(), offsets.begin))
|
if (!alreadyThunked.add(p, cs.targetIndex(), offsets.begin))
|
||||||
return false;
|
return false;
|
||||||
|
@ -282,7 +282,7 @@ ModuleGenerator::convertOutOfRangeBranchesToThunks()
|
||||||
return false;
|
return false;
|
||||||
offsets.end = masm_.currentOffset();
|
offsets.end = masm_.currentOffset();
|
||||||
|
|
||||||
if (!module_->codeRanges.emplaceBack(CodeRange::Inline, offsets))
|
if (!metadata_->codeRanges.emplaceBack(CodeRange::Inline, offsets))
|
||||||
return false;
|
return false;
|
||||||
if (!jumpThunks_[target].append(thunkOffset))
|
if (!jumpThunks_[target].append(thunkOffset))
|
||||||
return false;
|
return false;
|
||||||
|
@ -316,8 +316,8 @@ ModuleGenerator::finishTask(IonCompileTask* task)
|
||||||
results.offsets().offsetBy(offsetInWhole);
|
results.offsets().offsetBy(offsetInWhole);
|
||||||
|
|
||||||
// Add the CodeRange for this function.
|
// Add the CodeRange for this function.
|
||||||
uint32_t funcCodeRangeIndex = module_->codeRanges.length();
|
uint32_t funcCodeRangeIndex = metadata_->codeRanges.length();
|
||||||
if (!module_->codeRanges.emplaceBack(func.index(), func.lineOrBytecode(), results.offsets()))
|
if (!metadata_->codeRanges.emplaceBack(func.index(), func.lineOrBytecode(), results.offsets()))
|
||||||
return false;
|
return false;
|
||||||
|
|
||||||
// Maintain a mapping from function index to CodeRange index.
|
// Maintain a mapping from function index to CodeRange index.
|
||||||
|
@ -369,7 +369,7 @@ ModuleGenerator::finishCodegen(StaticLinkData* link)
|
||||||
return false;
|
return false;
|
||||||
for (uint32_t i = 0; i < numExports(); i++) {
|
for (uint32_t i = 0; i < numExports(); i++) {
|
||||||
uint32_t target = exportMap_->exportFuncIndices[i];
|
uint32_t target = exportMap_->exportFuncIndices[i];
|
||||||
const Sig& sig = module_->exports[i].sig();
|
const Sig& sig = metadata_->exports[i].sig();
|
||||||
entries[i] = GenerateEntry(masm, target, sig, usesHeap());
|
entries[i] = GenerateEntry(masm, target, sig, usesHeap());
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -378,8 +378,8 @@ ModuleGenerator::finishCodegen(StaticLinkData* link)
|
||||||
if (!jitExits.resize(numImports()))
|
if (!jitExits.resize(numImports()))
|
||||||
return false;
|
return false;
|
||||||
for (uint32_t i = 0; i < numImports(); i++) {
|
for (uint32_t i = 0; i < numImports(); i++) {
|
||||||
interpExits[i] = GenerateInterpExit(masm, module_->imports[i], i);
|
interpExits[i] = GenerateInterpExit(masm, metadata_->imports[i], i);
|
||||||
jitExits[i] = GenerateJitExit(masm, module_->imports[i], usesHeap());
|
jitExits[i] = GenerateJitExit(masm, metadata_->imports[i], usesHeap());
|
||||||
}
|
}
|
||||||
|
|
||||||
for (JumpTarget target : MakeEnumeratedRange(JumpTarget::Limit))
|
for (JumpTarget target : MakeEnumeratedRange(JumpTarget::Limit))
|
||||||
|
@ -396,31 +396,31 @@ ModuleGenerator::finishCodegen(StaticLinkData* link)
|
||||||
|
|
||||||
for (uint32_t i = 0; i < numExports(); i++) {
|
for (uint32_t i = 0; i < numExports(); i++) {
|
||||||
entries[i].offsetBy(offsetInWhole);
|
entries[i].offsetBy(offsetInWhole);
|
||||||
module_->exports[i].initStubOffset(entries[i].begin);
|
metadata_->exports[i].initStubOffset(entries[i].begin);
|
||||||
if (!module_->codeRanges.emplaceBack(CodeRange::Entry, entries[i]))
|
if (!metadata_->codeRanges.emplaceBack(CodeRange::Entry, entries[i]))
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
for (uint32_t i = 0; i < numImports(); i++) {
|
for (uint32_t i = 0; i < numImports(); i++) {
|
||||||
interpExits[i].offsetBy(offsetInWhole);
|
interpExits[i].offsetBy(offsetInWhole);
|
||||||
module_->imports[i].initInterpExitOffset(interpExits[i].begin);
|
metadata_->imports[i].initInterpExitOffset(interpExits[i].begin);
|
||||||
if (!module_->codeRanges.emplaceBack(CodeRange::ImportInterpExit, interpExits[i]))
|
if (!metadata_->codeRanges.emplaceBack(CodeRange::ImportInterpExit, interpExits[i]))
|
||||||
return false;
|
return false;
|
||||||
|
|
||||||
jitExits[i].offsetBy(offsetInWhole);
|
jitExits[i].offsetBy(offsetInWhole);
|
||||||
module_->imports[i].initJitExitOffset(jitExits[i].begin);
|
metadata_->imports[i].initJitExitOffset(jitExits[i].begin);
|
||||||
if (!module_->codeRanges.emplaceBack(CodeRange::ImportJitExit, jitExits[i]))
|
if (!metadata_->codeRanges.emplaceBack(CodeRange::ImportJitExit, jitExits[i]))
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
for (JumpTarget target : MakeEnumeratedRange(JumpTarget::Limit)) {
|
for (JumpTarget target : MakeEnumeratedRange(JumpTarget::Limit)) {
|
||||||
jumpTargets[target].offsetBy(offsetInWhole);
|
jumpTargets[target].offsetBy(offsetInWhole);
|
||||||
if (!module_->codeRanges.emplaceBack(CodeRange::Inline, jumpTargets[target]))
|
if (!metadata_->codeRanges.emplaceBack(CodeRange::Inline, jumpTargets[target]))
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
|
||||||
interruptExit.offsetBy(offsetInWhole);
|
interruptExit.offsetBy(offsetInWhole);
|
||||||
if (!module_->codeRanges.emplaceBack(CodeRange::Inline, interruptExit))
|
if (!metadata_->codeRanges.emplaceBack(CodeRange::Inline, interruptExit))
|
||||||
return false;
|
return false;
|
||||||
|
|
||||||
// Fill in StaticLinkData with the offsets of these stubs.
|
// Fill in StaticLinkData with the offsets of these stubs.
|
||||||
|
@ -436,7 +436,7 @@ ModuleGenerator::finishCodegen(StaticLinkData* link)
|
||||||
|
|
||||||
// Now that all thunks have been generated, patch all the thunks.
|
// Now that all thunks have been generated, patch all the thunks.
|
||||||
|
|
||||||
for (CallThunk& callThunk : module_->callThunks) {
|
for (CallThunk& callThunk : metadata_->callThunks) {
|
||||||
uint32_t funcIndex = callThunk.u.funcIndex;
|
uint32_t funcIndex = callThunk.u.funcIndex;
|
||||||
callThunk.u.codeRangeIndex = funcIndexToCodeRange_[funcIndex];
|
callThunk.u.codeRangeIndex = funcIndexToCodeRange_[funcIndex];
|
||||||
masm_.patchThunk(callThunk.offset, funcCodeRange(funcIndex).funcNonProfilingEntry());
|
masm_.patchThunk(callThunk.offset, funcCodeRange(funcIndex).funcNonProfilingEntry());
|
||||||
|
@ -454,7 +454,7 @@ ModuleGenerator::finishCodegen(StaticLinkData* link)
|
||||||
}
|
}
|
||||||
|
|
||||||
bool
|
bool
|
||||||
ModuleGenerator::finishStaticLinkData(uint8_t* code, uint32_t codeBytes, StaticLinkData* link)
|
ModuleGenerator::finishStaticLinkData(uint8_t* code, uint32_t codeLength, StaticLinkData* link)
|
||||||
{
|
{
|
||||||
// Add links to absolute addresses identified symbolically.
|
// Add links to absolute addresses identified symbolically.
|
||||||
StaticLinkData::SymbolicLinkArray& symbolicLinks = link->symbolicLinks;
|
StaticLinkData::SymbolicLinkArray& symbolicLinks = link->symbolicLinks;
|
||||||
|
@ -486,7 +486,7 @@ ModuleGenerator::finishStaticLinkData(uint8_t* code, uint32_t codeBytes, StaticL
|
||||||
AsmJSGlobalAccess a = masm_.asmJSGlobalAccess(i);
|
AsmJSGlobalAccess a = masm_.asmJSGlobalAccess(i);
|
||||||
StaticLinkData::InternalLink inLink(StaticLinkData::InternalLink::RawPointer);
|
StaticLinkData::InternalLink inLink(StaticLinkData::InternalLink::RawPointer);
|
||||||
inLink.patchAtOffset = masm_.labelToPatchOffset(a.patchAt);
|
inLink.patchAtOffset = masm_.labelToPatchOffset(a.patchAt);
|
||||||
inLink.targetOffset = codeBytes + a.globalDataOffset;
|
inLink.targetOffset = codeLength + a.globalDataOffset;
|
||||||
if (!link->internalLinks.append(inLink))
|
if (!link->internalLinks.append(inLink))
|
||||||
return false;
|
return false;
|
||||||
}
|
}
|
||||||
|
@ -495,7 +495,7 @@ ModuleGenerator::finishStaticLinkData(uint8_t* code, uint32_t codeBytes, StaticL
|
||||||
#if defined(JS_CODEGEN_X64)
|
#if defined(JS_CODEGEN_X64)
|
||||||
// Global data accesses on x64 use rip-relative addressing and thus do
|
// Global data accesses on x64 use rip-relative addressing and thus do
|
||||||
// not need patching after deserialization.
|
// not need patching after deserialization.
|
||||||
uint8_t* globalData = code + codeBytes;
|
uint8_t* globalData = code + codeLength;
|
||||||
for (size_t i = 0; i < masm_.numAsmJSGlobalAccesses(); i++) {
|
for (size_t i = 0; i < masm_.numAsmJSGlobalAccesses(); i++) {
|
||||||
AsmJSGlobalAccess a = masm_.asmJSGlobalAccess(i);
|
AsmJSGlobalAccess a = masm_.asmJSGlobalAccess(i);
|
||||||
masm_.patchAsmJSGlobalAccess(a.patchAt, code, globalData, a.globalDataOffset);
|
masm_.patchAsmJSGlobalAccess(a.patchAt, code, globalData, a.globalDataOffset);
|
||||||
|
@ -541,23 +541,20 @@ ModuleGenerator::addImport(const Sig& sig, uint32_t globalDataOffset)
|
||||||
if (!copy.clone(sig))
|
if (!copy.clone(sig))
|
||||||
return false;
|
return false;
|
||||||
|
|
||||||
return module_->imports.emplaceBack(Move(copy), globalDataOffset);
|
return metadata_->imports.emplaceBack(Move(copy), globalDataOffset);
|
||||||
}
|
}
|
||||||
|
|
||||||
bool
|
bool
|
||||||
ModuleGenerator::allocateGlobalBytes(uint32_t bytes, uint32_t align, uint32_t* globalDataOffset)
|
ModuleGenerator::allocateGlobalBytes(uint32_t bytes, uint32_t align, uint32_t* globalDataOffset)
|
||||||
{
|
{
|
||||||
uint32_t globalBytes = module_->globalBytes;
|
uint32_t pad = ComputeByteAlignment(globalDataLength_, align);
|
||||||
|
if (UINT32_MAX - globalDataLength_ < pad + bytes)
|
||||||
uint32_t pad = ComputeByteAlignment(globalBytes, align);
|
|
||||||
if (UINT32_MAX - globalBytes < pad + bytes)
|
|
||||||
return false;
|
return false;
|
||||||
|
|
||||||
globalBytes += pad;
|
globalDataLength_ += pad;
|
||||||
*globalDataOffset = globalBytes;
|
*globalDataOffset = globalDataLength_;
|
||||||
globalBytes += bytes;
|
globalDataLength_ += bytes;
|
||||||
|
|
||||||
module_->globalBytes = globalBytes;
|
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -600,15 +597,15 @@ ModuleGenerator::allocateGlobal(ValType type, bool isConst, uint32_t* index)
|
||||||
void
|
void
|
||||||
ModuleGenerator::initHeapUsage(HeapUsage heapUsage, uint32_t minHeapLength)
|
ModuleGenerator::initHeapUsage(HeapUsage heapUsage, uint32_t minHeapLength)
|
||||||
{
|
{
|
||||||
MOZ_ASSERT(module_->heapUsage == HeapUsage::None);
|
MOZ_ASSERT(metadata_->heapUsage == HeapUsage::None);
|
||||||
module_->heapUsage = heapUsage;
|
metadata_->heapUsage = heapUsage;
|
||||||
shared_->minHeapLength = minHeapLength;
|
shared_->minHeapLength = minHeapLength;
|
||||||
}
|
}
|
||||||
|
|
||||||
bool
|
bool
|
||||||
ModuleGenerator::usesHeap() const
|
ModuleGenerator::usesHeap() const
|
||||||
{
|
{
|
||||||
return UsesHeap(module_->heapUsage);
|
return UsesHeap(metadata_->heapUsage);
|
||||||
}
|
}
|
||||||
|
|
||||||
void
|
void
|
||||||
|
@ -663,7 +660,7 @@ ModuleGenerator::initImport(uint32_t importIndex, uint32_t sigIndex)
|
||||||
if (!allocateGlobalBytes(Module::SizeOfImportExit, sizeof(void*), &globalDataOffset))
|
if (!allocateGlobalBytes(Module::SizeOfImportExit, sizeof(void*), &globalDataOffset))
|
||||||
return false;
|
return false;
|
||||||
|
|
||||||
MOZ_ASSERT(importIndex == module_->imports.length());
|
MOZ_ASSERT(importIndex == metadata_->imports.length());
|
||||||
if (!addImport(sig(sigIndex), globalDataOffset))
|
if (!addImport(sig(sigIndex), globalDataOffset))
|
||||||
return false;
|
return false;
|
||||||
|
|
||||||
|
@ -677,7 +674,7 @@ ModuleGenerator::initImport(uint32_t importIndex, uint32_t sigIndex)
|
||||||
uint32_t
|
uint32_t
|
||||||
ModuleGenerator::numImports() const
|
ModuleGenerator::numImports() const
|
||||||
{
|
{
|
||||||
return module_->imports.length();
|
return metadata_->imports.length();
|
||||||
}
|
}
|
||||||
|
|
||||||
const ImportModuleGeneratorData&
|
const ImportModuleGeneratorData&
|
||||||
|
@ -700,7 +697,7 @@ ModuleGenerator::declareExport(UniqueChars fieldName, uint32_t funcIndex, uint32
|
||||||
return exportMap_->fieldsToExports.append(p->value());
|
return exportMap_->fieldsToExports.append(p->value());
|
||||||
}
|
}
|
||||||
|
|
||||||
uint32_t newExportIndex = module_->exports.length();
|
uint32_t newExportIndex = metadata_->exports.length();
|
||||||
MOZ_ASSERT(newExportIndex < MaxExports);
|
MOZ_ASSERT(newExportIndex < MaxExports);
|
||||||
|
|
||||||
if (exportIndex)
|
if (exportIndex)
|
||||||
|
@ -710,7 +707,7 @@ ModuleGenerator::declareExport(UniqueChars fieldName, uint32_t funcIndex, uint32
|
||||||
if (!copy.clone(funcSig(funcIndex)))
|
if (!copy.clone(funcSig(funcIndex)))
|
||||||
return false;
|
return false;
|
||||||
|
|
||||||
return module_->exports.append(Move(copy)) &&
|
return metadata_->exports.append(Move(copy)) &&
|
||||||
funcIndexToExport_.add(p, funcIndex, newExportIndex) &&
|
funcIndexToExport_.add(p, funcIndex, newExportIndex) &&
|
||||||
exportMap_->fieldsToExports.append(newExportIndex) &&
|
exportMap_->fieldsToExports.append(newExportIndex) &&
|
||||||
exportMap_->exportFuncIndices.append(funcIndex);
|
exportMap_->exportFuncIndices.append(funcIndex);
|
||||||
|
@ -719,7 +716,7 @@ ModuleGenerator::declareExport(UniqueChars fieldName, uint32_t funcIndex, uint32
|
||||||
uint32_t
|
uint32_t
|
||||||
ModuleGenerator::numExports() const
|
ModuleGenerator::numExports() const
|
||||||
{
|
{
|
||||||
return module_->exports.length();
|
return metadata_->exports.length();
|
||||||
}
|
}
|
||||||
|
|
||||||
bool
|
bool
|
||||||
|
@ -846,7 +843,7 @@ ModuleGenerator::finishFuncDefs()
|
||||||
for (uint32_t funcIndex = 0; funcIndex < funcIndexToCodeRange_.length(); funcIndex++)
|
for (uint32_t funcIndex = 0; funcIndex < funcIndexToCodeRange_.length(); funcIndex++)
|
||||||
MOZ_ASSERT(funcIsDefined(funcIndex));
|
MOZ_ASSERT(funcIsDefined(funcIndex));
|
||||||
|
|
||||||
module_->functionBytes = masm_.size();
|
metadata_->functionLength = masm_.size();
|
||||||
finishedFuncDefs_ = true;
|
finishedFuncDefs_ = true;
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
@ -884,41 +881,42 @@ ModuleGenerator::initSigTableElems(uint32_t sigIndex, Uint32Vector&& elemFuncInd
|
||||||
|
|
||||||
bool
|
bool
|
||||||
ModuleGenerator::finish(CacheableCharsVector&& prettyFuncNames,
|
ModuleGenerator::finish(CacheableCharsVector&& prettyFuncNames,
|
||||||
UniqueModuleData* module,
|
UniqueCodeSegment* codeSegment,
|
||||||
UniqueStaticLinkData* linkData,
|
SharedMetadata* metadata,
|
||||||
UniqueExportMap* exportMap,
|
SharedStaticLinkData* staticLinkDataOut,
|
||||||
|
SharedExportMap* exportMap,
|
||||||
SlowFunctionVector* slowFuncs)
|
SlowFunctionVector* slowFuncs)
|
||||||
{
|
{
|
||||||
MOZ_ASSERT(!activeFunc_);
|
MOZ_ASSERT(!activeFunc_);
|
||||||
MOZ_ASSERT(finishedFuncDefs_);
|
MOZ_ASSERT(finishedFuncDefs_);
|
||||||
|
|
||||||
UniqueStaticLinkData link = MakeUnique<StaticLinkData>();
|
MutableStaticLinkData staticLinkData = js_new<StaticLinkData>();
|
||||||
if (!link)
|
if (!staticLinkData)
|
||||||
return false;
|
return false;
|
||||||
|
|
||||||
if (!finishCodegen(link.get()))
|
if (!finishCodegen(staticLinkData.get()))
|
||||||
return false;
|
return false;
|
||||||
|
|
||||||
module_->prettyFuncNames = Move(prettyFuncNames);
|
metadata_->prettyFuncNames = Move(prettyFuncNames);
|
||||||
|
|
||||||
// Start global data on a new page so JIT code may be given independent
|
// Start global data on a new page so JIT code may be given independent
|
||||||
// protection flags. Note assumption that global data starts right after
|
// protection flags. Note assumption that global data starts right after
|
||||||
// code below.
|
// code below.
|
||||||
module_->codeBytes = AlignBytes(masm_.bytesNeeded(), gc::SystemPageSize());
|
uint32_t codeLength = AlignBytes(masm_.bytesNeeded(), gc::SystemPageSize());
|
||||||
|
|
||||||
// Inflate the global bytes up to page size so that the total bytes are a
|
// Inflate the global bytes up to page size so that the total bytes are a
|
||||||
// page size (as required by the allocator functions).
|
// page size (as required by the allocator functions).
|
||||||
module_->globalBytes = AlignBytes(module_->globalBytes, gc::SystemPageSize());
|
globalDataLength_ = AlignBytes(globalDataLength_, gc::SystemPageSize());
|
||||||
|
|
||||||
// Allocate the code (guarded by a UniquePtr until it is given to the Module).
|
// Allocate the code (guarded by a UniquePtr until it is given to the Module).
|
||||||
module_->code = AllocateCode(cx_, module_->totalBytes());
|
UniqueCodeSegment cs = CodeSegment::allocate(cx_, codeLength, globalDataLength_);
|
||||||
if (!module_->code)
|
if (!cs)
|
||||||
return false;
|
return false;
|
||||||
|
|
||||||
// Delay flushing until Module::dynamicallyLink. The flush-inhibited range
|
// Delay flushing until Module::dynamicallyLink. The flush-inhibited range
|
||||||
// is set by executableCopy.
|
// is set by executableCopy.
|
||||||
AutoFlushICache afc("ModuleGenerator::finish", /* inhibit = */ true);
|
AutoFlushICache afc("ModuleGenerator::finish", /* inhibit = */ true);
|
||||||
masm_.executableCopy(module_->code.get());
|
masm_.executableCopy(cs->code());
|
||||||
|
|
||||||
// c.f. JitCode::copyFrom
|
// c.f. JitCode::copyFrom
|
||||||
MOZ_ASSERT(masm_.jumpRelocationTableBytes() == 0);
|
MOZ_ASSERT(masm_.jumpRelocationTableBytes() == 0);
|
||||||
|
@ -928,25 +926,26 @@ ModuleGenerator::finish(CacheableCharsVector&& prettyFuncNames,
|
||||||
|
|
||||||
// Convert the CallSiteAndTargetVector (needed during generation) to a
|
// Convert the CallSiteAndTargetVector (needed during generation) to a
|
||||||
// CallSiteVector (what is stored in the Module).
|
// CallSiteVector (what is stored in the Module).
|
||||||
if (!module_->callSites.appendAll(masm_.callSites()))
|
if (!metadata_->callSites.appendAll(masm_.callSites()))
|
||||||
return false;
|
return false;
|
||||||
|
|
||||||
// The MacroAssembler has accumulated all the heap accesses during codegen.
|
// The MacroAssembler has accumulated all the heap accesses during codegen.
|
||||||
module_->heapAccesses = masm_.extractHeapAccesses();
|
metadata_->heapAccesses = masm_.extractHeapAccesses();
|
||||||
|
|
||||||
if (!finishStaticLinkData(module_->code.get(), module_->codeBytes, link.get()))
|
if (!finishStaticLinkData(cs->code(), cs->codeLength(), staticLinkData.get()))
|
||||||
return false;
|
return false;
|
||||||
|
|
||||||
// These Vectors can get large and the excess capacity can be significant,
|
// These Vectors can get large and the excess capacity can be significant,
|
||||||
// so realloc them down to size.
|
// so realloc them down to size.
|
||||||
module_->heapAccesses.podResizeToFit();
|
metadata_->heapAccesses.podResizeToFit();
|
||||||
module_->codeRanges.podResizeToFit();
|
metadata_->codeRanges.podResizeToFit();
|
||||||
module_->callSites.podResizeToFit();
|
metadata_->callSites.podResizeToFit();
|
||||||
module_->callThunks.podResizeToFit();
|
metadata_->callThunks.podResizeToFit();
|
||||||
|
|
||||||
*module = Move(module_);
|
*codeSegment = Move(cs);
|
||||||
*linkData = Move(link);
|
*metadata = metadata_.forget();
|
||||||
*exportMap = Move(exportMap_);
|
*staticLinkDataOut = staticLinkData.forget();
|
||||||
|
*exportMap = exportMap_.forget();
|
||||||
*slowFuncs = Move(slowFuncs_);
|
*slowFuncs = Move(slowFuncs_);
|
||||||
return true;
|
return true;
|
||||||
}
|
}
|
||||||
|
|
|
@ -119,8 +119,9 @@ class MOZ_STACK_CLASS ModuleGenerator
|
||||||
jit::JitContext jcx_;
|
jit::JitContext jcx_;
|
||||||
|
|
||||||
// Data handed back to the caller in finish()
|
// Data handed back to the caller in finish()
|
||||||
UniqueModuleData module_;
|
uint32_t globalDataLength_;
|
||||||
UniqueExportMap exportMap_;
|
MutableMetadata metadata_;
|
||||||
|
MutableExportMap exportMap_;
|
||||||
SlowFunctionVector slowFuncs_;
|
SlowFunctionVector slowFuncs_;
|
||||||
|
|
||||||
// Data scoped to the ModuleGenerator's lifetime
|
// Data scoped to the ModuleGenerator's lifetime
|
||||||
|
@ -152,7 +153,7 @@ class MOZ_STACK_CLASS ModuleGenerator
|
||||||
MOZ_MUST_USE bool convertOutOfRangeBranchesToThunks();
|
MOZ_MUST_USE bool convertOutOfRangeBranchesToThunks();
|
||||||
MOZ_MUST_USE bool finishTask(IonCompileTask* task);
|
MOZ_MUST_USE bool finishTask(IonCompileTask* task);
|
||||||
MOZ_MUST_USE bool finishCodegen(StaticLinkData* link);
|
MOZ_MUST_USE bool finishCodegen(StaticLinkData* link);
|
||||||
MOZ_MUST_USE bool finishStaticLinkData(uint8_t* code, uint32_t codeBytes, StaticLinkData* link);
|
MOZ_MUST_USE bool finishStaticLinkData(uint8_t* code, uint32_t codeLength, StaticLinkData* link);
|
||||||
MOZ_MUST_USE bool addImport(const Sig& sig, uint32_t globalDataOffset);
|
MOZ_MUST_USE bool addImport(const Sig& sig, uint32_t globalDataOffset);
|
||||||
MOZ_MUST_USE bool allocateGlobalBytes(uint32_t bytes, uint32_t align, uint32_t* globalDataOff);
|
MOZ_MUST_USE bool allocateGlobalBytes(uint32_t bytes, uint32_t align, uint32_t* globalDataOff);
|
||||||
|
|
||||||
|
@ -162,8 +163,8 @@ class MOZ_STACK_CLASS ModuleGenerator
|
||||||
|
|
||||||
MOZ_MUST_USE bool init(UniqueModuleGeneratorData shared, UniqueChars filename);
|
MOZ_MUST_USE bool init(UniqueModuleGeneratorData shared, UniqueChars filename);
|
||||||
|
|
||||||
bool isAsmJS() const { return module_->kind == ModuleKind::AsmJS; }
|
bool isAsmJS() const { return metadata_->kind == ModuleKind::AsmJS; }
|
||||||
CompileArgs args() const { return module_->compileArgs; }
|
CompileArgs args() const { return metadata_->compileArgs; }
|
||||||
jit::MacroAssembler& masm() { return masm_; }
|
jit::MacroAssembler& masm() { return masm_; }
|
||||||
|
|
||||||
// Heap usage:
|
// Heap usage:
|
||||||
|
@ -207,13 +208,14 @@ class MOZ_STACK_CLASS ModuleGenerator
|
||||||
void initSigTableElems(uint32_t sigIndex, Uint32Vector&& elemFuncIndices);
|
void initSigTableElems(uint32_t sigIndex, Uint32Vector&& elemFuncIndices);
|
||||||
void bumpMinHeapLength(uint32_t newMinHeapLength);
|
void bumpMinHeapLength(uint32_t newMinHeapLength);
|
||||||
|
|
||||||
// Return a ModuleData object which may be used to construct a Module, the
|
// Return a Metadata object which may be used to construct a Module, the
|
||||||
// StaticLinkData required to call Module::staticallyLink, and the list of
|
// StaticLinkData required to call Module::staticallyLink, and the list of
|
||||||
// functions that took a long time to compile.
|
// functions that took a long time to compile.
|
||||||
MOZ_MUST_USE bool finish(CacheableCharsVector&& prettyFuncNames,
|
MOZ_MUST_USE bool finish(CacheableCharsVector&& prettyFuncNames,
|
||||||
UniqueModuleData* module,
|
UniqueCodeSegment* codeSegment,
|
||||||
UniqueStaticLinkData* staticLinkData,
|
SharedMetadata* metadata,
|
||||||
UniqueExportMap* exportMap,
|
SharedStaticLinkData* staticLinkData,
|
||||||
|
SharedExportMap* exportMap,
|
||||||
SlowFunctionVector* slowFuncs);
|
SlowFunctionVector* slowFuncs);
|
||||||
};
|
};
|
||||||
|
|
||||||
|
|
|
@ -34,7 +34,6 @@
|
||||||
# include "jit/PerfSpewer.h"
|
# include "jit/PerfSpewer.h"
|
||||||
#endif
|
#endif
|
||||||
#include "jit/BaselineJIT.h"
|
#include "jit/BaselineJIT.h"
|
||||||
#include "jit/ExecutableAllocator.h"
|
|
||||||
#include "jit/JitCommon.h"
|
#include "jit/JitCommon.h"
|
||||||
#include "js/MemoryMetrics.h"
|
#include "js/MemoryMetrics.h"
|
||||||
#include "vm/StringBuffer.h"
|
#include "vm/StringBuffer.h"
|
||||||
|
@ -51,7 +50,6 @@
|
||||||
using namespace js;
|
using namespace js;
|
||||||
using namespace js::jit;
|
using namespace js::jit;
|
||||||
using namespace js::wasm;
|
using namespace js::wasm;
|
||||||
using mozilla::Atomic;
|
|
||||||
using mozilla::BinarySearch;
|
using mozilla::BinarySearch;
|
||||||
using mozilla::MakeEnumeratedRange;
|
using mozilla::MakeEnumeratedRange;
|
||||||
using mozilla::PodCopy;
|
using mozilla::PodCopy;
|
||||||
|
@ -59,40 +57,6 @@ using mozilla::PodZero;
|
||||||
using mozilla::Swap;
|
using mozilla::Swap;
|
||||||
using JS::GenericNaN;
|
using JS::GenericNaN;
|
||||||
|
|
||||||
// Limit the number of concurrent wasm code allocations per process. Note that
|
|
||||||
// on Linux, the real maximum is ~32k, as each module requires 2 maps (RW/RX),
|
|
||||||
// and the kernel's default max_map_count is ~65k.
|
|
||||||
static Atomic<uint32_t> wasmCodeAllocations(0);
|
|
||||||
static const uint32_t MaxWasmCodeAllocations = 16384;
|
|
||||||
|
|
||||||
UniqueCodePtr
|
|
||||||
wasm::AllocateCode(ExclusiveContext* cx, size_t bytes)
|
|
||||||
{
|
|
||||||
// Allocate RW memory. DynamicallyLinkModule will reprotect the code as RX.
|
|
||||||
unsigned permissions =
|
|
||||||
ExecutableAllocator::initialProtectionFlags(ExecutableAllocator::Writable);
|
|
||||||
|
|
||||||
void* p = nullptr;
|
|
||||||
if (wasmCodeAllocations++ < MaxWasmCodeAllocations)
|
|
||||||
p = AllocateExecutableMemory(nullptr, bytes, permissions, "asm-js-code", gc::SystemPageSize());
|
|
||||||
if (!p) {
|
|
||||||
wasmCodeAllocations--;
|
|
||||||
ReportOutOfMemory(cx);
|
|
||||||
}
|
|
||||||
|
|
||||||
return UniqueCodePtr((uint8_t*)p, CodeDeleter(bytes));
|
|
||||||
}
|
|
||||||
|
|
||||||
void
|
|
||||||
CodeDeleter::operator()(uint8_t* p)
|
|
||||||
{
|
|
||||||
MOZ_ASSERT(wasmCodeAllocations > 0);
|
|
||||||
wasmCodeAllocations--;
|
|
||||||
|
|
||||||
MOZ_ASSERT(bytes_ != 0);
|
|
||||||
DeallocateExecutableMemory(p, bytes_, gc::SystemPageSize());
|
|
||||||
}
|
|
||||||
|
|
||||||
#if defined(JS_CODEGEN_MIPS32) || defined(JS_CODEGEN_MIPS64)
|
#if defined(JS_CODEGEN_MIPS32) || defined(JS_CODEGEN_MIPS64)
|
||||||
// On MIPS, CodeLabels are instruction immediates so InternalLinks only
|
// On MIPS, CodeLabels are instruction immediates so InternalLinks only
|
||||||
// patch instruction immediates.
|
// patch instruction immediates.
|
||||||
|
@ -149,16 +113,6 @@ StaticLinkData::SymbolicLinkArray::deserialize(ExclusiveContext* cx, const uint8
|
||||||
return cursor;
|
return cursor;
|
||||||
}
|
}
|
||||||
|
|
||||||
bool
|
|
||||||
StaticLinkData::SymbolicLinkArray::clone(JSContext* cx, SymbolicLinkArray* out) const
|
|
||||||
{
|
|
||||||
for (auto imm : MakeEnumeratedRange(SymbolicAddress::Limit)) {
|
|
||||||
if (!ClonePodVector(cx, (*this)[imm], &(*out)[imm]))
|
|
||||||
return false;
|
|
||||||
}
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
size_t
|
size_t
|
||||||
StaticLinkData::SymbolicLinkArray::sizeOfExcludingThis(MallocSizeOf mallocSizeOf) const
|
StaticLinkData::SymbolicLinkArray::sizeOfExcludingThis(MallocSizeOf mallocSizeOf) const
|
||||||
{
|
{
|
||||||
|
@ -191,13 +145,6 @@ StaticLinkData::FuncPtrTable::deserialize(ExclusiveContext* cx, const uint8_t* c
|
||||||
return cursor;
|
return cursor;
|
||||||
}
|
}
|
||||||
|
|
||||||
bool
|
|
||||||
StaticLinkData::FuncPtrTable::clone(JSContext* cx, FuncPtrTable* out) const
|
|
||||||
{
|
|
||||||
out->globalDataOffset = globalDataOffset;
|
|
||||||
return ClonePodVector(cx, elemOffsets, &out->elemOffsets);
|
|
||||||
}
|
|
||||||
|
|
||||||
size_t
|
size_t
|
||||||
StaticLinkData::FuncPtrTable::sizeOfExcludingThis(MallocSizeOf mallocSizeOf) const
|
StaticLinkData::FuncPtrTable::sizeOfExcludingThis(MallocSizeOf mallocSizeOf) const
|
||||||
{
|
{
|
||||||
|
@ -233,15 +180,6 @@ StaticLinkData::deserialize(ExclusiveContext* cx, const uint8_t* cursor)
|
||||||
return cursor;
|
return cursor;
|
||||||
}
|
}
|
||||||
|
|
||||||
bool
|
|
||||||
StaticLinkData::clone(JSContext* cx, StaticLinkData* out) const
|
|
||||||
{
|
|
||||||
out->pod = pod;
|
|
||||||
return ClonePodVector(cx, internalLinks, &out->internalLinks) &&
|
|
||||||
symbolicLinks.clone(cx, &out->symbolicLinks) &&
|
|
||||||
CloneVector(cx, funcPtrTables, &out->funcPtrTables);
|
|
||||||
}
|
|
||||||
|
|
||||||
size_t
|
size_t
|
||||||
StaticLinkData::sizeOfExcludingThis(MallocSizeOf mallocSizeOf) const
|
StaticLinkData::sizeOfExcludingThis(MallocSizeOf mallocSizeOf) const
|
||||||
{
|
{
|
||||||
|
@ -250,227 +188,6 @@ StaticLinkData::sizeOfExcludingThis(MallocSizeOf mallocSizeOf) const
|
||||||
SizeOfVectorExcludingThis(funcPtrTables, mallocSizeOf);
|
SizeOfVectorExcludingThis(funcPtrTables, mallocSizeOf);
|
||||||
}
|
}
|
||||||
|
|
||||||
static size_t
|
|
||||||
SerializedSigSize(const Sig& sig)
|
|
||||||
{
|
|
||||||
return sizeof(ExprType) +
|
|
||||||
SerializedPodVectorSize(sig.args());
|
|
||||||
}
|
|
||||||
|
|
||||||
static uint8_t*
|
|
||||||
SerializeSig(uint8_t* cursor, const Sig& sig)
|
|
||||||
{
|
|
||||||
cursor = WriteScalar<ExprType>(cursor, sig.ret());
|
|
||||||
cursor = SerializePodVector(cursor, sig.args());
|
|
||||||
return cursor;
|
|
||||||
}
|
|
||||||
|
|
||||||
static const uint8_t*
|
|
||||||
DeserializeSig(ExclusiveContext* cx, const uint8_t* cursor, Sig* sig)
|
|
||||||
{
|
|
||||||
ExprType ret;
|
|
||||||
cursor = ReadScalar<ExprType>(cursor, &ret);
|
|
||||||
|
|
||||||
ValTypeVector args;
|
|
||||||
cursor = DeserializePodVector(cx, cursor, &args);
|
|
||||||
if (!cursor)
|
|
||||||
return nullptr;
|
|
||||||
|
|
||||||
*sig = Sig(Move(args), ret);
|
|
||||||
return cursor;
|
|
||||||
}
|
|
||||||
|
|
||||||
static size_t
|
|
||||||
SizeOfSigExcludingThis(const Sig& sig, MallocSizeOf mallocSizeOf)
|
|
||||||
{
|
|
||||||
return sig.args().sizeOfExcludingThis(mallocSizeOf);
|
|
||||||
}
|
|
||||||
|
|
||||||
size_t
|
|
||||||
Export::serializedSize() const
|
|
||||||
{
|
|
||||||
return SerializedSigSize(sig_) +
|
|
||||||
sizeof(pod);
|
|
||||||
}
|
|
||||||
|
|
||||||
uint8_t*
|
|
||||||
Export::serialize(uint8_t* cursor) const
|
|
||||||
{
|
|
||||||
cursor = SerializeSig(cursor, sig_);
|
|
||||||
cursor = WriteBytes(cursor, &pod, sizeof(pod));
|
|
||||||
return cursor;
|
|
||||||
}
|
|
||||||
|
|
||||||
const uint8_t*
|
|
||||||
Export::deserialize(ExclusiveContext* cx, const uint8_t* cursor)
|
|
||||||
{
|
|
||||||
(cursor = DeserializeSig(cx, cursor, &sig_)) &&
|
|
||||||
(cursor = ReadBytes(cursor, &pod, sizeof(pod)));
|
|
||||||
return cursor;
|
|
||||||
}
|
|
||||||
|
|
||||||
bool
|
|
||||||
Export::clone(JSContext* cx, Export* out) const
|
|
||||||
{
|
|
||||||
out->pod = pod;
|
|
||||||
return out->sig_.clone(sig_);
|
|
||||||
}
|
|
||||||
|
|
||||||
size_t
|
|
||||||
Export::sizeOfExcludingThis(MallocSizeOf mallocSizeOf) const
|
|
||||||
{
|
|
||||||
return SizeOfSigExcludingThis(sig_, mallocSizeOf);
|
|
||||||
}
|
|
||||||
|
|
||||||
size_t
|
|
||||||
Import::serializedSize() const
|
|
||||||
{
|
|
||||||
return SerializedSigSize(sig_) +
|
|
||||||
sizeof(pod);
|
|
||||||
}
|
|
||||||
|
|
||||||
uint8_t*
|
|
||||||
Import::serialize(uint8_t* cursor) const
|
|
||||||
{
|
|
||||||
cursor = SerializeSig(cursor, sig_);
|
|
||||||
cursor = WriteBytes(cursor, &pod, sizeof(pod));
|
|
||||||
return cursor;
|
|
||||||
}
|
|
||||||
|
|
||||||
const uint8_t*
|
|
||||||
Import::deserialize(ExclusiveContext* cx, const uint8_t* cursor)
|
|
||||||
{
|
|
||||||
(cursor = DeserializeSig(cx, cursor, &sig_)) &&
|
|
||||||
(cursor = ReadBytes(cursor, &pod, sizeof(pod)));
|
|
||||||
return cursor;
|
|
||||||
}
|
|
||||||
|
|
||||||
bool
|
|
||||||
Import::clone(JSContext* cx, Import* out) const
|
|
||||||
{
|
|
||||||
out->pod = pod;
|
|
||||||
return out->sig_.clone(sig_);
|
|
||||||
}
|
|
||||||
|
|
||||||
size_t
|
|
||||||
Import::sizeOfExcludingThis(MallocSizeOf mallocSizeOf) const
|
|
||||||
{
|
|
||||||
return SizeOfSigExcludingThis(sig_, mallocSizeOf);
|
|
||||||
}
|
|
||||||
|
|
||||||
CodeRange::CodeRange(Kind kind, Offsets offsets)
|
|
||||||
: begin_(offsets.begin),
|
|
||||||
profilingReturn_(0),
|
|
||||||
end_(offsets.end),
|
|
||||||
funcIndex_(0),
|
|
||||||
funcLineOrBytecode_(0),
|
|
||||||
funcBeginToTableEntry_(0),
|
|
||||||
funcBeginToTableProfilingJump_(0),
|
|
||||||
funcBeginToNonProfilingEntry_(0),
|
|
||||||
funcProfilingJumpToProfilingReturn_(0),
|
|
||||||
funcProfilingEpilogueToProfilingReturn_(0),
|
|
||||||
kind_(kind)
|
|
||||||
{
|
|
||||||
MOZ_ASSERT(begin_ <= end_);
|
|
||||||
MOZ_ASSERT(kind_ == Entry || kind_ == Inline || kind_ == CallThunk);
|
|
||||||
}
|
|
||||||
|
|
||||||
CodeRange::CodeRange(Kind kind, ProfilingOffsets offsets)
|
|
||||||
: begin_(offsets.begin),
|
|
||||||
profilingReturn_(offsets.profilingReturn),
|
|
||||||
end_(offsets.end),
|
|
||||||
funcIndex_(0),
|
|
||||||
funcLineOrBytecode_(0),
|
|
||||||
funcBeginToTableEntry_(0),
|
|
||||||
funcBeginToTableProfilingJump_(0),
|
|
||||||
funcBeginToNonProfilingEntry_(0),
|
|
||||||
funcProfilingJumpToProfilingReturn_(0),
|
|
||||||
funcProfilingEpilogueToProfilingReturn_(0),
|
|
||||||
kind_(kind)
|
|
||||||
{
|
|
||||||
MOZ_ASSERT(begin_ < profilingReturn_);
|
|
||||||
MOZ_ASSERT(profilingReturn_ < end_);
|
|
||||||
MOZ_ASSERT(kind_ == ImportJitExit || kind_ == ImportInterpExit);
|
|
||||||
}
|
|
||||||
|
|
||||||
CodeRange::CodeRange(uint32_t funcIndex, uint32_t funcLineOrBytecode, FuncOffsets offsets)
|
|
||||||
: begin_(offsets.begin),
|
|
||||||
profilingReturn_(offsets.profilingReturn),
|
|
||||||
end_(offsets.end),
|
|
||||||
funcIndex_(funcIndex),
|
|
||||||
funcLineOrBytecode_(funcLineOrBytecode),
|
|
||||||
funcBeginToTableEntry_(offsets.tableEntry - begin_),
|
|
||||||
funcBeginToTableProfilingJump_(offsets.tableProfilingJump - begin_),
|
|
||||||
funcBeginToNonProfilingEntry_(offsets.nonProfilingEntry - begin_),
|
|
||||||
funcProfilingJumpToProfilingReturn_(profilingReturn_ - offsets.profilingJump),
|
|
||||||
funcProfilingEpilogueToProfilingReturn_(profilingReturn_ - offsets.profilingEpilogue),
|
|
||||||
kind_(Function)
|
|
||||||
{
|
|
||||||
MOZ_ASSERT(begin_ < profilingReturn_);
|
|
||||||
MOZ_ASSERT(profilingReturn_ < end_);
|
|
||||||
MOZ_ASSERT(funcBeginToTableEntry_ == offsets.tableEntry - begin_);
|
|
||||||
MOZ_ASSERT(funcBeginToTableProfilingJump_ == offsets.tableProfilingJump - begin_);
|
|
||||||
MOZ_ASSERT(funcBeginToNonProfilingEntry_ == offsets.nonProfilingEntry - begin_);
|
|
||||||
MOZ_ASSERT(funcProfilingJumpToProfilingReturn_ == profilingReturn_ - offsets.profilingJump);
|
|
||||||
MOZ_ASSERT(funcProfilingEpilogueToProfilingReturn_ == profilingReturn_ - offsets.profilingEpilogue);
|
|
||||||
}
|
|
||||||
|
|
||||||
static size_t
|
|
||||||
NullableStringLength(const char* chars)
|
|
||||||
{
|
|
||||||
return chars ? strlen(chars) : 0;
|
|
||||||
}
|
|
||||||
|
|
||||||
size_t
|
|
||||||
CacheableChars::serializedSize() const
|
|
||||||
{
|
|
||||||
return sizeof(uint32_t) + NullableStringLength(get());
|
|
||||||
}
|
|
||||||
|
|
||||||
uint8_t*
|
|
||||||
CacheableChars::serialize(uint8_t* cursor) const
|
|
||||||
{
|
|
||||||
uint32_t length = NullableStringLength(get());
|
|
||||||
cursor = WriteBytes(cursor, &length, sizeof(uint32_t));
|
|
||||||
cursor = WriteBytes(cursor, get(), length);
|
|
||||||
return cursor;
|
|
||||||
}
|
|
||||||
|
|
||||||
const uint8_t*
|
|
||||||
CacheableChars::deserialize(ExclusiveContext* cx, const uint8_t* cursor)
|
|
||||||
{
|
|
||||||
uint32_t length;
|
|
||||||
cursor = ReadBytes(cursor, &length, sizeof(uint32_t));
|
|
||||||
|
|
||||||
reset(cx->pod_calloc<char>(length + 1));
|
|
||||||
if (!get())
|
|
||||||
return nullptr;
|
|
||||||
|
|
||||||
cursor = ReadBytes(cursor, get(), length);
|
|
||||||
return cursor;
|
|
||||||
}
|
|
||||||
|
|
||||||
bool
|
|
||||||
CacheableChars::clone(JSContext* cx, CacheableChars* out) const
|
|
||||||
{
|
|
||||||
uint32_t length = NullableStringLength(get());
|
|
||||||
|
|
||||||
UniqueChars chars(cx->pod_calloc<char>(length + 1));
|
|
||||||
if (!chars)
|
|
||||||
return false;
|
|
||||||
|
|
||||||
PodCopy(chars.get(), get(), length);
|
|
||||||
|
|
||||||
*out = Move(chars);
|
|
||||||
return true;
|
|
||||||
}
|
|
||||||
|
|
||||||
size_t
|
|
||||||
CacheableChars::sizeOfExcludingThis(MallocSizeOf mallocSizeOf) const
|
|
||||||
{
|
|
||||||
return mallocSizeOf(get());
|
|
||||||
}
|
|
||||||
|
|
||||||
size_t
|
size_t
|
||||||
ExportMap::serializedSize() const
|
ExportMap::serializedSize() const
|
||||||
{
|
{
|
||||||
|
@ -497,14 +214,6 @@ ExportMap::deserialize(ExclusiveContext* cx, const uint8_t* cursor)
|
||||||
return cursor;
|
return cursor;
|
||||||
}
|
}
|
||||||
|
|
||||||
bool
|
|
||||||
ExportMap::clone(JSContext* cx, ExportMap* map) const
|
|
||||||
{
|
|
||||||
return CloneVector(cx, fieldNames, &map->fieldNames) &&
|
|
||||||
ClonePodVector(cx, fieldsToExports, &map->fieldsToExports) &&
|
|
||||||
ClonePodVector(cx, exportFuncIndices, &map->exportFuncIndices);
|
|
||||||
}
|
|
||||||
|
|
||||||
size_t
|
size_t
|
||||||
ExportMap::sizeOfExcludingThis(MallocSizeOf mallocSizeOf) const
|
ExportMap::sizeOfExcludingThis(MallocSizeOf mallocSizeOf) const
|
||||||
{
|
{
|
||||||
|
@ -513,92 +222,6 @@ ExportMap::sizeOfExcludingThis(MallocSizeOf mallocSizeOf) const
|
||||||
exportFuncIndices.sizeOfExcludingThis(mallocSizeOf);
|
exportFuncIndices.sizeOfExcludingThis(mallocSizeOf);
|
||||||
}
|
}
|
||||||
|
|
||||||
size_t
|
|
||||||
ModuleData::serializedSize() const
|
|
||||||
{
|
|
||||||
return sizeof(pod()) +
|
|
||||||
codeBytes +
|
|
||||||
SerializedVectorSize(imports) +
|
|
||||||
SerializedVectorSize(exports) +
|
|
||||||
SerializedPodVectorSize(heapAccesses) +
|
|
||||||
SerializedPodVectorSize(codeRanges) +
|
|
||||||
SerializedPodVectorSize(callSites) +
|
|
||||||
SerializedPodVectorSize(callThunks) +
|
|
||||||
SerializedVectorSize(prettyFuncNames) +
|
|
||||||
filename.serializedSize();
|
|
||||||
}
|
|
||||||
|
|
||||||
uint8_t*
|
|
||||||
ModuleData::serialize(uint8_t* cursor) const
|
|
||||||
{
|
|
||||||
cursor = WriteBytes(cursor, &pod(), sizeof(pod()));
|
|
||||||
cursor = WriteBytes(cursor, code.get(), codeBytes);
|
|
||||||
cursor = SerializeVector(cursor, imports);
|
|
||||||
cursor = SerializeVector(cursor, exports);
|
|
||||||
cursor = SerializePodVector(cursor, heapAccesses);
|
|
||||||
cursor = SerializePodVector(cursor, codeRanges);
|
|
||||||
cursor = SerializePodVector(cursor, callSites);
|
|
||||||
cursor = SerializePodVector(cursor, callThunks);
|
|
||||||
cursor = SerializeVector(cursor, prettyFuncNames);
|
|
||||||
cursor = filename.serialize(cursor);
|
|
||||||
return cursor;
|
|
||||||
}
|
|
||||||
|
|
||||||
/* static */ const uint8_t*
|
|
||||||
ModuleData::deserialize(ExclusiveContext* cx, const uint8_t* cursor)
|
|
||||||
{
|
|
||||||
cursor = ReadBytes(cursor, &pod(), sizeof(pod()));
|
|
||||||
|
|
||||||
code = AllocateCode(cx, totalBytes());
|
|
||||||
if (!code)
|
|
||||||
return nullptr;
|
|
||||||
cursor = ReadBytes(cursor, code.get(), codeBytes);
|
|
||||||
|
|
||||||
(cursor = DeserializeVector(cx, cursor, &imports)) &&
|
|
||||||
(cursor = DeserializeVector(cx, cursor, &exports)) &&
|
|
||||||
(cursor = DeserializePodVector(cx, cursor, &heapAccesses)) &&
|
|
||||||
(cursor = DeserializePodVector(cx, cursor, &codeRanges)) &&
|
|
||||||
(cursor = DeserializePodVector(cx, cursor, &callSites)) &&
|
|
||||||
(cursor = DeserializePodVector(cx, cursor, &callThunks)) &&
|
|
||||||
(cursor = DeserializeVector(cx, cursor, &prettyFuncNames)) &&
|
|
||||||
(cursor = filename.deserialize(cx, cursor));
|
|
||||||
return cursor;
|
|
||||||
}
|
|
||||||
|
|
||||||
bool
|
|
||||||
ModuleData::clone(JSContext* cx, ModuleData* out) const
|
|
||||||
{
|
|
||||||
out->pod() = pod();
|
|
||||||
|
|
||||||
out->code = AllocateCode(cx, totalBytes());
|
|
||||||
if (!out->code)
|
|
||||||
return false;
|
|
||||||
memcpy(out->code.get(), code.get(), codeBytes);
|
|
||||||
|
|
||||||
return CloneVector(cx, imports, &out->imports) &&
|
|
||||||
CloneVector(cx, exports, &out->exports) &&
|
|
||||||
ClonePodVector(cx, heapAccesses, &out->heapAccesses) &&
|
|
||||||
ClonePodVector(cx, codeRanges, &out->codeRanges) &&
|
|
||||||
ClonePodVector(cx, callSites, &out->callSites) &&
|
|
||||||
ClonePodVector(cx, callThunks, &out->callThunks) &&
|
|
||||||
CloneVector(cx, prettyFuncNames, &out->prettyFuncNames) &&
|
|
||||||
filename.clone(cx, &out->filename);
|
|
||||||
}
|
|
||||||
|
|
||||||
size_t
|
|
||||||
ModuleData::sizeOfExcludingThis(MallocSizeOf mallocSizeOf) const
|
|
||||||
{
|
|
||||||
// Module::addSizeOfMisc takes care of code and global memory.
|
|
||||||
return SizeOfVectorExcludingThis(imports, mallocSizeOf) +
|
|
||||||
SizeOfVectorExcludingThis(exports, mallocSizeOf) +
|
|
||||||
heapAccesses.sizeOfExcludingThis(mallocSizeOf) +
|
|
||||||
codeRanges.sizeOfExcludingThis(mallocSizeOf) +
|
|
||||||
callSites.sizeOfExcludingThis(mallocSizeOf) +
|
|
||||||
callThunks.sizeOfExcludingThis(mallocSizeOf) +
|
|
||||||
SizeOfVectorExcludingThis(prettyFuncNames, mallocSizeOf) +
|
|
||||||
filename.sizeOfExcludingThis(mallocSizeOf);
|
|
||||||
}
|
|
||||||
|
|
||||||
 uint8_t*
 Module::rawHeapPtr() const
 {
@@ -634,7 +257,7 @@ Module::specializeToHeap(ArrayBufferObjectMaybeShared* heap)
     // i.e. ptr > heapLength - data-type-byte-size - offset. data-type-byte-size
     // and offset are already included in the addend so we
     // just have to add the heap length here.
-    for (const HeapAccess& access : module_->heapAccesses) {
+    for (const HeapAccess& access : metadata_->heapAccesses) {
         if (access.hasLengthCheck())
             X86Encoding::AddInt32(access.patchLengthAt(code()), heapLength);
         void* addr = access.patchHeapPtrImmAt(code());
@@ -650,14 +273,14 @@ Module::specializeToHeap(ArrayBufferObjectMaybeShared* heap)
     // checks at the right places. All accesses that have been recorded are the
     // only ones that need bound checks (see also
     // CodeGeneratorX64::visitAsmJS{Load,Store,CompareExchange,Exchange,AtomicBinop}Heap)
-    for (const HeapAccess& access : module_->heapAccesses) {
+    for (const HeapAccess& access : metadata_->heapAccesses) {
         // See comment above for x86 codegen.
         if (access.hasLengthCheck())
             X86Encoding::AddInt32(access.patchLengthAt(code()), heapLength);
     }
 #elif defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_ARM64) || \
       defined(JS_CODEGEN_MIPS32) || defined(JS_CODEGEN_MIPS64)
-    for (const HeapAccess& access : module_->heapAccesses)
+    for (const HeapAccess& access : metadata_->heapAccesses)
         Assembler::UpdateBoundsCheck(heapLength, (Instruction*)(access.insnOffset() + code()));
 #endif

@@ -677,8 +300,8 @@ Module::despecializeFromHeap(ArrayBufferObjectMaybeShared* heap)
 #if defined(JS_CODEGEN_X86)
     uint32_t heapLength = heap->byteLength();
     uint8_t* ptrBase = heap->dataPointerEither().unwrap(/*safe - used for value*/);
-    for (unsigned i = 0; i < module_->heapAccesses.length(); i++) {
-        const HeapAccess& access = module_->heapAccesses[i];
+    for (unsigned i = 0; i < metadata_->heapAccesses.length(); i++) {
+        const HeapAccess& access = metadata_->heapAccesses[i];
         if (access.hasLengthCheck())
             X86Encoding::AddInt32(access.patchLengthAt(code()), -heapLength);
         void* addr = access.patchHeapPtrImmAt(code());
@@ -688,8 +311,8 @@ Module::despecializeFromHeap(ArrayBufferObjectMaybeShared* heap)
     }
 #elif defined(JS_CODEGEN_X64)
     uint32_t heapLength = heap->byteLength();
-    for (unsigned i = 0; i < module_->heapAccesses.length(); i++) {
-        const HeapAccess& access = module_->heapAccesses[i];
+    for (unsigned i = 0; i < metadata_->heapAccesses.length(); i++) {
+        const HeapAccess& access = metadata_->heapAccesses[i];
         if (access.hasLengthCheck())
             X86Encoding::AddInt32(access.patchLengthAt(code()), -heapLength);
     }
@@ -712,7 +335,7 @@ Module::sendCodeRangesToProfiler(JSContext* cx)
     if (!enabled)
         return true;

-    for (const CodeRange& codeRange : module_->codeRanges) {
+    for (const CodeRange& codeRange : metadata_->codeRanges) {
         if (!codeRange.isFunction())
             continue;

@@ -732,7 +355,7 @@ Module::sendCodeRangesToProfiler(JSContext* cx)

 #ifdef JS_ION_PERF
         if (PerfFuncEnabled()) {
-            const char* file = module_->filename.get();
+            const char* file = metadata_->filename.get();
             unsigned line = codeRange.funcLineOrBytecode();
             unsigned column = 0;
             writePerfSpewerAsmJSFunctionMap(start, size, file, line, column, name);
@@ -775,7 +398,7 @@ Module::setProfilingEnabled(JSContext* cx, bool enabled)
     // do it now since, once we start sampling, we'll be in a signal-handing
     // context where we cannot malloc.
     if (enabled) {
-        for (const CodeRange& codeRange : module_->codeRanges) {
+        for (const CodeRange& codeRange : metadata_->codeRanges) {
             if (!codeRange.isFunction())
                 continue;

@@ -786,7 +409,7 @@ Module::setProfilingEnabled(JSContext* cx, bool enabled)

             UniqueChars label(JS_smprintf("%s (%s:%u)",
                                           funcName,
-                                          module_->filename.get(),
+                                          metadata_->filename.get(),
                                           codeRange.funcLineOrBytecode()));
             if (!label) {
                 ReportOutOfMemory(cx);
@@ -805,17 +428,17 @@ Module::setProfilingEnabled(JSContext* cx, bool enabled)

     // Patch callsites and returns to execute profiling prologues/epilogues.
     {
-        AutoWritableJitCode awjc(cx->runtime(), code(), codeBytes());
+        AutoWritableJitCode awjc(cx->runtime(), code(), codeLength());
         AutoFlushICache afc("Module::setProfilingEnabled");
-        AutoFlushICache::setRange(uintptr_t(code()), codeBytes());
+        AutoFlushICache::setRange(uintptr_t(code()), codeLength());

-        for (const CallSite& callSite : module_->callSites)
+        for (const CallSite& callSite : metadata_->callSites)
             ToggleProfiling(*this, callSite, enabled);

-        for (const CallThunk& callThunk : module_->callThunks)
+        for (const CallThunk& callThunk : metadata_->callThunks)
             ToggleProfiling(*this, callThunk, enabled);

-        for (const CodeRange& codeRange : module_->codeRanges)
+        for (const CodeRange& codeRange : metadata_->codeRanges)
             ToggleProfiling(*this, codeRange, enabled);
     }

@@ -853,14 +476,17 @@ Module::clone(JSContext* cx, const StaticLinkData& link, Module* out) const
 {
     MOZ_ASSERT(dynamicallyLinked_);

-    // The out->module_ field was already cloned and initialized when 'out' was
+    // The out->metadata_ field was already cloned and initialized when 'out' was
     // constructed. This function should clone the rest.
-    MOZ_ASSERT(out->module_);
+    MOZ_ASSERT(out->metadata_);

+    // Copy the profiling state over too since the cloned machine code
+    // implicitly brings the profiling mode.
     out->profilingEnabled_ = profilingEnabled_;

-    if (!CloneVector(cx, funcLabels_, &out->funcLabels_))
+    for (const CacheableChars& label : funcLabels_) {
+        if (!out->funcLabels_.emplaceBack(DuplicateString(label.get())))
             return false;
+    }

 #ifdef DEBUG
     // Put the symbolic links back to -1 so PatchDataWithValueCheck assertions
@@ -869,9 +495,9 @@ Module::clone(JSContext* cx, const StaticLinkData& link, Module* out) const
         void* callee = AddressOf(imm, cx);
         const Uint32Vector& offsets = link.symbolicLinks[imm];
         for (uint32_t offset : offsets) {
-            jit::Assembler::PatchDataWithValueCheck(jit::CodeLocationLabel(out->code() + offset),
-                                                    jit::PatchedImmPtr((void*)-1),
-                                                    jit::PatchedImmPtr(callee));
+            Assembler::PatchDataWithValueCheck(CodeLocationLabel(out->code() + offset),
+                                               PatchedImmPtr((void*)-1),
+                                               PatchedImmPtr(callee));
         }
     }
 #endif
@@ -884,9 +510,9 @@ Module::clone(JSContext* cx, const StaticLinkData& link, Module* out) const
     return true;
 }

-Module::Module(UniqueModuleData module)
-  : module_(Move(module)),
+Module::Module(UniqueCodeSegment codeSegment, const Metadata& metadata)
+  : codeSegment_(Move(codeSegment)),
+    metadata_(&metadata),
     staticallyLinked_(false),
     interrupt_(nullptr),
     outOfBounds_(nullptr),
@@ -898,7 +524,7 @@ Module::Module(UniqueModuleData module)

 #ifdef DEBUG
     uint32_t lastEnd = 0;
-    for (const CodeRange& cr : module_->codeRanges) {
+    for (const CodeRange& cr : metadata_->codeRanges) {
         MOZ_ASSERT(cr.begin() >= lastEnd);
         lastEnd = cr.end();
     }
@@ -935,11 +561,11 @@ Module::readBarrier()
 /* virtual */ void
 Module::addSizeOfMisc(MallocSizeOf mallocSizeOf, size_t* code, size_t* data)
 {
-    *code += codeBytes();
+    *code += codeSegment_->codeLength();
     *data += mallocSizeOf(this) +
-             globalBytes() +
-             mallocSizeOf(module_.get()) +
-             module_->sizeOfExcludingThis(mallocSizeOf) +
+             codeSegment_->globalDataLength() +
+             mallocSizeOf(metadata_.get()) +
+             metadata_->sizeOfExcludingThis(mallocSizeOf) +
              source_.sizeOfExcludingThis(mallocSizeOf) +
              funcPtrTables_.sizeOfExcludingThis(mallocSizeOf) +
              SizeOfVectorExcludingThis(funcLabels_, mallocSizeOf);
@@ -964,13 +590,13 @@ Module::displayURL() const
 bool
 Module::containsFunctionPC(void* pc) const
 {
-    return pc >= code() && pc < (code() + module_->functionBytes);
+    return pc >= code() && pc < (code() + metadata_->functionLength);
 }

 bool
 Module::containsCodePC(void* pc) const
 {
-    return pc >= code() && pc < (code() + codeBytes());
+    return pc >= code() && pc < (code() + codeLength());
 }

 struct CallSiteRetAddrOffset
@@ -987,13 +613,13 @@ Module::lookupCallSite(void* returnAddress) const
 {
     uint32_t target = ((uint8_t*)returnAddress) - code();
     size_t lowerBound = 0;
-    size_t upperBound = module_->callSites.length();
+    size_t upperBound = metadata_->callSites.length();

     size_t match;
-    if (!BinarySearch(CallSiteRetAddrOffset(module_->callSites), lowerBound, upperBound, target, &match))
+    if (!BinarySearch(CallSiteRetAddrOffset(metadata_->callSites), lowerBound, upperBound, target, &match))
         return nullptr;

-    return &module_->callSites[match];
+    return &metadata_->callSites[match];
 }

 const CodeRange*
@@ -1001,13 +627,13 @@ Module::lookupCodeRange(void* pc) const
 {
     CodeRange::PC target((uint8_t*)pc - code());
     size_t lowerBound = 0;
-    size_t upperBound = module_->codeRanges.length();
+    size_t upperBound = metadata_->codeRanges.length();

     size_t match;
-    if (!BinarySearch(module_->codeRanges, lowerBound, upperBound, target, &match))
+    if (!BinarySearch(metadata_->codeRanges, lowerBound, upperBound, target, &match))
         return nullptr;

-    return &module_->codeRanges[match];
+    return &metadata_->codeRanges[match];
 }

 struct HeapAccessOffset
@@ -1026,13 +652,13 @@ Module::lookupHeapAccess(void* pc) const

     uint32_t target = ((uint8_t*)pc) - code();
     size_t lowerBound = 0;
-    size_t upperBound = module_->heapAccesses.length();
+    size_t upperBound = metadata_->heapAccesses.length();

     size_t match;
-    if (!BinarySearch(HeapAccessOffset(module_->heapAccesses), lowerBound, upperBound, target, &match))
+    if (!BinarySearch(HeapAccessOffset(metadata_->heapAccesses), lowerBound, upperBound, target, &match))
         return nullptr;

-    return &module_->heapAccesses[match];
+    return &metadata_->heapAccesses[match];
 }

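All three lookup functions above follow the same pattern: the vectors held by metadata_ are sorted by code offset, a small adapter exposes the sort key to mozilla::BinarySearch, and a failed search means the pc does not belong to any recorded entry. A standalone sketch of that pattern with an invented adapter (CallSiteRetAddrOffset and HeapAccessOffset, declared earlier in this file, play the same role for their element types):

#include "mozilla/BinarySearch.h"

// Illustrative adapter: exposes a sorted Uint32Vector's elements as the search key.
struct SortedOffsets
{
    const Uint32Vector& offsets;
    explicit SortedOffsets(const Uint32Vector& offsets) : offsets(offsets) {}
    uint32_t operator[](size_t index) const { return offsets[index]; }
};

static const uint32_t*
LookupOffset(const Uint32Vector& sortedOffsets, uint32_t target)
{
    size_t match;
    if (!mozilla::BinarySearch(SortedOffsets(sortedOffsets), 0, sortedOffsets.length(),
                               target, &match))
        return nullptr;   // not a recorded offset
    return &sortedOffsets[match];
}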
 bool
@@ -1047,7 +673,7 @@ Module::staticallyLink(ExclusiveContext* cx, const StaticLinkData& linkData)
     JitContext jcx(CompileRuntime::get(cx->compartment()->runtimeFromAnyThread()));
     MOZ_ASSERT(IsCompilingAsmJS());
     AutoFlushICache afc("Module::staticallyLink", /* inhibit = */ true);
-    AutoFlushICache::setRange(uintptr_t(code()), codeBytes());
+    AutoFlushICache::setRange(uintptr_t(code()), codeLength());

     interrupt_ = code() + linkData.pod.interruptOffset;
     outOfBounds_ = code() + linkData.pod.outOfBoundsOffset;
@@ -1226,7 +852,7 @@ Module::dynamicallyLink(JSContext* cx,
     JitContext jcx(CompileRuntime::get(cx->compartment()->runtimeFromAnyThread()));
     MOZ_ASSERT(IsCompilingAsmJS());
     AutoFlushICache afc("Module::dynamicallyLink");
-    AutoFlushICache::setRange(uintptr_t(code()), codeBytes());
+    AutoFlushICache::setRange(uintptr_t(code()), codeLength());

     // Initialize imports with actual imported values.
     MOZ_ASSERT(importArgs.length() == imports().length());
@@ -1242,8 +868,8 @@ Module::dynamicallyLink(JSContext* cx,
     if (usesHeap())
         specializeToHeap(heap);

-    // See AllocateCode comment above.
-    if (!ExecutableAllocator::makeExecutable(code(), codeBytes())) {
+    // See CodeSegment::allocate comment above.
+    if (!ExecutableAllocator::makeExecutable(code(), codeLength())) {
         ReportOutOfMemory(cx);
         return false;
     }
@@ -1701,9 +1327,9 @@ Module::callImport_f64(int32_t importIndex, int32_t argc, uint64_t* argv)
 const char*
 Module::maybePrettyFuncName(uint32_t funcIndex) const
 {
-    if (funcIndex >= module_->prettyFuncNames.length())
+    if (funcIndex >= metadata_->prettyFuncNames.length())
         return nullptr;
-    return module_->prettyFuncNames[funcIndex].get();
+    return metadata_->prettyFuncNames[funcIndex].get();
 }

 const char*
@@ -1833,16 +1459,13 @@ WasmModuleObject::create(ExclusiveContext* cx)
     return &obj->as<WasmModuleObject>();
 }

-bool
-WasmModuleObject::init(Module* module)
+void
+WasmModuleObject::init(Module& module)
 {
     MOZ_ASSERT(is<WasmModuleObject>());
     MOZ_ASSERT(!hasModule());
-    if (!module)
-        return false;
-    module->setOwner(this);
-    setReservedSlot(MODULE_SLOT, PrivateValue(module));
-    return true;
+    module.setOwner(this);
+    setReservedSlot(MODULE_SLOT, PrivateValue(&module));
 }

 Module&
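The init() change above turns a fallible, null-checking setter into an infallible one that takes a reference. The caller is not shown in this hunk, so the following is only a sketch of how a call site adapts (AttachModule is a hypothetical helper, not part of the patch):

static void
AttachModule(WasmModuleObject* obj, wasm::Module* module)
{
    // Before this patch: if (!obj->init(module)) { /* propagate failure */ }
    // With the new signature, init() cannot fail and never accepts null:
    MOZ_ASSERT(module);
    obj->init(*module);
}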
@@ -21,7 +21,7 @@

 #include "mozilla/LinkedList.h"

-#include "asmjs/WasmTypes.h"
+#include "asmjs/WasmCode.h"
 #include "gc/Barrier.h"
 #include "vm/MallocProvider.h"
 #include "vm/NativeObject.h"
@@ -37,8 +37,11 @@ namespace wasm {

 // The StaticLinkData contains all the metadata necessary to perform
 // Module::staticallyLink but is not necessary afterwards.
+//
+// StaticLinkData is built incrementally by ModuleGenerator and then shared
+// immutably between modules.

-struct StaticLinkData
+struct StaticLinkData : RefCounted<StaticLinkData>
 {
     struct InternalLink {
         enum Kind {
@@ -84,244 +87,8 @@ struct StaticLinkData
     WASM_DECLARE_SERIALIZABLE(StaticLinkData)
 };

-typedef UniquePtr<StaticLinkData> UniqueStaticLinkData;
+typedef RefPtr<StaticLinkData> MutableStaticLinkData;
+typedef RefPtr<const StaticLinkData> SharedStaticLinkData;
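The switch from a UniquePtr to the Mutable/Shared RefPtr pair above encodes a build-then-freeze lifecycle: one owner mutates the StaticLinkData while the module is being generated, after which it is only handed around behind a pointer-to-const. A sketch under that assumption (the helper name is invented for illustration):

static SharedStaticLinkData
FinishLinkData(MutableStaticLinkData link)
{
    // While 'link' is the only handle, the generator may keep appending
    // internal and symbolic links to it.
    // ... mutate *link here ...

    // Publishing it as a RefPtr<const StaticLinkData> freezes the interface:
    // every Module holding a SharedStaticLinkData shares one immutable copy.
    return link;
}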
-// An Export represents a single function inside a wasm Module that has been
-// exported one or more times.
-
-class Export
-{
-    Sig sig_;
-    struct CacheablePod {
-        uint32_t stubOffset_;
-    } pod;
-
-  public:
-    Export() = default;
-    explicit Export(Sig&& sig)
-      : sig_(Move(sig))
-    {
-        pod.stubOffset_ = UINT32_MAX;
-    }
-    Export(Export&& rhs)
-      : sig_(Move(rhs.sig_)),
-        pod(rhs.pod)
-    {}
-
-    void initStubOffset(uint32_t stubOffset) {
-        MOZ_ASSERT(pod.stubOffset_ == UINT32_MAX);
-        pod.stubOffset_ = stubOffset;
-    }
-
-    uint32_t stubOffset() const {
-        return pod.stubOffset_;
-    }
-    const Sig& sig() const {
-        return sig_;
-    }
-
-    WASM_DECLARE_SERIALIZABLE(Export)
-};
-
-typedef Vector<Export, 0, SystemAllocPolicy> ExportVector;
-
-// An Import describes a wasm module import. Currently, only functions can be
-// imported in wasm. A function import includes the signature used within the
-// module to call it.
-
-class Import
-{
-    Sig sig_;
-    struct CacheablePod {
-        uint32_t exitGlobalDataOffset_;
-        uint32_t interpExitCodeOffset_;
-        uint32_t jitExitCodeOffset_;
-    } pod;
-
-  public:
-    Import() {}
-    Import(Import&& rhs) : sig_(Move(rhs.sig_)), pod(rhs.pod) {}
-    Import(Sig&& sig, uint32_t exitGlobalDataOffset)
-      : sig_(Move(sig))
-    {
-        pod.exitGlobalDataOffset_ = exitGlobalDataOffset;
-        pod.interpExitCodeOffset_ = 0;
-        pod.jitExitCodeOffset_ = 0;
-    }
-
-    void initInterpExitOffset(uint32_t off) {
-        MOZ_ASSERT(!pod.interpExitCodeOffset_);
-        pod.interpExitCodeOffset_ = off;
-    }
-    void initJitExitOffset(uint32_t off) {
-        MOZ_ASSERT(!pod.jitExitCodeOffset_);
-        pod.jitExitCodeOffset_ = off;
-    }
-
-    const Sig& sig() const {
-        return sig_;
-    }
-    uint32_t exitGlobalDataOffset() const {
-        return pod.exitGlobalDataOffset_;
-    }
-    uint32_t interpExitCodeOffset() const {
-        return pod.interpExitCodeOffset_;
-    }
-    uint32_t jitExitCodeOffset() const {
-        return pod.jitExitCodeOffset_;
-    }
-
-    WASM_DECLARE_SERIALIZABLE(Import)
-};
-
-typedef Vector<Import, 0, SystemAllocPolicy> ImportVector;
-
-// A CodeRange describes a single contiguous range of code within a wasm
-// module's code segment. A CodeRange describes what the code does and, for
-// function bodies, the name and source coordinates of the function.
-
-class CodeRange
-{
-  public:
-    enum Kind { Function, Entry, ImportJitExit, ImportInterpExit, Inline, CallThunk };
-
-  private:
-    // All fields are treated as cacheable POD:
-    uint32_t begin_;
-    uint32_t profilingReturn_;
-    uint32_t end_;
-    uint32_t funcIndex_;
-    uint32_t funcLineOrBytecode_;
-    uint8_t funcBeginToTableEntry_;
-    uint8_t funcBeginToTableProfilingJump_;
-    uint8_t funcBeginToNonProfilingEntry_;
-    uint8_t funcProfilingJumpToProfilingReturn_;
-    uint8_t funcProfilingEpilogueToProfilingReturn_;
-    Kind kind_ : 8;
-
-  public:
-    CodeRange() = default;
-    CodeRange(Kind kind, Offsets offsets);
-    CodeRange(Kind kind, ProfilingOffsets offsets);
-    CodeRange(uint32_t funcIndex, uint32_t lineOrBytecode, FuncOffsets offsets);
-
-    // All CodeRanges have a begin and end.
-
-    uint32_t begin() const {
-        return begin_;
-    }
-    uint32_t end() const {
-        return end_;
-    }
-
-    // Other fields are only available for certain CodeRange::Kinds.
-
-    Kind kind() const {
-        return kind_;
-    }
-
-    bool isFunction() const {
-        return kind() == Function;
-    }
-    bool isImportExit() const {
-        return kind() == ImportJitExit || kind() == ImportInterpExit;
-    }
-    bool isInline() const {
-        return kind() == Inline;
-    }
-
-    // Every CodeRange except entry and inline stubs has a profiling return
-    // which is used for asynchronous profiling to determine the frame pointer.
-
-    uint32_t profilingReturn() const {
-        MOZ_ASSERT(isFunction() || isImportExit());
-        return profilingReturn_;
-    }
-
-    // Functions have offsets which allow patching to selectively execute
-    // profiling prologues/epilogues.
-
-    uint32_t funcProfilingEntry() const {
-        MOZ_ASSERT(isFunction());
-        return begin();
-    }
-    uint32_t funcTableEntry() const {
-        MOZ_ASSERT(isFunction());
-        return begin_ + funcBeginToTableEntry_;
-    }
-    uint32_t funcTableProfilingJump() const {
-        MOZ_ASSERT(isFunction());
-        return begin_ + funcBeginToTableProfilingJump_;
-    }
-    uint32_t funcNonProfilingEntry() const {
-        MOZ_ASSERT(isFunction());
-        return begin_ + funcBeginToNonProfilingEntry_;
-    }
-    uint32_t funcProfilingJump() const {
-        MOZ_ASSERT(isFunction());
-        return profilingReturn_ - funcProfilingJumpToProfilingReturn_;
-    }
-    uint32_t funcProfilingEpilogue() const {
-        MOZ_ASSERT(isFunction());
-        return profilingReturn_ - funcProfilingEpilogueToProfilingReturn_;
-    }
-    uint32_t funcIndex() const {
-        MOZ_ASSERT(isFunction());
-        return funcIndex_;
-    }
-    uint32_t funcLineOrBytecode() const {
-        MOZ_ASSERT(isFunction());
-        return funcLineOrBytecode_;
-    }
-
-    // A sorted array of CodeRanges can be looked up via BinarySearch and PC.
-
-    struct PC {
-        size_t offset;
-        explicit PC(size_t offset) : offset(offset) {}
-        bool operator==(const CodeRange& rhs) const {
-            return offset >= rhs.begin() && offset < rhs.end();
-        }
-        bool operator<(const CodeRange& rhs) const {
-            return offset < rhs.begin();
-        }
-    };
-};
-
-WASM_DECLARE_POD_VECTOR(CodeRange, CodeRangeVector)
-
-// A CallThunk describes the offset and target of thunks so that they may be
-// patched at runtime when profiling is toggled. Thunks are emitted to connect
-// callsites that are too far away from callees to fit in a single call
-// instruction's relative offset.
-
-struct CallThunk
-{
-    uint32_t offset;
-    union {
-        uint32_t funcIndex;
-        uint32_t codeRangeIndex;
-    } u;
-
-    CallThunk(uint32_t offset, uint32_t funcIndex) : offset(offset) { u.funcIndex = funcIndex; }
-    CallThunk() = default;
-};
-
-WASM_DECLARE_POD_VECTOR(CallThunk, CallThunkVector)
-
-// CacheableChars is used to cacheably store UniqueChars.
-
-struct CacheableChars : UniqueChars
-{
-    CacheableChars() = default;
-    explicit CacheableChars(char* ptr) : UniqueChars(ptr) {}
-    MOZ_IMPLICIT CacheableChars(UniqueChars&& rhs) : UniqueChars(Move(rhs)) {}
-    CacheableChars(CacheableChars&& rhs) : UniqueChars(Move(rhs)) {}
-    void operator=(CacheableChars&& rhs) { UniqueChars::operator=(Move(rhs)); }
-    WASM_DECLARE_SERIALIZABLE(CacheableChars)
-};
-
-typedef Vector<CacheableChars, 0, SystemAllocPolicy> CacheableCharsVector;
-
 // The ExportMap describes how Exports are mapped to the fields of the export
 // object. This allows a single Export to be used in multiple fields.
@@ -332,10 +99,13 @@ typedef Vector<CacheableChars, 0, SystemAllocPolicy> CacheableCharsVector;
 // ExportMap's exportFuncIndices vector).
 // Lastly, the 'exportFuncIndices' vector provides, for each exported function,
 // the internal index of the function.
+//
+// The ExportMap is built incrementally by ModuleGenerator and then shared
+// immutably between modules.

 static const uint32_t MemoryExport = UINT32_MAX;

-struct ExportMap
+struct ExportMap : RefCounted<ExportMap>
 {
     CacheableCharsVector fieldNames;
     Uint32Vector fieldsToExports;
@@ -344,80 +114,8 @@ struct ExportMap
     WASM_DECLARE_SERIALIZABLE(ExportMap)
 };

-typedef UniquePtr<ExportMap> UniqueExportMap;
+typedef RefPtr<ExportMap> MutableExportMap;
+typedef RefPtr<const ExportMap> SharedExportMap;

-// A UniqueCodePtr owns allocated executable code. Code passed to the Module
-// constructor must be allocated via AllocateCode.
-
-class CodeDeleter
-{
-    uint32_t bytes_;
-  public:
-    CodeDeleter() : bytes_(0) {}
-    explicit CodeDeleter(uint32_t bytes) : bytes_(bytes) {}
-    void operator()(uint8_t* p);
-};
-
-typedef UniquePtr<uint8_t, CodeDeleter> UniqueCodePtr;
-
-UniqueCodePtr
-AllocateCode(ExclusiveContext* cx, size_t bytes);
-
-// A wasm module can either use no heap, a unshared heap (ArrayBuffer) or shared
-// heap (SharedArrayBuffer).
-
-enum class HeapUsage
-{
-    None = false,
-    Unshared = 1,
-    Shared = 2
-};
-
-static inline bool
-UsesHeap(HeapUsage heapUsage)
-{
-    return bool(heapUsage);
-}
-
-// ModuleCacheablePod holds the trivially-memcpy()able serializable portion of
-// ModuleData.
-
-struct ModuleCacheablePod
-{
-    uint32_t functionBytes;
-    uint32_t codeBytes;
-    uint32_t globalBytes;
-    ModuleKind kind;
-    HeapUsage heapUsage;
-    CompileArgs compileArgs;
-
-    uint32_t totalBytes() const { return codeBytes + globalBytes; }
-};
-
-// ModuleData holds the guts of a Module. ModuleData is mutably built up by
-// ModuleGenerator and then handed over to the Module constructor in finish(),
-// where it is stored immutably.
-
-struct ModuleData : ModuleCacheablePod
-{
-    ModuleData() : loadedFromCache(false) { mozilla::PodZero(&pod()); }
-    ModuleCacheablePod& pod() { return *this; }
-    const ModuleCacheablePod& pod() const { return *this; }
-
-    UniqueCodePtr code;
-    ImportVector imports;
-    ExportVector exports;
-    HeapAccessVector heapAccesses;
-    CodeRangeVector codeRanges;
-    CallSiteVector callSites;
-    CallThunkVector callThunks;
-    CacheableCharsVector prettyFuncNames;
-    CacheableChars filename;
-    bool loadedFromCache;
-
-    WASM_DECLARE_SERIALIZABLE(ModuleData);
-};
-
-typedef UniquePtr<ModuleData> UniqueModuleData;

 // Module represents a compiled WebAssembly module which lives until the last
 // reference to any exported functions is dropped. Modules must be wrapped by a
@@ -441,7 +139,6 @@ typedef UniquePtr<ModuleData> UniqueModuleData;

 class Module : public mozilla::LinkedListElement<Module>
 {
-    typedef UniquePtr<const ModuleData> UniqueConstModuleData;
     struct ImportExit {
         void* code;
         jit::BaselineScript* baselineScript;
@@ -467,7 +164,8 @@ class Module : public mozilla::LinkedListElement<Module>
     typedef GCPtr<WasmModuleObject*> ModuleObjectPtr;

     // Initialized when constructed:
-    const UniqueConstModuleData module_;
+    const UniqueCodeSegment codeSegment_;
+    const SharedMetadata metadata_;

     // Initialized during staticallyLink:
     bool staticallyLinked_;
@@ -509,7 +207,8 @@ class Module : public mozilla::LinkedListElement<Module>
     friend void* wasm::AddressOf(SymbolicAddress, ExclusiveContext*);

   protected:
-    const ModuleData& base() const { return *module_; }
+    const CodeSegment& codeSegment() const { return *codeSegment_; }
+    const Metadata& metadata() const { return *metadata_; }
     MOZ_MUST_USE bool clone(JSContext* cx, const StaticLinkData& link, Module* clone) const;

   public:
@@ -517,7 +216,7 @@ class Module : public mozilla::LinkedListElement<Module>
     static const unsigned OffsetOfImportExitFun = offsetof(ImportExit, fun);
     static const unsigned SizeOfEntryArg = sizeof(EntryArg);

-    explicit Module(UniqueModuleData module);
+    explicit Module(UniqueCodeSegment codeSegment, const Metadata& metadata);
     virtual ~Module();
     virtual void trace(JSTracer* trc);
     virtual void readBarrier();
@@ -528,19 +227,18 @@ class Module : public mozilla::LinkedListElement<Module>

     void setSource(Bytes&& source) { source_ = Move(source); }

-    uint8_t* code() const { return module_->code.get(); }
-    uint32_t codeBytes() const { return module_->codeBytes; }
-    uint8_t* globalData() const { return code() + module_->codeBytes; }
-    uint32_t globalBytes() const { return module_->globalBytes; }
-    HeapUsage heapUsage() const { return module_->heapUsage; }
-    bool usesHeap() const { return UsesHeap(module_->heapUsage); }
-    bool hasSharedHeap() const { return module_->heapUsage == HeapUsage::Shared; }
-    CompileArgs compileArgs() const { return module_->compileArgs; }
-    const ImportVector& imports() const { return module_->imports; }
-    const ExportVector& exports() const { return module_->exports; }
-    const CodeRangeVector& codeRanges() const { return module_->codeRanges; }
-    const char* filename() const { return module_->filename.get(); }
-    bool loadedFromCache() const { return module_->loadedFromCache; }
+    uint8_t* code() const { return codeSegment_->code(); }
+    uint32_t codeLength() const { return codeSegment_->codeLength(); }
+    uint8_t* globalData() const { return codeSegment_->globalData(); }
+    uint32_t globalDataLength() const { return codeSegment_->globalDataLength(); }
+    HeapUsage heapUsage() const { return metadata_->heapUsage; }
+    bool usesHeap() const { return UsesHeap(metadata_->heapUsage); }
+    bool hasSharedHeap() const { return metadata_->heapUsage == HeapUsage::Shared; }
+    CompileArgs compileArgs() const { return metadata_->compileArgs; }
+    const ImportVector& imports() const { return metadata_->imports; }
+    const ExportVector& exports() const { return metadata_->exports; }
+    const CodeRangeVector& codeRanges() const { return metadata_->codeRanges; }
+    const char* filename() const { return metadata_->filename.get(); }
     bool staticallyLinked() const { return staticallyLinked_; }
     bool dynamicallyLinked() const { return dynamicallyLinked_; }

@@ -549,14 +247,14 @@ class Module : public mozilla::LinkedListElement<Module>
     // semantics. The asAsmJS() member may be used as a checked downcast when
     // isAsmJS() is true.

-    bool isAsmJS() const { return module_->kind == ModuleKind::AsmJS; }
+    bool isAsmJS() const { return metadata_->kind == ModuleKind::AsmJS; }
     AsmJSModule& asAsmJS() { MOZ_ASSERT(isAsmJS()); return *(AsmJSModule*)this; }
     const AsmJSModule& asAsmJS() const { MOZ_ASSERT(isAsmJS()); return *(const AsmJSModule*)this; }
     virtual bool mutedErrors() const;
     virtual const char16_t* displayURL() const;
     virtual ScriptSource* maybeScriptSource() const { return nullptr; }

-    // The range [0, functionBytes) is a subrange of [0, codeBytes) that
+    // The range [0, functionLength) is a subrange of [0, codeLength) that
     // contains only function body code, not the stub code. This distinction is
     // used by the async interrupt handler to only interrupt when the pc is in
     // function code which, in turn, simplifies reasoning about how stubs
@@ -666,7 +364,7 @@ class WasmModuleObject : public NativeObject
   public:
     static const unsigned RESERVED_SLOTS = 1;
     static WasmModuleObject* create(ExclusiveContext* cx);
-    MOZ_MUST_USE bool init(wasm::Module* module);
+    void init(wasm::Module& module);
     wasm::Module& module() const;
     void addSizeOfMisc(mozilla::MallocSizeOf mallocSizeOf, size_t* code, size_t* data);
     static const Class class_;

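The new members above are the heart of the split: codeSegment_ owns the executable bytes plus the global data area, while metadata_ is a ref-counted, immutable description shared between Modules. Their definitions live in the WasmCode.h added by this patch and are not part of this hunk; the rough shape, inferred only from the accessors above, is something like:

// Inferred sketch only; see asmjs/WasmCode.h in this patch for the real thing.
class CodeSegment
{
  public:
    uint8_t* code() const;              // start of executable machine code
    uint32_t codeLength() const;
    uint8_t* globalData() const;        // global data area following the code
    uint32_t globalDataLength() const;
};
typedef UniquePtr<CodeSegment> UniqueCodeSegment;

struct Metadata : RefCounted<Metadata>
{
    // The serializable, shareable remainder of the old ModuleData.
    ModuleKind           kind;
    HeapUsage            heapUsage;
    CompileArgs          compileArgs;
    uint32_t             functionLength;
    ImportVector         imports;
    ExportVector         exports;
    HeapAccessVector     heapAccesses;
    CodeRangeVector      codeRanges;
    CallSiteVector       callSites;
    CallThunkVector      callThunks;
    CacheableCharsVector prettyFuncNames;
    CacheableChars       filename;
};
typedef RefPtr<const Metadata> SharedMetadata;  // as used by Module::metadata_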
@@ -164,20 +164,6 @@ DeserializeVector(ExclusiveContext* cx, const uint8_t* cursor,
     return cursor;
 }

-template <class T, size_t N>
-static inline MOZ_MUST_USE bool
-CloneVector(JSContext* cx, const mozilla::Vector<T, N, SystemAllocPolicy>& in,
-            mozilla::Vector<T, N, SystemAllocPolicy>* out)
-{
-    if (!out->resize(in.length()))
-        return false;
-    for (size_t i = 0; i < in.length(); i++) {
-        if (!in[i].clone(cx, &(*out)[i]))
-            return false;
-    }
-    return true;
-}
-
 template <class T, size_t N>
 static inline size_t
 SizeOfVectorExcludingThis(const mozilla::Vector<T, N, SystemAllocPolicy>& vec,
@@ -219,17 +205,6 @@ DeserializePodVector(ExclusiveContext* cx, const uint8_t* cursor,
     return cursor;
 }

-template <class T, size_t N>
-static inline MOZ_MUST_USE bool
-ClonePodVector(JSContext* cx, const mozilla::Vector<T, N, SystemAllocPolicy>& in,
-               mozilla::Vector<T, N, SystemAllocPolicy>* out)
-{
-    if (!out->resize(in.length()))
-        return false;
-    mozilla::PodCopy(out->begin(), in.begin(), in.length());
-    return true;
-}
-
 static inline MOZ_MUST_USE bool
 GetCPUID(uint32_t* cpuId)
 {
@@ -23,6 +23,8 @@
 #include "mozilla/HashFunctions.h"
 #include "mozilla/Maybe.h"
 #include "mozilla/Move.h"
+#include "mozilla/RefCounted.h"
+#include "mozilla/RefPtr.h"

 #include "NamespaceImports.h"

@@ -44,6 +46,7 @@ using mozilla::EnumeratedArray;
 using mozilla::Maybe;
 using mozilla::Move;
 using mozilla::MallocSizeOf;
+using mozilla::RefCounted;

 typedef Vector<uint32_t, 0, SystemAllocPolicy> Uint32Vector;

@@ -57,17 +60,16 @@ template <> struct IsPod<js::wasm::Type> : TrueType {};
 } namespace js { namespace wasm { \
 typedef Vector<Type, 0, SystemAllocPolicy> VectorName;

-// A wasm Module and everything it contains must support serialization,
-// deserialization and cloning. Some data can be simply copied as raw bytes and,
+// A wasm Module and everything it contains must support serialization and
+// deserialization. Some data can be simply copied as raw bytes and,
 // as a convention, is stored in an inline CacheablePod struct. Everything else
 // should implement the below methods which are called recusively by the
-// containing Module. See comments for these methods in wasm::Module.
+// containing Module.

 #define WASM_DECLARE_SERIALIZABLE(Type)                                     \
     size_t serializedSize() const;                                          \
     uint8_t* serialize(uint8_t* cursor) const;                              \
     const uint8_t* deserialize(ExclusiveContext* cx, const uint8_t* cursor); \
-    MOZ_MUST_USE bool clone(JSContext* cx, Type* out) const;                \
     size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const;
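The macro above only declares the interface; each cacheable type defines the four methods following the CacheablePod convention the comment describes. A minimal sketch with an invented ExampleData type (WriteBytes and SerializedPodVectorSize are assumed counterparts of the ReadBytes/SerializePodVector helpers that appear elsewhere in this patch):

struct ExampleData
{
    struct CacheablePod {
        uint32_t flags;
    } pod;
    Uint32Vector offsets;

    WASM_DECLARE_SERIALIZABLE(ExampleData)
};

size_t
ExampleData::serializedSize() const
{
    // Raw-copyable POD plus the length-prefixed vector payload.
    return sizeof(pod) + SerializedPodVectorSize(offsets);
}

uint8_t*
ExampleData::serialize(uint8_t* cursor) const
{
    cursor = WriteBytes(cursor, &pod, sizeof(pod));
    cursor = SerializePodVector(cursor, offsets);
    return cursor;
}

const uint8_t*
ExampleData::deserialize(ExclusiveContext* cx, const uint8_t* cursor)
{
    // Same cursor contract as ModuleData::deserialize earlier in this patch.
    (cursor = ReadBytes(cursor, &pod, sizeof(pod))) &&
    (cursor = DeserializePodVector(cx, cursor, &offsets));
    return cursor;
}

size_t
ExampleData::sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const
{
    return offsets.sizeOfExcludingThis(mallocSizeOf);
}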
 // ValType/ExprType utilities

@@ -160,6 +160,7 @@ UNIFIED_SOURCES += [
     'asmjs/WasmBinaryToAST.cpp',
     'asmjs/WasmBinaryToExperimentalText.cpp',
     'asmjs/WasmBinaryToText.cpp',
+    'asmjs/WasmCode.cpp',
     'asmjs/WasmFrameIterator.cpp',
     'asmjs/WasmGenerator.cpp',
     'asmjs/WasmIonCompile.cpp',