Bug 1237508 - Odin: make AsmJSModule derive wasm::Module (r=bbouvier)

--HG--
extra : commitid : FRmfuJ5MQZo
extra : rebase_source : 603b10e7fcf1cd6daa9587cd8e7cab570c519230
This commit is contained in:
Luke Wagner 2016-01-08 12:35:23 -06:00
Родитель b66e3e5581
Коммит ce49978208
9 изменённых файлов: 1276 добавлений и 1492 удалений

Просмотреть файл

@ -460,6 +460,14 @@ namespace JS {
template<typename T>
struct DeletePolicy
{
MOZ_CONSTEXPR DeletePolicy() {}
template<typename U>
MOZ_IMPLICIT DeletePolicy(DeletePolicy<U> other,
typename mozilla::EnableIf<mozilla::IsConvertible<U*, T*>::value,
int>::Type dummy = 0)
{}
void operator()(const T* ptr) {
js_delete(const_cast<T*>(ptr));
}

Разница между файлами не показана из-за своего большого размера Загрузить разницу

Просмотреть файл

@ -19,11 +19,10 @@
#ifndef asmjs_asmjs_h
#define asmjs_asmjs_h
#include "vm/NativeObject.h"
#include "NamespaceImports.h"
namespace js {
class AsmJSModule;
class ExclusiveContext;
namespace frontend {
template <typename ParseHandler> class Parser;
@ -34,29 +33,6 @@ namespace frontend {
typedef frontend::Parser<frontend::FullParseHandler> AsmJSParser;
typedef frontend::ParseContext<frontend::FullParseHandler> AsmJSParseContext;
// An AsmJSModuleObject is an internal implementation object (i.e., not exposed
// directly to user script) which traces and owns an AsmJSModule. The
// AsmJSModuleObject is referenced by the extended slots of the content-visible
// module and export JSFunctions.
class AsmJSModuleObject : public NativeObject
{
static const unsigned MODULE_SLOT = 0;
public:
static const unsigned RESERVED_SLOTS = 1;
bool hasModule() const;
void setModule(AsmJSModule* module);
AsmJSModule& module() const;
void addSizeOfMisc(mozilla::MallocSizeOf mallocSizeOf, size_t* code, size_t* data);
static const Class class_;
};
typedef Handle<AsmJSModuleObject*> HandleAsmJSModule;
// This function takes over parsing of a function starting with "use asm". The
// return value indicates whether an error was reported which the caller should
// propagate. If no error was reported, the function may still fail to validate

Просмотреть файл

@ -35,21 +35,18 @@ static const unsigned COMPILATION_LIFO_DEFAULT_CHUNK_SIZE = 64 * 1024;
ModuleGenerator::ModuleGenerator(ExclusiveContext* cx)
: cx_(cx),
args_(cx),
globalBytes_(InitialGlobalDataBytes),
slowFuncs_(cx),
lifo_(GENERATOR_LIFO_DEFAULT_CHUNK_SIZE),
jcx_(CompileRuntime::get(cx->compartment()->runtimeFromAnyThread())),
alloc_(&lifo_),
masm_(MacroAssembler::AsmJSToken(), alloc_),
sigs_(cx),
funcEntryOffsets_(cx),
exportFuncIndices_(cx),
parallel_(false),
outstanding_(0),
tasks_(cx),
freeTasks_(cx),
funcBytes_(0),
funcEntryOffsets_(cx),
exportFuncIndices_(cx),
activeFunc_(nullptr),
finishedFuncs_(false)
{
@ -109,8 +106,15 @@ ParallelCompilationEnabled(ExclusiveContext* cx)
bool
ModuleGenerator::init()
{
staticLinkData_ = cx_->make_unique<StaticLinkData>();
if (!staticLinkData_)
module_ = cx_->make_unique<ModuleData>();
if (!module_)
return false;
module_->globalBytes = InitialGlobalDataBytes;
module_->compileArgs = CompileArgs(cx_);
link_ = cx_->make_unique<StaticLinkData>();
if (!link_)
return false;
if (!sigs_.init())
@ -139,7 +143,7 @@ ModuleGenerator::init()
return false;
JSRuntime* runtime = cx_->compartment()->runtimeFromAnyThread();
for (size_t i = 0; i < numTasks; i++)
tasks_.infallibleEmplaceBack(runtime, args_, COMPILATION_LIFO_DEFAULT_CHUNK_SIZE);
tasks_.infallibleEmplaceBack(runtime, args(), COMPILATION_LIFO_DEFAULT_CHUNK_SIZE);
if (!freeTasks_.reserve(numTasks))
return false;
@ -152,13 +156,17 @@ ModuleGenerator::init()
bool
ModuleGenerator::allocateGlobalBytes(uint32_t bytes, uint32_t align, uint32_t* globalDataOffset)
{
uint32_t pad = ComputeByteAlignment(globalBytes_, align);
if (UINT32_MAX - globalBytes_ < pad + bytes)
uint32_t globalBytes = module_->globalBytes;
uint32_t pad = ComputeByteAlignment(globalBytes, align);
if (UINT32_MAX - globalBytes < pad + bytes)
return false;
globalBytes_ += pad;
*globalDataOffset = globalBytes_;
globalBytes_ += bytes;
globalBytes += pad;
*globalDataOffset = globalBytes;
globalBytes += bytes;
module_->globalBytes = globalBytes;
return true;
}
@ -217,10 +225,10 @@ ModuleGenerator::finishTask(IonCompileTask* task)
CacheableChars funcName = StringToNewUTF8CharsZ(cx_, *func.name());
if (!funcName)
return false;
uint32_t nameIndex = funcNames_.length();
if (!funcNames_.emplaceBack(Move(funcName)))
uint32_t nameIndex = module_->funcNames.length();
if (!module_->funcNames.emplaceBack(Move(funcName)))
return false;
if (!codeRanges_.emplaceBack(nameIndex, func.line(), results.offsets()))
if (!module_->codeRanges.emplaceBack(nameIndex, func.line(), results.offsets()))
return false;
// Keep a record of slow functions for printing in the final console message.
@ -279,42 +287,42 @@ ModuleGenerator::declareImport(MallocSig&& sig, unsigned* index)
if (!allocateGlobalBytes(Module::SizeOfImportExit, sizeof(void*), &globalDataOffset))
return false;
*index = unsigned(imports_.length());
return imports_.emplaceBack(Move(sig), globalDataOffset);
*index = unsigned(module_->imports.length());
return module_->imports.emplaceBack(Move(sig), globalDataOffset);
}
uint32_t
ModuleGenerator::numDeclaredImports() const
{
return imports_.length();
return module_->imports.length();
}
uint32_t
ModuleGenerator::importExitGlobalDataOffset(uint32_t index) const
{
return imports_[index].exitGlobalDataOffset();
return module_->imports[index].exitGlobalDataOffset();
}
const MallocSig&
ModuleGenerator::importSig(uint32_t index) const
{
return imports_[index].sig();
return module_->imports[index].sig();
}
bool
ModuleGenerator::defineImport(uint32_t index, ProfilingOffsets interpExit, ProfilingOffsets jitExit)
{
Import& import = imports_[index];
Import& import = module_->imports[index];
import.initInterpExitOffset(interpExit.begin);
import.initJitExitOffset(jitExit.begin);
return codeRanges_.emplaceBack(CodeRange::ImportInterpExit, interpExit) &&
codeRanges_.emplaceBack(CodeRange::ImportJitExit, jitExit);
return module_->codeRanges.emplaceBack(CodeRange::ImportInterpExit, interpExit) &&
module_->codeRanges.emplaceBack(CodeRange::ImportJitExit, jitExit);
}
bool
ModuleGenerator::declareExport(MallocSig&& sig, uint32_t funcIndex)
{
return exports_.emplaceBack(Move(sig)) &&
return module_->exports.emplaceBack(Move(sig)) &&
exportFuncIndices_.append(funcIndex);
}
@ -327,20 +335,20 @@ ModuleGenerator::exportFuncIndex(uint32_t index) const
const MallocSig&
ModuleGenerator::exportSig(uint32_t index) const
{
return exports_[index].sig();
return module_->exports[index].sig();
}
uint32_t
ModuleGenerator::numDeclaredExports() const
{
return exports_.length();
return module_->exports.length();
}
bool
ModuleGenerator::defineExport(uint32_t index, Offsets offsets)
{
exports_[index].initStubOffset(offsets.begin);
return codeRanges_.emplaceBack(CodeRange::Entry, offsets);
module_->exports[index].initStubOffset(offsets.begin);
return module_->codeRanges.emplaceBack(CodeRange::Entry, offsets);
}
bool
@ -430,7 +438,7 @@ ModuleGenerator::finishFuncs()
masm_.patchCall(callerOffset, calleeOffset);
}
funcBytes_ = masm_.size();
module_->functionBytes = masm_.size();
finishedFuncs_ = true;
return true;
}
@ -450,7 +458,7 @@ ModuleGenerator::declareFuncPtrTable(uint32_t numElems, uint32_t* index)
if (!allocateGlobalBytes(numElems * sizeof(void*), sizeof(void*), &globalDataOffset))
return false;
StaticLinkData::FuncPtrTableVector& tables = staticLinkData_->funcPtrTables;
StaticLinkData::FuncPtrTableVector& tables = link_->funcPtrTables;
*index = tables.length();
if (!tables.emplaceBack(globalDataOffset))
@ -465,7 +473,7 @@ ModuleGenerator::declareFuncPtrTable(uint32_t numElems, uint32_t* index)
uint32_t
ModuleGenerator::funcPtrTableGlobalDataOffset(uint32_t index) const
{
return staticLinkData_->funcPtrTables[index].globalDataOffset;
return link_->funcPtrTables[index].globalDataOffset;
}
void
@ -473,7 +481,7 @@ ModuleGenerator::defineFuncPtrTable(uint32_t index, const Vector<uint32_t>& elem
{
MOZ_ASSERT(finishedFuncs_);
StaticLinkData::FuncPtrTable& table = staticLinkData_->funcPtrTables[index];
StaticLinkData::FuncPtrTable& table = link_->funcPtrTables[index];
MOZ_ASSERT(table.elemOffsets.length() == elemFuncIndices.length());
for (size_t i = 0; i < elemFuncIndices.length(); i++)
@ -484,69 +492,74 @@ bool
ModuleGenerator::defineInlineStub(Offsets offsets)
{
MOZ_ASSERT(finishedFuncs_);
return codeRanges_.emplaceBack(CodeRange::Inline, offsets);
return module_->codeRanges.emplaceBack(CodeRange::Inline, offsets);
}
bool
ModuleGenerator::defineSyncInterruptStub(ProfilingOffsets offsets)
{
MOZ_ASSERT(finishedFuncs_);
return codeRanges_.emplaceBack(CodeRange::Interrupt, offsets);
return module_->codeRanges.emplaceBack(CodeRange::Interrupt, offsets);
}
bool
ModuleGenerator::defineAsyncInterruptStub(Offsets offsets)
{
MOZ_ASSERT(finishedFuncs_);
staticLinkData_->pod.interruptOffset = offsets.begin;
return codeRanges_.emplaceBack(CodeRange::Inline, offsets);
link_->pod.interruptOffset = offsets.begin;
return module_->codeRanges.emplaceBack(CodeRange::Inline, offsets);
}
bool
ModuleGenerator::defineOutOfBoundsStub(Offsets offsets)
{
MOZ_ASSERT(finishedFuncs_);
staticLinkData_->pod.outOfBoundsOffset = offsets.begin;
return codeRanges_.emplaceBack(CodeRange::Inline, offsets);
link_->pod.outOfBoundsOffset = offsets.begin;
return module_->codeRanges.emplaceBack(CodeRange::Inline, offsets);
}
Module*
bool
ModuleGenerator::finish(HeapUsage heapUsage,
Module::MutedBool mutedErrors,
MutedErrorsBool mutedErrors,
CacheableChars filename,
CacheableTwoByteChars displayURL,
UniqueStaticLinkData* staticLinkData,
UniqueModuleData* module,
UniqueStaticLinkData* linkData,
SlowFunctionVector* slowFuncs)
{
MOZ_ASSERT(!activeFunc_);
MOZ_ASSERT(finishedFuncs_);
module_->heapUsage = heapUsage;
module_->mutedErrors = mutedErrors;
module_->filename = Move(filename);
if (!GenerateStubs(*this, UsesHeap(heapUsage)))
return nullptr;
return false;
masm_.finish();
if (masm_.oom())
return nullptr;
return false;
// Start global data on a new page so JIT code may be given independent
// protection flags. Note assumption that global data starts right after
// code below.
uint32_t codeBytes = AlignBytes(masm_.bytesNeeded(), AsmJSPageSize);
module_->codeBytes = AlignBytes(masm_.bytesNeeded(), AsmJSPageSize);
// Inflate the global bytes up to page size so that the total bytes are a
// page size (as required by the allocator functions).
globalBytes_ = AlignBytes(globalBytes_, AsmJSPageSize);
uint32_t totalBytes = codeBytes + globalBytes_;
module_->globalBytes = AlignBytes(module_->globalBytes, AsmJSPageSize);
// Allocate the code (guarded by a UniquePtr until it is given to the Module).
UniqueCodePtr code = AllocateCode(cx_, totalBytes);
if (!code)
return nullptr;
module_->code = AllocateCode(cx_, module_->totalBytes());
if (!module_->code)
return false;
// Delay flushing until Module::dynamicallyLink. The flush-inhibited range
// is set by executableCopy.
AutoFlushICache afc("ModuleGenerator::finish", /* inhibit = */ true);
masm_.executableCopy(code.get());
uint8_t* code = module_->code.get();
masm_.executableCopy(code);
// c.f. JitCode::copyFrom
MOZ_ASSERT(masm_.jumpRelocationTableBytes() == 0);
@ -556,16 +569,18 @@ ModuleGenerator::finish(HeapUsage heapUsage,
// Convert the CallSiteAndTargetVector (needed during generation) to a
// CallSiteVector (what is stored in the Module).
CallSiteVector callSites;
if (!callSites.appendAll(masm_.callSites()))
return nullptr;
if (!module_->callSites.appendAll(masm_.callSites()))
return false;
// The MacroAssembler has accumulated all the heap accesses during codegen.
module_->heapAccesses = masm_.extractHeapAccesses();
// Add links to absolute addresses identified symbolically.
StaticLinkData::SymbolicLinkArray& symbolicLinks = staticLinkData_->symbolicLinks;
StaticLinkData::SymbolicLinkArray& symbolicLinks = link_->symbolicLinks;
for (size_t i = 0; i < masm_.numAsmJSAbsoluteAddresses(); i++) {
AsmJSAbsoluteAddress src = masm_.asmJSAbsoluteAddress(i);
if (!symbolicLinks[src.target].append(src.patchAt.offset()))
return nullptr;
return false;
}
// Relative link metadata: absolute addresses that refer to another point within
@ -578,8 +593,8 @@ ModuleGenerator::finish(HeapUsage heapUsage,
StaticLinkData::InternalLink link(StaticLinkData::InternalLink::CodeLabel);
link.patchAtOffset = masm_.labelToPatchOffset(*cl.patchAt());
link.targetOffset = cl.target()->offset();
if (!staticLinkData_->internalLinks.append(link))
return nullptr;
if (!link_->internalLinks.append(link))
return false;
}
#if defined(JS_CODEGEN_X86)
@ -590,9 +605,9 @@ ModuleGenerator::finish(HeapUsage heapUsage,
AsmJSGlobalAccess a = masm_.asmJSGlobalAccess(i);
StaticLinkData::InternalLink link(StaticLinkData::InternalLink::RawPointer);
link.patchAtOffset = masm_.labelToPatchOffset(a.patchAt);
link.targetOffset = codeBytes + a.globalDataOffset;
if (!staticLinkData_->internalLinks.append(link))
return nullptr;
link.targetOffset = module_->codeBytes + a.globalDataOffset;
if (!link_->internalLinks.append(link))
return false;
}
#endif
@ -604,38 +619,24 @@ ModuleGenerator::finish(HeapUsage heapUsage,
size_t off = masm_.longJump(i);
StaticLinkData::InternalLink link(StaticLinkData::InternalLink::InstructionImmediate);
link.patchAtOffset = off;
link.targetOffset = Assembler::ExtractInstructionImmediate(code.get() + off) -
uintptr_t(code.get());
if (!staticLinkData_->internalLinks.append(link))
return nullptr;
link.targetOffset = Assembler::ExtractInstructionImmediate(code + off) - uintptr_t(code);
if (!link_->internalLinks.append(link))
return false;
}
#endif
#if defined(JS_CODEGEN_X64)
// Global data accesses on x64 use rip-relative addressing and thus do
// not need patching after deserialization.
uint8_t* globalData = code.get() + codeBytes;
uint8_t* globalData = code + module_->codeBytes;
for (size_t i = 0; i < masm_.numAsmJSGlobalAccesses(); i++) {
AsmJSGlobalAccess a = masm_.asmJSGlobalAccess(i);
masm_.patchAsmJSGlobalAccess(a.patchAt, code.get(), globalData, a.globalDataOffset);
masm_.patchAsmJSGlobalAccess(a.patchAt, code, globalData, a.globalDataOffset);
}
#endif
*staticLinkData = Move(staticLinkData_);
*module = Move(module_);
*linkData = Move(link_);
*slowFuncs = Move(slowFuncs_);
return cx_->new_<Module>(args_,
funcBytes_,
codeBytes,
globalBytes_,
heapUsage,
mutedErrors,
Move(code),
Move(imports_),
Move(exports_),
masm_.extractHeapAccesses(),
Move(codeRanges_),
Move(callSites),
Move(funcNames_),
Move(filename),
Move(displayURL));
return true;
}

Просмотреть файл

@ -65,17 +65,10 @@ class MOZ_STACK_CLASS ModuleGenerator
typedef HashSet<const LifoSig*, SigHashPolicy> SigSet;
ExclusiveContext* cx_;
CompileArgs args_;
// Data handed over to the Module in finish()
uint32_t globalBytes_;
ImportVector imports_;
ExportVector exports_;
CodeRangeVector codeRanges_;
CacheableCharsVector funcNames_;
// Data handed back to the caller in finish()
UniqueStaticLinkData staticLinkData_;
UniqueModuleData module_;
UniqueStaticLinkData link_;
SlowFunctionVector slowFuncs_;
// Data scoped to the ModuleGenerator's lifetime
@ -84,6 +77,8 @@ class MOZ_STACK_CLASS ModuleGenerator
jit::TempAllocator alloc_;
jit::MacroAssembler masm_;
SigSet sigs_;
FuncOffsetVector funcEntryOffsets_;
FuncIndexVector exportFuncIndices_;
// Parallel compilation
bool parallel_;
@ -91,10 +86,7 @@ class MOZ_STACK_CLASS ModuleGenerator
Vector<IonCompileTask> tasks_;
Vector<IonCompileTask*> freeTasks_;
// Function compilation
uint32_t funcBytes_;
FuncOffsetVector funcEntryOffsets_;
FuncIndexVector exportFuncIndices_;
// Assertions
DebugOnly<FunctionGenerator*> activeFunc_;
DebugOnly<bool> finishedFuncs_;
@ -108,7 +100,7 @@ class MOZ_STACK_CLASS ModuleGenerator
bool init();
CompileArgs args() const { return args_; }
CompileArgs args() const { return module_->compileArgs; }
jit::MacroAssembler& masm() { return masm_; }
const FuncOffsetVector& funcEntryOffsets() const { return funcEntryOffsets_; }
@ -149,14 +141,16 @@ class MOZ_STACK_CLASS ModuleGenerator
bool defineAsyncInterruptStub(Offsets offsets);
bool defineOutOfBoundsStub(Offsets offsets);
// Null return indicates failure. The caller must immediately root a
// non-null return value.
Module* finish(HeapUsage heapUsage,
Module::MutedBool mutedErrors,
CacheableChars filename,
CacheableTwoByteChars displayURL,
UniqueStaticLinkData* staticLinkData,
SlowFunctionVector* slowFuncs);
// Return a ModuleData object which may be used to construct a Module, the
// StaticLinkData required to call Module::staticallyLink, and the list of
// functions that took a long time to compile.
bool finish(HeapUsage heapUsage,
MutedErrorsBool mutedErrors,
CacheableChars filename,
CacheableTwoByteChars displayURL,
UniqueModuleData* module,
UniqueStaticLinkData* staticLinkData,
SlowFunctionVector* slowFuncs);
};
// A FunctionGenerator encapsulates the generation of a single function body.

Просмотреть файл

@ -76,6 +76,7 @@ wasm::AllocateCode(ExclusiveContext* cx, size_t bytes)
void
CodeDeleter::operator()(uint8_t* p)
{
MOZ_ASSERT(bytes_ != 0);
DeallocateExecutableMemory(p, bytes_, AsmJSPageSize);
}
@ -462,6 +463,13 @@ CacheableUniquePtr<CharT>::clone(JSContext* cx, CacheableUniquePtr* out) const
return true;
}
// Memory-reporting hook: account for the heap buffer owned by this
// CacheableUniquePtr (the pointer itself is counted by the container that
// embeds it, hence "ExcludingThis").
template <class CharT>
size_t
CacheableUniquePtr<CharT>::sizeOfExcludingThis(MallocSizeOf mallocSizeOf) const
{
return mallocSizeOf(this->get());
}
namespace js {
namespace wasm {
template struct CacheableUniquePtr<char>;
@ -469,24 +477,90 @@ template struct CacheableUniquePtr<char16_t>;
}
}
class Module::AutoMutateCode
size_t
ModuleData::serializedSize() const
{
AutoWritableJitCode awjc_;
AutoFlushICache afc_;
return sizeof(pod()) +
codeBytes +
SerializedVectorSize(imports) +
SerializedVectorSize(exports) +
SerializedPodVectorSize(heapAccesses) +
SerializedPodVectorSize(codeRanges) +
SerializedPodVectorSize(callSites) +
SerializedVectorSize(funcNames) +
filename.serializedSize() +
displayURL.serializedSize();
}
public:
AutoMutateCode(JSContext* cx, Module& module, const char* name)
: awjc_(cx->runtime(), module.code(), module.pod.codeBytes_),
afc_(name)
{
AutoFlushICache::setRange(uintptr_t(module.code()), module.pod.codeBytes_);
}
};
uint32_t
Module::totalBytes() const
uint8_t*
ModuleData::serialize(uint8_t* cursor) const
{
return pod.codeBytes_ + pod.globalBytes_;
cursor = WriteBytes(cursor, &pod(), sizeof(pod()));
cursor = WriteBytes(cursor, code.get(), codeBytes);
cursor = SerializeVector(cursor, imports);
cursor = SerializeVector(cursor, exports);
cursor = SerializePodVector(cursor, heapAccesses);
cursor = SerializePodVector(cursor, codeRanges);
cursor = SerializePodVector(cursor, callSites);
cursor = SerializeVector(cursor, funcNames);
cursor = filename.serialize(cursor);
cursor = displayURL.serialize(cursor);
return cursor;
}
// Reconstitute a ModuleData from the flat byte stream produced by
// ModuleData::serialize. Returns the advanced cursor on success or nullptr
// on failure (OOM or a failed nested deserialize).
/* static */ const uint8_t*
ModuleData::deserialize(ExclusiveContext* cx, const uint8_t* cursor)
{
// The POD prefix is read first: it carries the sizes (codeBytes etc.)
// needed to allocate the executable code region below.
cursor = ReadBytes(cursor, &pod(), sizeof(pod()));
code = AllocateCode(cx, totalBytes());
if (!code)
return nullptr;
cursor = ReadBytes(cursor, code.get(), codeBytes);
// Chained &&-assignments: the first deserializer that fails assigns
// nullptr to 'cursor' and short-circuits the rest, so the final return
// value of nullptr signals failure to the caller.
(cursor = DeserializeVector(cx, cursor, &imports)) &&
(cursor = DeserializeVector(cx, cursor, &exports)) &&
(cursor = DeserializePodVector(cx, cursor, &heapAccesses)) &&
(cursor = DeserializePodVector(cx, cursor, &codeRanges)) &&
(cursor = DeserializePodVector(cx, cursor, &callSites)) &&
(cursor = DeserializeVector(cx, cursor, &funcNames)) &&
(cursor = filename.deserialize(cx, cursor)) &&
(cursor = displayURL.deserialize(cx, cursor));
return cursor;
}
// Deep-copy this ModuleData into 'out', including a fresh executable code
// allocation. Returns false on OOM (some of 'out' may be partially
// populated in that case; callers are expected to discard it).
bool
ModuleData::clone(JSContext* cx, ModuleData* out) const
{
out->pod() = pod();
out->code = AllocateCode(cx, totalBytes());
if (!out->code)
return false;
// Only the codeBytes prefix is machine code worth copying; the global
// data region that follows is (re)initialized when the clone is linked.
// NOTE(review): inferred from serialize() copying the same prefix —
// confirm against Module linking code.
memcpy(out->code.get(), code.get(), codeBytes);
return CloneVector(cx, imports, &out->imports) &&
CloneVector(cx, exports, &out->exports) &&
ClonePodVector(cx, heapAccesses, &out->heapAccesses) &&
ClonePodVector(cx, codeRanges, &out->codeRanges) &&
ClonePodVector(cx, callSites, &out->callSites) &&
CloneVector(cx, funcNames, &out->funcNames) &&
filename.clone(cx, &out->filename) &&
displayURL.clone(cx, &out->displayURL);
}
// Memory-reporting hook: sum the malloc'd memory owned by this ModuleData's
// vectors and strings. Deliberately excludes the executable code/global
// allocation (see comment below).
size_t
ModuleData::sizeOfExcludingThis(MallocSizeOf mallocSizeOf) const
{
// Module::addSizeOfMisc takes care of code and global memory.
return SizeOfVectorExcludingThis(imports, mallocSizeOf) +
SizeOfVectorExcludingThis(exports, mallocSizeOf) +
heapAccesses.sizeOfExcludingThis(mallocSizeOf) +
codeRanges.sizeOfExcludingThis(mallocSizeOf) +
callSites.sizeOfExcludingThis(mallocSizeOf) +
funcNames.sizeOfExcludingThis(mallocSizeOf) +
filename.sizeOfExcludingThis(mallocSizeOf) +
displayURL.sizeOfExcludingThis(mallocSizeOf);
}
uint8_t*
@ -524,7 +598,7 @@ Module::specializeToHeap(ArrayBufferObjectMaybeShared* heap)
// i.e. ptr > heapLength - data-type-byte-size - offset. data-type-byte-size
// and offset are already included in the addend so we
// just have to add the heap length here.
for (const HeapAccess& access : heapAccesses_) {
for (const HeapAccess& access : module_->heapAccesses) {
if (access.hasLengthCheck())
X86Encoding::AddInt32(access.patchLengthAt(code()), heapLength);
void* addr = access.patchHeapPtrImmAt(code());
@ -540,14 +614,14 @@ Module::specializeToHeap(ArrayBufferObjectMaybeShared* heap)
// checks at the right places. All accesses that have been recorded are the
// only ones that need bound checks (see also
// CodeGeneratorX64::visitAsmJS{Load,Store,CompareExchange,Exchange,AtomicBinop}Heap)
for (const HeapAccess& access : heapAccesses_) {
for (const HeapAccess& access : module_->heapAccesses) {
// See comment above for x86 codegen.
if (access.hasLengthCheck())
X86Encoding::AddInt32(access.patchLengthAt(code()), heapLength);
}
#elif defined(JS_CODEGEN_ARM) || defined(JS_CODEGEN_ARM64) || \
defined(JS_CODEGEN_MIPS32) || defined(JS_CODEGEN_MIPS64)
for (const HeapAccess& access : heapAccesses_)
for (const HeapAccess& access : module_->heapAccesses)
Assembler::UpdateBoundsCheck(heapLength, (Instruction*)(access.insnOffset() + code()));
#endif
@ -567,8 +641,8 @@ Module::despecializeFromHeap(ArrayBufferObjectMaybeShared* heap)
#if defined(JS_CODEGEN_X86)
uint32_t heapLength = heap->byteLength();
uint8_t* ptrBase = heap->dataPointerEither().unwrap(/*safe - used for value*/);
for (unsigned i = 0; i < heapAccesses_.length(); i++) {
const HeapAccess& access = heapAccesses_[i];
for (unsigned i = 0; i < module_->heapAccesses.length(); i++) {
const HeapAccess& access = module_->heapAccesses[i];
if (access.hasLengthCheck())
X86Encoding::AddInt32(access.patchLengthAt(code()), -heapLength);
void* addr = access.patchHeapPtrImmAt(code());
@ -578,8 +652,8 @@ Module::despecializeFromHeap(ArrayBufferObjectMaybeShared* heap)
}
#elif defined(JS_CODEGEN_X64)
uint32_t heapLength = heap->byteLength();
for (unsigned i = 0; i < heapAccesses_.length(); i++) {
const HeapAccess& access = heapAccesses_[i];
for (unsigned i = 0; i < module_->heapAccesses.length(); i++) {
const HeapAccess& access = module_->heapAccesses[i];
if (access.hasLengthCheck())
X86Encoding::AddInt32(access.patchLengthAt(code()), -heapLength);
}
@ -594,17 +668,17 @@ Module::sendCodeRangesToProfiler(JSContext* cx)
{
#ifdef JS_ION_PERF
if (PerfFuncEnabled()) {
for (const CodeRange& codeRange : codeRanges_) {
for (const CodeRange& codeRange : module_->codeRanges) {
if (!codeRange.isFunction())
continue;
uintptr_t start = uintptr_t(code() + codeRange.begin());
uintptr_t end = uintptr_t(code() + codeRange.end());
uintptr_t size = end - start;
const char* file = filename_.get();
const char* file = module_->filename.get();
unsigned line = codeRange.funcLineNumber();
unsigned column = 0;
const char* name = funcNames_[codeRange.funcNameIndex()].get();
const char* name = module_->funcNames[codeRange.funcNameIndex()].get();
writePerfSpewerAsmJSFunctionMap(start, size, file, line, column, name);
}
@ -612,14 +686,14 @@ Module::sendCodeRangesToProfiler(JSContext* cx)
#endif
#ifdef MOZ_VTUNE
if (IsVTuneProfilingActive()) {
for (const CodeRange& codeRange : codeRanges_) {
for (const CodeRange& codeRange : module_->codeRanges) {
if (!codeRange.isFunction())
continue;
uintptr_t start = uintptr_t(code() + codeRange.begin());
uintptr_t end = uintptr_t(code() + codeRange.end());
uintptr_t size = end - start;
const char* name = funcNames_[codeRange.funcNameIndex()].get();
const char* name = module_->funcNames[codeRange.funcNameIndex()].get();
unsigned method_id = iJIT_GetNewMethodID();
if (method_id == 0)
@ -654,16 +728,16 @@ Module::setProfilingEnabled(JSContext* cx, bool enabled)
// do it now since, once we start sampling, we'll be in a signal-handing
// context where we cannot malloc.
if (enabled) {
if (!funcLabels_.resize(funcNames_.length())) {
if (!funcLabels_.resize(module_->funcNames.length())) {
ReportOutOfMemory(cx);
return false;
}
for (const CodeRange& codeRange : codeRanges_) {
for (const CodeRange& codeRange : module_->codeRanges) {
if (!codeRange.isFunction())
continue;
unsigned lineno = codeRange.funcLineNumber();
const char* name = funcNames_[codeRange.funcNameIndex()].get();
UniqueChars label(JS_smprintf("%s (%s:%u)", name, filename_.get(), lineno));
const char* name = module_->funcNames[codeRange.funcNameIndex()].get();
UniqueChars label(JS_smprintf("%s (%s:%u)", name, module_->filename.get(), lineno));
if (!label) {
ReportOutOfMemory(cx);
return false;
@ -674,14 +748,16 @@ Module::setProfilingEnabled(JSContext* cx, bool enabled)
funcLabels_.clear();
}
// Patch callsites and returns to execute profiling prologues/epililogues.
// Patch callsites and returns to execute profiling prologues/epilogues.
{
AutoMutateCode amc(cx, *this, "Module::setProfilingEnabled");
AutoWritableJitCode awjc(cx->runtime(), code(), codeBytes());
AutoFlushICache afc("Module::setProfilingEnabled");
AutoFlushICache::setRange(uintptr_t(code()), codeBytes());
for (const CallSite& callSite : callSites_)
for (const CallSite& callSite : module_->callSites)
EnableProfilingPrologue(*this, callSite, enabled);
for (const CodeRange& codeRange : codeRanges_)
for (const CodeRange& codeRange : module_->codeRanges)
EnableProfilingEpilogue(*this, codeRange, enabled);
}
@ -709,114 +785,70 @@ Module::importToExit(const Import& import)
return *reinterpret_cast<ImportExit*>(globalData() + import.exitGlobalDataOffset());
}
/* static */ Module::CacheablePod
Module::zeroPod()
bool
Module::clone(JSContext* cx, const StaticLinkData& link, Module* out) const
{
CacheablePod pod = {0, 0, 0, HeapUsage::None, false, false, false};
return pod;
MOZ_ASSERT(dynamicallyLinked_);
// The out->module_ field was already cloned and initialized when 'out' was
// constructed. This function should clone the rest.
MOZ_ASSERT(out->module_);
out->isAsmJS_ = isAsmJS_;
out->profilingEnabled_ = profilingEnabled_;
if (!CloneVector(cx, funcLabels_, &out->funcLabels_))
return false;
#ifdef DEBUG
// Put the symbolic links back to -1 so PatchDataWithValueCheck assertions
// in Module::staticallyLink are valid.
for (auto imm : MakeEnumeratedRange(SymbolicAddress::Limit)) {
void* callee = AddressOf(imm, cx);
const StaticLinkData::OffsetVector& offsets = link.symbolicLinks[imm];
for (uint32_t offset : offsets) {
jit::Assembler::PatchDataWithValueCheck(jit::CodeLocationLabel(out->code() + offset),
jit::PatchedImmPtr((void*)-1),
jit::PatchedImmPtr(callee));
}
}
#endif
// If the copied machine code has been specialized to the heap, it must be
// unspecialized in the copy.
if (usesHeap())
out->despecializeFromHeap(heap_);
return true;
}
void
Module::init()
{
staticallyLinked_ = false;
interrupt_ = nullptr;
outOfBounds_ = nullptr;
dynamicallyLinked_ = false;
Module::Module(UniqueModuleData module, AsmJSBool isAsmJS)
: module_(Move(module)),
isAsmJS_(bool(isAsmJS)),
staticallyLinked_(false),
interrupt_(nullptr),
outOfBounds_(nullptr),
dynamicallyLinked_(false),
profilingEnabled_(false)
{
*(double*)(globalData() + NaN64GlobalDataOffset) = GenericNaN();
*(float*)(globalData() + NaN32GlobalDataOffset) = GenericNaN();
}
// Private constructor used for deserialization and cloning.
Module::Module(const CacheablePod& pod,
UniqueCodePtr code,
ImportVector&& imports,
ExportVector&& exports,
HeapAccessVector&& heapAccesses,
CodeRangeVector&& codeRanges,
CallSiteVector&& callSites,
CacheableCharsVector&& funcNames,
CacheableChars filename,
CacheableTwoByteChars displayURL,
CacheBool loadedFromCache,
ProfilingBool profilingEnabled,
FuncLabelVector&& funcLabels)
: pod(pod),
code_(Move(code)),
imports_(Move(imports)),
exports_(Move(exports)),
heapAccesses_(Move(heapAccesses)),
codeRanges_(Move(codeRanges)),
callSites_(Move(callSites)),
funcNames_(Move(funcNames)),
filename_(Move(filename)),
displayURL_(Move(displayURL)),
loadedFromCache_(loadedFromCache),
profilingEnabled_(profilingEnabled),
funcLabels_(Move(funcLabels))
{
MOZ_ASSERT_IF(!profilingEnabled, funcLabels_.empty());
MOZ_ASSERT_IF(profilingEnabled, funcNames_.length() == funcLabels_.length());
init();
}
// Public constructor for compilation.
Module::Module(CompileArgs args,
uint32_t functionBytes,
uint32_t codeBytes,
uint32_t globalBytes,
HeapUsage heapUsage,
MutedBool mutedErrors,
UniqueCodePtr code,
ImportVector&& imports,
ExportVector&& exports,
HeapAccessVector&& heapAccesses,
CodeRangeVector&& codeRanges,
CallSiteVector&& callSites,
CacheableCharsVector&& funcNames,
CacheableChars filename,
CacheableTwoByteChars displayURL)
: pod(zeroPod()),
code_(Move(code)),
imports_(Move(imports)),
exports_(Move(exports)),
heapAccesses_(Move(heapAccesses)),
codeRanges_(Move(codeRanges)),
callSites_(Move(callSites)),
funcNames_(Move(funcNames)),
filename_(Move(filename)),
displayURL_(Move(displayURL)),
loadedFromCache_(false),
profilingEnabled_(false)
{
// Work around MSVC 2013 bug around {} member initialization.
const_cast<uint32_t&>(pod.functionBytes_) = functionBytes;
const_cast<uint32_t&>(pod.codeBytes_) = codeBytes;
const_cast<uint32_t&>(pod.globalBytes_) = globalBytes;
const_cast<HeapUsage&>(pod.heapUsage_) = heapUsage;
const_cast<bool&>(pod.mutedErrors_) = bool(mutedErrors);
const_cast<bool&>(pod.usesSignalHandlersForOOB_) = args.useSignalHandlersForOOB;
const_cast<bool&>(pod.usesSignalHandlersForInterrupt_) = args.useSignalHandlersForInterrupt;
init();
}
Module::~Module()
{
if (code_) {
for (unsigned i = 0; i < imports_.length(); i++) {
ImportExit& exit = importToExit(imports_[i]);
if (exit.baselineScript)
exit.baselineScript->removeDependentWasmModule(*this, i);
}
for (unsigned i = 0; i < imports().length(); i++) {
ImportExit& exit = importToExit(imports()[i]);
if (exit.baselineScript)
exit.baselineScript->removeDependentWasmModule(*this, i);
}
}
void
Module::trace(JSTracer* trc)
{
for (const Import& import : imports_) {
for (const Import& import : imports()) {
if (importToExit(import).fun)
TraceEdge(trc, &importToExit(import).fun, "wasm function import");
}
@ -825,25 +857,16 @@ Module::trace(JSTracer* trc)
TraceEdge(trc, &heap_, "wasm buffer");
}
CompileArgs
Module::compileArgs() const
{
CompileArgs args;
args.useSignalHandlersForOOB = pod.usesSignalHandlersForOOB_;
args.useSignalHandlersForInterrupt = pod.usesSignalHandlersForInterrupt_;
return args;
}
bool
Module::containsFunctionPC(void* pc) const
{
return pc >= code() && pc < (code() + pod.functionBytes_);
return pc >= code() && pc < (code() + module_->functionBytes);
}
bool
Module::containsCodePC(void* pc) const
{
return pc >= code() && pc < (code() + pod.codeBytes_);
return pc >= code() && pc < (code() + codeBytes());
}
struct CallSiteRetAddrOffset
@ -860,13 +883,13 @@ Module::lookupCallSite(void* returnAddress) const
{
uint32_t target = ((uint8_t*)returnAddress) - code();
size_t lowerBound = 0;
size_t upperBound = callSites_.length();
size_t upperBound = module_->callSites.length();
size_t match;
if (!BinarySearch(CallSiteRetAddrOffset(callSites_), lowerBound, upperBound, target, &match))
if (!BinarySearch(CallSiteRetAddrOffset(module_->callSites), lowerBound, upperBound, target, &match))
return nullptr;
return &callSites_[match];
return &module_->callSites[match];
}
const CodeRange*
@ -874,13 +897,13 @@ Module::lookupCodeRange(void* pc) const
{
CodeRange::PC target((uint8_t*)pc - code());
size_t lowerBound = 0;
size_t upperBound = codeRanges_.length();
size_t upperBound = module_->codeRanges.length();
size_t match;
if (!BinarySearch(codeRanges_, lowerBound, upperBound, target, &match))
if (!BinarySearch(module_->codeRanges, lowerBound, upperBound, target, &match))
return nullptr;
return &codeRanges_[match];
return &module_->codeRanges[match];
}
struct HeapAccessOffset
@ -899,13 +922,13 @@ Module::lookupHeapAccess(void* pc) const
uint32_t target = ((uint8_t*)pc) - code();
size_t lowerBound = 0;
size_t upperBound = heapAccesses_.length();
size_t upperBound = module_->heapAccesses.length();
size_t match;
if (!BinarySearch(HeapAccessOffset(heapAccesses_), lowerBound, upperBound, target, &match))
if (!BinarySearch(HeapAccessOffset(module_->heapAccesses), lowerBound, upperBound, target, &match))
return nullptr;
return &heapAccesses_[match];
return &module_->heapAccesses[match];
}
bool
@ -920,7 +943,7 @@ Module::staticallyLink(ExclusiveContext* cx, const StaticLinkData& linkData)
JitContext jcx(CompileRuntime::get(cx->compartment()->runtimeFromAnyThread()));
MOZ_ASSERT(IsCompilingAsmJS());
AutoFlushICache afc("Module::staticallyLink", /* inhibit = */ true);
AutoFlushICache::setRange(uintptr_t(code()), pod.codeBytes_);
AutoFlushICache::setRange(uintptr_t(code()), codeBytes());
interrupt_ = code() + linkData.pod.interruptOffset;
outOfBounds_ = code() + linkData.pod.outOfBoundsOffset;
@ -983,7 +1006,7 @@ Module::staticallyLink(ExclusiveContext* cx, const StaticLinkData& linkData)
bool
Module::dynamicallyLink(JSContext* cx, Handle<ArrayBufferObjectMaybeShared*> heap,
const AutoVectorRooter<JSFunction*>& imports)
const AutoVectorRooter<JSFunction*>& importArgs)
{
MOZ_ASSERT(staticallyLinked_);
MOZ_ASSERT(!dynamicallyLinked_);
@ -994,15 +1017,15 @@ Module::dynamicallyLink(JSContext* cx, Handle<ArrayBufferObjectMaybeShared*> hea
JitContext jcx(CompileRuntime::get(cx->compartment()->runtimeFromAnyThread()));
MOZ_ASSERT(IsCompilingAsmJS());
AutoFlushICache afc("Module::dynamicallyLink");
AutoFlushICache::setRange(uintptr_t(code()), pod.codeBytes_);
AutoFlushICache::setRange(uintptr_t(code()), codeBytes());
// Initialize imports with actual imported values.
MOZ_ASSERT(imports.length() == imports_.length());
for (size_t i = 0; i < imports_.length(); i++) {
const Import& import = imports_[i];
MOZ_ASSERT(importArgs.length() == imports().length());
for (size_t i = 0; i < imports().length(); i++) {
const Import& import = imports()[i];
ImportExit& exit = importToExit(import);
exit.code = code() + import.interpExitCodeOffset();
exit.fun = imports[i];
exit.fun = importArgs[i];
exit.baselineScript = nullptr;
}
@ -1011,7 +1034,7 @@ Module::dynamicallyLink(JSContext* cx, Handle<ArrayBufferObjectMaybeShared*> hea
specializeToHeap(heap);
// See AllocateCode comment above.
if (!ExecutableAllocator::makeExecutable(code(), pod.codeBytes_)) {
if (!ExecutableAllocator::makeExecutable(code(), codeBytes())) {
ReportOutOfMemory(cx);
return false;
}
@ -1043,7 +1066,7 @@ void
Module::deoptimizeImportExit(uint32_t importIndex)
{
MOZ_ASSERT(dynamicallyLinked_);
const Import& import = imports_[importIndex];
const Import& import = imports()[importIndex];
ImportExit& exit = importToExit(import);
exit.code = code() + import.interpExitCodeOffset();
exit.baselineScript = nullptr;
@ -1054,7 +1077,7 @@ Module::callExport(JSContext* cx, uint32_t exportIndex, CallArgs args)
{
MOZ_ASSERT(dynamicallyLinked_);
const Export& exp = exports_[exportIndex];
const Export& exp = exports()[exportIndex];
// Enable/disable profiling in the Module to match the current global
// profiling state. Don't do this if the Module is already active on the
@ -1190,7 +1213,7 @@ Module::callImport(JSContext* cx, uint32_t importIndex, unsigned argc, const Val
{
MOZ_ASSERT(dynamicallyLinked_);
const Import& import = imports_[importIndex];
const Import& import = imports()[importIndex];
RootedValue fval(cx, ObjectValue(*importToExit(import).fun));
if (!Invoke(cx, UndefinedValue(), fval, argc, argv, rval))
@ -1264,211 +1287,93 @@ Module::profilingLabel(uint32_t funcIndex) const
return funcLabels_[funcIndex].get();
}
size_t
Module::serializedSize() const
void
Module::addSizeOfMisc(MallocSizeOf mallocSizeOf, size_t* code, size_t* data)
{
return sizeof(pod) +
pod.codeBytes_ +
SerializedVectorSize(imports_) +
SerializedVectorSize(exports_) +
SerializedPodVectorSize(heapAccesses_) +
SerializedPodVectorSize(codeRanges_) +
SerializedPodVectorSize(callSites_) +
SerializedVectorSize(funcNames_) +
filename_.serializedSize() +
displayURL_.serializedSize();
*code += codeBytes();
*data += mallocSizeOf(this) +
globalBytes() +
mallocSizeOf(module_.get()) +
module_->sizeOfExcludingThis(mallocSizeOf) +
funcPtrTables_.sizeOfExcludingThis(mallocSizeOf) +
SizeOfVectorExcludingThis(funcLabels_, mallocSizeOf);
}
uint8_t*
Module::serialize(uint8_t* cursor) const
{
MOZ_ASSERT(!profilingEnabled_, "assumed by Module::deserialize");
const Class WasmModuleObject::class_ = {
"WasmModuleObject",
JSCLASS_IS_ANONYMOUS | JSCLASS_DELAY_METADATA_CALLBACK |
JSCLASS_HAS_RESERVED_SLOTS(WasmModuleObject::RESERVED_SLOTS),
nullptr, /* addProperty */
nullptr, /* delProperty */
nullptr, /* getProperty */
nullptr, /* setProperty */
nullptr, /* enumerate */
nullptr, /* resolve */
nullptr, /* mayResolve */
WasmModuleObject::finalize,
nullptr, /* call */
nullptr, /* hasInstance */
nullptr, /* construct */
WasmModuleObject::trace
};
cursor = WriteBytes(cursor, &pod, sizeof(pod));
cursor = WriteBytes(cursor, code(), pod.codeBytes_);
cursor = SerializeVector(cursor, imports_);
cursor = SerializeVector(cursor, exports_);
cursor = SerializePodVector(cursor, heapAccesses_);
cursor = SerializePodVector(cursor, codeRanges_);
cursor = SerializePodVector(cursor, callSites_);
cursor = SerializeVector(cursor, funcNames_);
cursor = filename_.serialize(cursor);
cursor = displayURL_.serialize(cursor);
return cursor;
bool
WasmModuleObject::hasModule() const
{
MOZ_ASSERT(is<WasmModuleObject>());
return !getReservedSlot(MODULE_SLOT).isUndefined();
}
/* static */ const uint8_t*
Module::deserialize(ExclusiveContext* cx, const uint8_t* cursor, UniqueModule* out)
/* static */ void
WasmModuleObject::finalize(FreeOp* fop, JSObject* obj)
{
CacheablePod pod = zeroPod();
cursor = ReadBytes(cursor, &pod, sizeof(pod));
if (!cursor)
return nullptr;
UniqueCodePtr code = AllocateCode(cx, pod.codeBytes_ + pod.globalBytes_);
if (!code)
return nullptr;
cursor = ReadBytes(cursor, code.get(), pod.codeBytes_);
ImportVector imports;
cursor = DeserializeVector(cx, cursor, &imports);
if (!cursor)
return nullptr;
ExportVector exports;
cursor = DeserializeVector(cx, cursor, &exports);
if (!cursor)
return nullptr;
HeapAccessVector heapAccesses;
cursor = DeserializePodVector(cx, cursor, &heapAccesses);
if (!cursor)
return nullptr;
CodeRangeVector codeRanges;
cursor = DeserializePodVector(cx, cursor, &codeRanges);
if (!cursor)
return nullptr;
CallSiteVector callSites;
cursor = DeserializePodVector(cx, cursor, &callSites);
if (!cursor)
return nullptr;
CacheableCharsVector funcNames;
cursor = DeserializeVector(cx, cursor, &funcNames);
if (!cursor)
return nullptr;
CacheableChars filename;
cursor = filename.deserialize(cx, cursor);
if (!cursor)
return nullptr;
CacheableTwoByteChars displayURL;
cursor = displayURL.deserialize(cx, cursor);
if (!cursor)
return nullptr;
*out = cx->make_unique<Module>(pod,
Move(code),
Move(imports),
Move(exports),
Move(heapAccesses),
Move(codeRanges),
Move(callSites),
Move(funcNames),
Move(filename),
Move(displayURL),
Module::LoadedFromCache,
Module::ProfilingDisabled,
FuncLabelVector());
return cursor;
WasmModuleObject& moduleObj = obj->as<WasmModuleObject>();
if (moduleObj.hasModule())
fop->delete_(&moduleObj.module());
}
Module::UniqueModule
Module::clone(JSContext* cx, const StaticLinkData& linkData) const
/* static */ void
WasmModuleObject::trace(JSTracer* trc, JSObject* obj)
{
MOZ_ASSERT(dynamicallyLinked_);
WasmModuleObject& moduleObj = obj->as<WasmModuleObject>();
if (moduleObj.hasModule())
moduleObj.module().trace(trc);
}
UniqueCodePtr code = AllocateCode(cx, totalBytes());
if (!code)
/* static */ WasmModuleObject*
WasmModuleObject::create(ExclusiveContext* cx)
{
AutoSetNewObjectMetadata metadata(cx);
JSObject* obj = NewObjectWithGivenProto(cx, &WasmModuleObject::class_, nullptr);
if (!obj)
return nullptr;
memcpy(code.get(), this->code(), pod.codeBytes_);
return &obj->as<WasmModuleObject>();
}
#ifdef DEBUG
// Put the symbolic links back to -1 so PatchDataWithValueCheck assertions
// in Module::staticallyLink are valid.
for (auto imm : MakeEnumeratedRange(SymbolicAddress::Limit)) {
void* callee = AddressOf(imm, cx);
const StaticLinkData::OffsetVector& offsets = linkData.symbolicLinks[imm];
for (uint32_t offset : offsets) {
jit::Assembler::PatchDataWithValueCheck(jit::CodeLocationLabel(code.get() + offset),
jit::PatchedImmPtr((void*)-1),
jit::PatchedImmPtr(callee));
}
}
#endif
bool
WasmModuleObject::init(Module* module)
{
MOZ_ASSERT(is<WasmModuleObject>());
MOZ_ASSERT(!hasModule());
if (!module)
return false;
setReservedSlot(MODULE_SLOT, PrivateValue(module));
return true;
}
ImportVector imports;
if (!CloneVector(cx, imports_, &imports))
return nullptr;
ExportVector exports;
if (!CloneVector(cx, exports_, &exports))
return nullptr;
HeapAccessVector heapAccesses;
if (!ClonePodVector(cx, heapAccesses_, &heapAccesses))
return nullptr;
CodeRangeVector codeRanges;
if (!ClonePodVector(cx, codeRanges_, &codeRanges))
return nullptr;
CallSiteVector callSites;
if (!ClonePodVector(cx, callSites_, &callSites))
return nullptr;
CacheableCharsVector funcNames;
if (!CloneVector(cx, funcNames_, &funcNames))
return nullptr;
CacheableChars filename;
if (!filename_.clone(cx, &filename))
return nullptr;
CacheableTwoByteChars displayURL;
if (!displayURL_.clone(cx, &displayURL))
return nullptr;
FuncLabelVector funcLabels;
if (!CloneVector(cx, funcLabels_, &funcLabels))
return nullptr;
// Must not GC between Module allocation and (successful) return.
auto out = cx->make_unique<Module>(pod,
Move(code),
Move(imports),
Move(exports),
Move(heapAccesses),
Move(codeRanges),
Move(callSites),
Move(funcNames),
Move(filename),
Move(displayURL),
CacheBool::NotLoadedFromCache,
ProfilingBool(profilingEnabled_),
Move(funcLabels));
if (!out)
return nullptr;
// If the copied machine code has been specialized to the heap, it must be
// unspecialized in the copy.
if (usesHeap())
out->despecializeFromHeap(heap_);
if (!out->staticallyLink(cx, linkData))
return nullptr;
return Move(out);
Module&
WasmModuleObject::module() const
{
MOZ_ASSERT(is<WasmModuleObject>());
MOZ_ASSERT(hasModule());
return *(Module*)getReservedSlot(MODULE_SLOT).toPrivate();
}
void
Module::addSizeOfMisc(MallocSizeOf mallocSizeOf, size_t* asmJSModuleCode, size_t* asmJSModuleData)
WasmModuleObject::addSizeOfMisc(MallocSizeOf mallocSizeOf, size_t* code, size_t* data)
{
*asmJSModuleCode += pod.codeBytes_;
*asmJSModuleData += mallocSizeOf(this) +
pod.globalBytes_ +
SizeOfVectorExcludingThis(imports_, mallocSizeOf) +
SizeOfVectorExcludingThis(exports_, mallocSizeOf) +
heapAccesses_.sizeOfExcludingThis(mallocSizeOf) +
codeRanges_.sizeOfExcludingThis(mallocSizeOf) +
callSites_.sizeOfExcludingThis(mallocSizeOf) +
funcNames_.sizeOfExcludingThis(mallocSizeOf) +
funcPtrTables_.sizeOfExcludingThis(mallocSizeOf);
if (hasModule())
module().addSizeOfMisc(mallocSizeOf, code, data);
}

Просмотреть файл

@ -25,6 +25,7 @@
namespace js {
class AsmJSModule;
class WasmActivation;
namespace jit { struct BaselineScript; }
@ -34,8 +35,7 @@ namespace wasm {
// deserialization and cloning. Some data can be simply copied as raw bytes and,
// as a convention, is stored in an inline CacheablePod struct. Everything else
// should implement the below methods which are called recusively by the
// containing Module. The implementation of all these methods are grouped
// together in WasmSerialize.cpp.
// containing Module. See comments for these methods in wasm::Module.
#define WASM_DECLARE_SERIALIZABLE(Type) \
size_t serializedSize() const; \
@ -312,6 +312,7 @@ class CodeDeleter
{
uint32_t bytes_;
public:
CodeDeleter() : bytes_(0) {}
explicit CodeDeleter(uint32_t bytes) : bytes_(bytes) {}
void operator()(uint8_t* p);
};
@ -336,6 +337,55 @@ UsesHeap(HeapUsage heapUsage)
return bool(heapUsage);
}
// See mutedErrors comment in jsapi.h.
enum class MutedErrorsBool
{
DontMuteErrors = false,
MuteErrors = true
};
// ModuleCacheablePod holds the trivially-memcpy()able serializable portion of
// ModuleData.
struct ModuleCacheablePod
{
uint32_t functionBytes;
uint32_t codeBytes;
uint32_t globalBytes;
HeapUsage heapUsage;
MutedErrorsBool mutedErrors;
CompileArgs compileArgs;
uint32_t totalBytes() const { return codeBytes + globalBytes; }
};
// ModuleData holds the guts of a Module. ModuleData is mutably built up by
// ModuleGenerator and then handed over to the Module constructor in finish(),
// where it is stored immutably.
struct ModuleData : ModuleCacheablePod
{
ModuleData() : loadedFromCache(false) { mozilla::PodZero(&pod()); }
ModuleCacheablePod& pod() { return *this; }
const ModuleCacheablePod& pod() const { return *this; }
UniqueCodePtr code;
ImportVector imports;
ExportVector exports;
HeapAccessVector heapAccesses;
CodeRangeVector codeRanges;
CallSiteVector callSites;
CacheableCharsVector funcNames;
CacheableChars filename;
CacheableTwoByteChars displayURL;
bool loadedFromCache;
WASM_DECLARE_SERIALIZABLE(ModuleData);
};
typedef UniquePtr<ModuleData, JS::DeletePolicy<ModuleData>> UniqueModuleData;
// Module represents a compiled WebAssembly module which lives until the last
// reference to any exported functions is dropped. Modules must be wrapped by a
// rooted JSObject immediately after creation so that Module::trace() is called
@ -358,6 +408,7 @@ UsesHeap(HeapUsage heapUsage)
class Module
{
typedef UniquePtr<const ModuleData, JS::DeletePolicy<const ModuleData>> UniqueConstModuleData;
struct ImportExit {
void* code;
jit::BaselineScript* baselineScript;
@ -382,25 +433,8 @@ class Module
typedef RelocatablePtrArrayBufferObjectMaybeShared BufferPtr;
// Initialized when constructed:
struct CacheablePod {
const uint32_t functionBytes_;
const uint32_t codeBytes_;
const uint32_t globalBytes_;
const HeapUsage heapUsage_;
const bool mutedErrors_;
const bool usesSignalHandlersForOOB_;
const bool usesSignalHandlersForInterrupt_;
} pod;
const UniqueCodePtr code_;
const ImportVector imports_;
const ExportVector exports_;
const HeapAccessVector heapAccesses_;
const CodeRangeVector codeRanges_;
const CallSiteVector callSites_;
const CacheableCharsVector funcNames_;
const CacheableChars filename_;
const CacheableTwoByteChars displayURL_;
const bool loadedFromCache_;
const UniqueConstModuleData module_;
bool isAsmJS_;
// Initialized during staticallyLink:
bool staticallyLinked_;
@ -416,9 +450,6 @@ class Module
bool profilingEnabled_;
FuncLabelVector funcLabels_;
class AutoMutateCode;
uint32_t totalBytes() const;
uint8_t* rawHeapPtr() const;
uint8_t*& rawHeapPtr();
WasmActivation*& activation();
@ -428,70 +459,50 @@ class Module
MOZ_WARN_UNUSED_RESULT bool setProfilingEnabled(JSContext* cx, bool enabled);
ImportExit& importToExit(const Import& import);
enum CacheBool { NotLoadedFromCache = false, LoadedFromCache = true };
enum ProfilingBool { ProfilingDisabled = false, ProfilingEnabled = true };
static CacheablePod zeroPod();
void init();
Module(const CacheablePod& pod,
UniqueCodePtr code,
ImportVector&& imports,
ExportVector&& exports,
HeapAccessVector&& heapAccesses,
CodeRangeVector&& codeRanges,
CallSiteVector&& callSites,
CacheableCharsVector&& funcNames,
CacheableChars filename,
CacheableTwoByteChars displayURL,
CacheBool loadedFromCache,
ProfilingBool profilingEnabled,
FuncLabelVector&& funcLabels);
template <class> friend struct js::MallocProvider;
friend class js::WasmActivation;
protected:
enum AsmJSBool { NotAsmJS = false, IsAsmJS = true };
const ModuleData& base() const { return *module_; }
bool clone(JSContext* cx, const StaticLinkData& link, Module* clone) const;
public:
static const unsigned SizeOfImportExit = sizeof(ImportExit);
static const unsigned OffsetOfImportExitFun = offsetof(ImportExit, fun);
static const unsigned SizeOfEntryArg = sizeof(EntryArg);
enum MutedBool { DontMuteErrors = false, MuteErrors = true };
explicit Module(UniqueModuleData module, AsmJSBool = NotAsmJS);
virtual ~Module();
virtual void trace(JSTracer* trc);
virtual void addSizeOfMisc(MallocSizeOf mallocSizeOf, size_t* code, size_t* data);
Module(CompileArgs args,
uint32_t functionBytes,
uint32_t codeBytes,
uint32_t globalBytes,
HeapUsage heapUsage,
MutedBool mutedErrors,
UniqueCodePtr code,
ImportVector&& imports,
ExportVector&& exports,
HeapAccessVector&& heapAccesses,
CodeRangeVector&& codeRanges,
CallSiteVector&& callSites,
CacheableCharsVector&& funcNames,
CacheableChars filename,
CacheableTwoByteChars displayURL);
~Module();
void trace(JSTracer* trc);
uint8_t* code() const { return code_.get(); }
uint8_t* globalData() const { return code() + pod.codeBytes_; }
uint32_t globalBytes() const { return pod.globalBytes_; }
HeapUsage heapUsage() const { return pod.heapUsage_; }
bool usesHeap() const { return UsesHeap(pod.heapUsage_); }
bool hasSharedHeap() const { return pod.heapUsage_ == HeapUsage::Shared; }
bool mutedErrors() const { return pod.mutedErrors_; }
CompileArgs compileArgs() const;
const ImportVector& imports() const { return imports_; }
const ExportVector& exports() const { return exports_; }
const char* functionName(uint32_t i) const { return funcNames_[i].get(); }
const char* filename() const { return filename_.get(); }
const char16_t* displayURL() const { return displayURL_.get(); }
bool loadedFromCache() const { return loadedFromCache_; }
uint8_t* code() const { return module_->code.get(); }
uint32_t codeBytes() const { return module_->codeBytes; }
uint8_t* globalData() const { return code() + module_->codeBytes; }
uint32_t globalBytes() const { return module_->globalBytes; }
HeapUsage heapUsage() const { return module_->heapUsage; }
bool usesHeap() const { return UsesHeap(module_->heapUsage); }
bool hasSharedHeap() const { return module_->heapUsage == HeapUsage::Shared; }
bool mutedErrors() const { return bool(module_->mutedErrors); }
CompileArgs compileArgs() const { return module_->compileArgs; }
const ImportVector& imports() const { return module_->imports; }
const ExportVector& exports() const { return module_->exports; }
const char* functionName(uint32_t i) const { return module_->funcNames[i].get(); }
const char* filename() const { return module_->filename.get(); }
const char16_t* displayURL() const { return module_->displayURL.get(); }
bool loadedFromCache() const { return module_->loadedFromCache; }
bool staticallyLinked() const { return staticallyLinked_; }
bool dynamicallyLinked() const { return dynamicallyLinked_; }
// Some wasm::Module's have the most-derived type AsmJSModule. The
// AsmJSModule stores the extra metadata necessary to implement asm.js (JS)
// semantics. The asAsmJS() member may be used as a checked downcast when
// isAsmJS() is true.
bool isAsmJS() const { return isAsmJS_; }
AsmJSModule& asAsmJS() { MOZ_ASSERT(isAsmJS_); return *(AsmJSModule*)this; }
const AsmJSModule& asAsmJS() const { MOZ_ASSERT(isAsmJS_); return *(const AsmJSModule*)this; }
// The range [0, functionBytes) is a subrange of [0, codeBytes) that
// contains only function body code, not the stub code. This distinction is
// used by the async interrupt handler to only interrupt when the pc is in
@ -508,7 +519,7 @@ class Module
// statically-linked state. The given StaticLinkData must have come from the
// compilation of this module.
bool staticallyLink(ExclusiveContext* cx, const StaticLinkData& linkData);
bool staticallyLink(ExclusiveContext* cx, const StaticLinkData& link);
// This function transitions the module from a statically-linked state to a
// dynamically-linked state. If this module usesHeap(), a non-null heap
@ -553,19 +564,32 @@ class Module
bool profilingEnabled() const { return profilingEnabled_; }
const char* profilingLabel(uint32_t funcIndex) const;
};
// See WASM_DECLARE_SERIALIZABLE.
size_t serializedSize() const;
uint8_t* serialize(uint8_t* cursor) const;
typedef UniquePtr<Module, JS::DeletePolicy<Module>> UniqueModule;
static const uint8_t* deserialize(ExclusiveContext* cx, const uint8_t* cursor,
UniqueModule* out);
UniqueModule clone(JSContext* cx, const StaticLinkData& linkData) const;
void addSizeOfMisc(MallocSizeOf mallocSizeOf, size_t* asmJSModuleCode,
size_t* asmJSModuleData);
typedef UniquePtr<Module, JS::DeletePolicy<Module>> UniqueModule;
} // namespace wasm
// An WasmModuleObject is an internal object (i.e., not exposed directly to user
// code) which traces and owns a wasm::Module. The WasmModuleObject is
// referenced by the extended slots of exported JSFunctions and serves to keep
// the wasm::Module alive until its last GC reference is dead.
class WasmModuleObject : public NativeObject
{
static const unsigned MODULE_SLOT = 0;
bool hasModule() const;
static void finalize(FreeOp* fop, JSObject* obj);
static void trace(JSTracer* trc, JSObject* obj);
public:
static const unsigned RESERVED_SLOTS = 1;
static WasmModuleObject* create(ExclusiveContext* cx);
bool init(wasm::Module* module);
wasm::Module& module() const;
void addSizeOfMisc(mozilla::MallocSizeOf mallocSizeOf, size_t* code, size_t* data);
static const Class class_;
};
} // namespace js
} // namespace wasm
#endif // wasm_module_h

Просмотреть файл

@ -38,7 +38,7 @@
#include "jswin.h"
#include "jswrapper.h"
#include "asmjs/AsmJS.h"
#include "asmjs/WasmModule.h"
#include "builtin/Eval.h"
#include "builtin/Object.h"
#include "builtin/SymbolObject.h"
@ -3791,9 +3791,9 @@ JSObject::addSizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf, JS::ClassIn
ArrayBufferObject::addSizeOfExcludingThis(this, mallocSizeOf, info);
} else if (is<SharedArrayBufferObject>()) {
SharedArrayBufferObject::addSizeOfExcludingThis(this, mallocSizeOf, info);
} else if (is<AsmJSModuleObject>()) {
as<AsmJSModuleObject>().addSizeOfMisc(mallocSizeOf, &info->objectsNonHeapCodeAsmJS,
&info->objectsMallocHeapMisc);
} else if (is<WasmModuleObject>()) {
as<WasmModuleObject>().addSizeOfMisc(mallocSizeOf, &info->objectsNonHeapCodeAsmJS,
&info->objectsMallocHeapMisc);
#ifdef JS_HAS_CTYPES
} else {
// This must be the last case.

Просмотреть файл

@ -816,6 +816,9 @@ class ScriptSourceHolder
{
ScriptSource* ss;
public:
ScriptSourceHolder()
: ss(nullptr)
{}
explicit ScriptSourceHolder(ScriptSource* ss)
: ss(ss)
{
@ -823,7 +826,14 @@ class ScriptSourceHolder
}
~ScriptSourceHolder()
{
ss->decref();
if (ss)
ss->decref();
}
void reset(ScriptSource* newss) {
if (ss)
ss->decref();
ss = newss;
ss->incref();
}
ScriptSource* get() const {
return ss;