Backed out changeset 74a0a0207e08 (bug 1338217) for linux failures

Carsten "Tomcat" Book 2017-05-03 15:27:26 +02:00
Parent b3a8a13d69
Commit 0b5ec8c1f2
13 changed files with 217 additions and 502 deletions

View file

@@ -272,7 +272,6 @@ struct StatsClosure
SourceSet seenSources;
wasm::Metadata::SeenSet wasmSeenMetadata;
wasm::ShareableBytes::SeenSet wasmSeenBytes;
wasm::Code::SeenSet wasmSeenCode;
wasm::Table::SeenSet wasmSeenTables;
bool anonymize;
@@ -286,7 +285,6 @@ struct StatsClosure
return seenSources.init() &&
wasmSeenMetadata.init() &&
wasmSeenBytes.init() &&
wasmSeenCode.init() &&
wasmSeenTables.init();
}
};
@@ -477,7 +475,6 @@ StatsCellCallback(JSRuntime* rt, void* data, void* thing, JS::TraceKind traceKin
module.addSizeOfMisc(rtStats->mallocSizeOf_,
&closure->wasmSeenMetadata,
&closure->wasmSeenBytes,
&closure->wasmSeenCode,
&info.objectsNonHeapCodeWasm,
&info.objectsMallocHeapMisc);
} else if (obj->is<WasmInstanceObject>()) {
@@ -487,7 +484,6 @@ StatsCellCallback(JSRuntime* rt, void* data, void* thing, JS::TraceKind traceKin
instance.addSizeOfMisc(rtStats->mallocSizeOf_,
&closure->wasmSeenMetadata,
&closure->wasmSeenBytes,
&closure->wasmSeenCode,
&closure->wasmSeenTables,
&info.objectsNonHeapCodeWasm,
&info.objectsMallocHeapMisc);

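The wasmSeen* sets threaded through StatsClosure exist so that structures shared between several wasm objects are counted only once by about:memory, not once per owner. A minimal sketch of that seen-set pattern, using std::unordered_set in place of SpiderMonkey's js::HashSet (names here are illustrative):

#include <cstddef>
#include <unordered_set>

struct Metadata;  // stand-in for a shared, refcounted structure

using SeenSet = std::unordered_set<const Metadata*>;

// Report a structure's size only the first time any owner asks about it.
size_t SizeOfMetadataIfNotSeen(const Metadata* metadata, SeenSet* seen,
                               size_t shallowSize)
{
    // insert() reports whether the pointer was already present, i.e.
    // whether another owner has already counted this structure.
    if (!seen->insert(metadata).second)
        return 0;
    return shallowSize;  // real code: mallocSizeOf(metadata) + children
}
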
View file

@@ -34,6 +34,7 @@
_(GeckoProfilerStrings, 500) \
_(ProtectedRegionTree, 500) \
_(WasmSigIdSet, 500) \
_(WasmCodeProfilingLabels, 500) \
_(ShellOffThreadState, 500) \
_(SimulatorCacheLock, 500) \
_(Arm64SimulatorLock, 500) \
@@ -45,9 +46,6 @@
_(IcuTimeZoneStateMutex, 500) \
_(ProcessExecutableRegion, 500) \
\
/* WasmCodeProfilingLabels > PromiseTaskPtrVector */ \
_(WasmCodeProfilingLabels, 550) \
\
_(TraceLoggerGraphState, 600) \
_(VTuneLock, 600)

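The numbers in this table are lock-ordering ranks: debug builds assert that a thread acquires mutexes in increasing rank order, so a constraint like the comment's "WasmCodeProfilingLabels > PromiseTaskPtrVector" is expressed by giving the former a larger number. A minimal sketch of that discipline (not SpiderMonkey's actual Mutex implementation):

#include <cassert>
#include <mutex>
#include <vector>

struct OrderedMutex {
    int order;         // e.g. 500 for WasmCodeProfilingLabels above
    std::mutex inner;
};

// Ranks of the locks the current thread holds, innermost last.
thread_local std::vector<int> heldOrders;

void Acquire(OrderedMutex& m)
{
    // The new lock must rank strictly above every lock already held,
    // which rules out cycles and therefore deadlocks.
    assert(heldOrders.empty() || heldOrders.back() < m.order);
    m.inner.lock();
    heldOrders.push_back(m.order);
}

void Release(OrderedMutex& m)
{
    assert(!heldOrders.empty() && heldOrders.back() == m.order);
    heldOrders.pop_back();
    m.inner.unlock();
}
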
View file

@@ -64,7 +64,7 @@ RoundupCodeLength(uint32_t codeLength)
}
static uint8_t*
AllocateCodeBytes(uint32_t codeLength)
AllocateCodeSegment(JSContext* cx, uint32_t codeLength)
{
codeLength = RoundupCodeLength(codeLength);
@@ -83,22 +83,20 @@ AllocateCodeBytes(uint32_t codeLength)
}
}
if (!p)
if (!p) {
ReportOutOfMemory(cx);
return nullptr;
}
// We account for the bytes allocated in WasmModuleObject::create, where we
// have the necessary JSContext.
cx->zone()->updateJitCodeMallocBytes(codeLength);
wasmCodeAllocations++;
return (uint8_t*)p;
}
static void
FreeCodeBytes(uint8_t* bytes, uint32_t codeLength)
FreeCodeSegment(uint8_t* bytes, uint32_t codeLength)
{
MOZ_ASSERT(wasmCodeAllocations > 0);
wasmCodeAllocations--;
codeLength = RoundupCodeLength(codeLength);
#ifdef MOZ_VTUNE
vtune::UnmarkBytes(bytes, codeLength);
@@ -138,33 +136,6 @@ StaticallyLink(const CodeSegment& cs, const LinkData& linkData)
return true;
}
static void
StaticallyUnlink(uint8_t* base, const LinkData& linkData)
{
for (LinkData::InternalLink link : linkData.internalLinks) {
uint8_t* patchAt = base + link.patchAtOffset;
void* target = 0;
if (link.isRawPointerPatch())
*(void**)(patchAt) = target;
else
Assembler::PatchInstructionImmediate(patchAt, PatchedImmPtr(target));
}
for (auto imm : MakeEnumeratedRange(SymbolicAddress::Limit)) {
const Uint32Vector& offsets = linkData.symbolicLinks[imm];
if (offsets.empty())
continue;
void* target = SymbolicAddressTarget(imm);
for (uint32_t offset : offsets) {
uint8_t* patchAt = base + offset;
Assembler::PatchDataWithValueCheck(CodeLocationLabel(patchAt),
PatchedImmPtr((void*)-1),
PatchedImmPtr(target));
}
}
}
static void
SendCodeRangesToProfiler(const CodeSegment& cs, const Bytes& bytecode, const Metadata& metadata)
{
@@ -214,99 +185,53 @@ SendCodeRangesToProfiler(const CodeSegment& cs, const Bytes& bytecode, const Met
}
/* static */ UniqueConstCodeSegment
CodeSegment::create(jit::MacroAssembler& masm,
const ShareableBytes& bytecode,
CodeSegment::create(JSContext* cx,
const Bytes& codeBytes,
const SharedBytes& bytecode,
const LinkData& linkData,
const Metadata& metadata)
{
// Round up the code size to page size since this is eventually required by
// the executable-code allocator and for setting memory protection.
uint32_t bytesNeeded = masm.bytesNeeded();
uint32_t padding = ComputeByteAlignment(bytesNeeded, gc::SystemPageSize());
uint32_t codeLength = bytesNeeded + padding;
MOZ_ASSERT(codeBytes.length() % gc::SystemPageSize() == 0);
MOZ_ASSERT(linkData.functionCodeLength < codeBytes.length());
MOZ_ASSERT(linkData.functionCodeLength < codeLength);
uint8_t* codeBase = AllocateCodeBytes(codeLength);
if (!codeBase)
return nullptr;
// We'll flush the icache after static linking, in initialize().
masm.executableCopy(codeBase, /* flushICache = */ false);
// Zero the padding.
memset(codeBase + bytesNeeded, 0, padding);
return create(codeBase, codeLength, bytecode, linkData, metadata);
}
/* static */ UniqueConstCodeSegment
CodeSegment::create(const Bytes& unlinkedBytes, const ShareableBytes& bytecode,
const LinkData& linkData, const Metadata& metadata)
{
uint32_t codeLength = unlinkedBytes.length();
MOZ_ASSERT(codeLength % gc::SystemPageSize() == 0);
uint8_t* codeBytes = AllocateCodeBytes(codeLength);
if (!codeBytes)
return nullptr;
memcpy(codeBytes, unlinkedBytes.begin(), codeLength);
return create(codeBytes, codeLength, bytecode, linkData, metadata);
}
/* static */ UniqueConstCodeSegment
CodeSegment::create(uint8_t* codeBase, uint32_t codeLength,
const ShareableBytes& bytecode,
const LinkData& linkData,
const Metadata& metadata)
{
// These should always exist and should never be first in the code segment.
MOZ_ASSERT(linkData.interruptOffset != 0);
MOZ_ASSERT(linkData.outOfBoundsOffset != 0);
MOZ_ASSERT(linkData.unalignedAccessOffset != 0);
auto cs = js::MakeUnique<CodeSegment>();
uint8_t* codeBase = AllocateCodeSegment(cx, codeBytes.length());
if (!codeBase)
return nullptr;
auto cs = cx->make_unique<const CodeSegment>(codeBase, linkData.functionCodeLength,
codeBytes.length(),
codeBase + linkData.interruptOffset,
codeBase + linkData.outOfBoundsOffset,
codeBase + linkData.unalignedAccessOffset);
if (!cs) {
FreeCodeBytes(codeBase, codeLength);
FreeCodeSegment(codeBase, codeBytes.length());
return nullptr;
}
if (!cs->initialize(codeBase, codeLength, bytecode, linkData, metadata))
return nullptr;
{
JitContext jcx(CompileRuntime::get(cx->compartment()->runtimeFromAnyThread()));
AutoFlushICache afc("CodeSegment::create");
AutoFlushICache::setRange(uintptr_t(codeBase), cs->length());
return UniqueConstCodeSegment(cs.release());
}
bool
CodeSegment::initialize(uint8_t* codeBase, uint32_t codeLength,
const ShareableBytes& bytecode,
const LinkData& linkData,
const Metadata& metadata)
{
MOZ_ASSERT(bytes_ == nullptr);
bytes_ = codeBase;
// This CodeSegment instance now owns the code bytes, and the CodeSegment's
// destructor will take care of freeing those bytes in the case of error.
functionLength_ = linkData.functionCodeLength;
length_ = codeLength;
interruptCode_ = codeBase + linkData.interruptOffset;
outOfBoundsCode_ = codeBase + linkData.outOfBoundsOffset;
unalignedAccessCode_ = codeBase + linkData.unalignedAccessOffset;
if (!StaticallyLink(*this, linkData))
return false;
ExecutableAllocator::cacheFlush(codeBase, RoundupCodeLength(codeLength));
memcpy(codeBase, codeBytes.begin(), codeBytes.length());
if (!StaticallyLink(*cs, linkData))
return nullptr;
}
// Reprotect the whole region to avoid having separate RW and RX mappings.
if (!ExecutableAllocator::makeExecutable(codeBase, RoundupCodeLength(codeLength)))
return false;
if (!ExecutableAllocator::makeExecutable(codeBase, RoundupCodeLength(cs->length()))) {
ReportOutOfMemory(cx);
return nullptr;
}
SendCodeRangesToProfiler(*this, bytecode.bytes, metadata);
SendCodeRangesToProfiler(*cs, bytecode->bytes, metadata);
return true;
return cs;
}
CodeSegment::~CodeSegment()
@@ -314,69 +239,12 @@ CodeSegment::~CodeSegment()
if (!bytes_)
return;
MOZ_ASSERT(wasmCodeAllocations > 0);
wasmCodeAllocations--;
MOZ_ASSERT(length() > 0);
FreeCodeBytes(bytes_, length());
}
UniqueConstBytes
CodeSegment::unlinkedBytesForDebugging(const LinkData& linkData) const
{
UniqueBytes unlinkedBytes = js::MakeUnique<Bytes>();
if (!unlinkedBytes)
return nullptr;
if (!unlinkedBytes->append(base(), length()))
return nullptr;
StaticallyUnlink(unlinkedBytes->begin(), linkData);
return UniqueConstBytes(unlinkedBytes.release());
}
size_t
CodeSegment::serializedSize() const
{
return sizeof(uint32_t) + length_;
}
void
CodeSegment::addSizeOfMisc(mozilla::MallocSizeOf mallocSizeOf, size_t* code, size_t* data) const
{
*data += mallocSizeOf(this);
*code += RoundupCodeLength(length_);
}
uint8_t*
CodeSegment::serialize(uint8_t* cursor, const LinkData& linkData) const
{
cursor = WriteScalar<uint32_t>(cursor, length_);
uint8_t* base = cursor;
cursor = WriteBytes(cursor, bytes_, length_);
StaticallyUnlink(base, linkData);
return cursor;
}
const uint8_t*
CodeSegment::deserialize(const uint8_t* cursor, const ShareableBytes& bytecode,
const LinkData& linkData, const Metadata& metadata)
{
uint32_t length;
cursor = ReadScalar<uint32_t>(cursor, &length);
if (!cursor)
return nullptr;
MOZ_ASSERT(length % gc::SystemPageSize() == 0);
uint8_t* bytes = AllocateCodeBytes(length);
if (!bytes)
return nullptr;
cursor = ReadBytes(cursor, bytes, length);
if (!cursor) {
FreeCodeBytes(bytes, length);
return nullptr;
}
if (!initialize(bytes, length, bytecode, linkData, metadata))
return nullptr;
return cursor;
FreeCodeSegment(bytes_, length());
}
size_t
@@ -623,11 +491,6 @@ Code::Code(UniqueConstCodeSegment segment,
MOZ_ASSERT_IF(metadata_->debugEnabled, maybeBytecode);
}
Code::Code()
: profilingLabels_(mutexid::WasmCodeProfilingLabels, CacheableCharsVector())
{
}
struct CallSiteRetAddrOffset
{
const CallSiteVector& callSites;
@@ -637,73 +500,25 @@ struct CallSiteRetAddrOffset
}
};
size_t
Code::serializedSize() const
{
return metadata().serializedSize() +
segment().serializedSize();
}
uint8_t*
Code::serialize(uint8_t* cursor, const LinkData& linkData) const
{
MOZ_RELEASE_ASSERT(!metadata().debugEnabled);
cursor = metadata().serialize(cursor);
cursor = segment().serialize(cursor, linkData);
return cursor;
}
const uint8_t*
Code::deserialize(const uint8_t* cursor, const SharedBytes& bytecode, const LinkData& linkData,
Metadata* maybeMetadata)
{
MutableMetadata metadata;
if (maybeMetadata) {
metadata = maybeMetadata;
} else {
metadata = js_new<Metadata>();
if (!metadata)
return nullptr;
}
cursor = metadata->deserialize(cursor);
if (!cursor)
return nullptr;
UniqueCodeSegment codeSegment = js::MakeUnique<CodeSegment>();
if (!codeSegment)
return nullptr;
cursor = codeSegment->deserialize(cursor, *bytecode, linkData, *metadata);
if (!cursor)
return nullptr;
segment_ = UniqueConstCodeSegment(codeSegment.release());
metadata_ = metadata;
maybeBytecode_ = bytecode;
return cursor;
}
const CallSite*
Code::lookupCallSite(void* returnAddress) const
{
uint32_t target = ((uint8_t*)returnAddress) - segment_->base();
size_t lowerBound = 0;
size_t upperBound = metadata().callSites.length();
size_t upperBound = metadata_->callSites.length();
size_t match;
if (!BinarySearch(CallSiteRetAddrOffset(metadata().callSites), lowerBound, upperBound, target, &match))
if (!BinarySearch(CallSiteRetAddrOffset(metadata_->callSites), lowerBound, upperBound, target, &match))
return nullptr;
return &metadata().callSites[match];
return &metadata_->callSites[match];
}
const CodeRange*
Code::lookupRange(void* pc) const
{
CodeRange::OffsetInCode target((uint8_t*)pc - segment_->base());
return LookupInSorted(metadata().codeRanges, target);
return LookupInSorted(metadata_->codeRanges, target);
}
struct MemoryAccessOffset
@@ -722,20 +537,20 @@ Code::lookupMemoryAccess(void* pc) const
uint32_t target = ((uint8_t*)pc) - segment_->base();
size_t lowerBound = 0;
size_t upperBound = metadata().memoryAccesses.length();
size_t upperBound = metadata_->memoryAccesses.length();
size_t match;
if (!BinarySearch(MemoryAccessOffset(metadata().memoryAccesses), lowerBound, upperBound, target, &match))
if (!BinarySearch(MemoryAccessOffset(metadata_->memoryAccesses), lowerBound, upperBound, target, &match))
return nullptr;
return &metadata().memoryAccesses[match];
return &metadata_->memoryAccesses[match];
}
bool
Code::getFuncName(uint32_t funcIndex, UTF8Bytes* name) const
{
const Bytes* maybeBytecode = maybeBytecode_ ? &maybeBytecode_.get()->bytes : nullptr;
return metadata().getFuncName(maybeBytecode, funcIndex, name);
return metadata_->getFuncName(maybeBytecode, funcIndex, name);
}
JSAtom*
@@ -765,7 +580,7 @@ Code::ensureProfilingLabels(bool profilingEnabled) const
if (!labels->empty())
return;
for (const CodeRange& codeRange : metadata().codeRanges) {
for (const CodeRange& codeRange : metadata_->codeRanges) {
if (!codeRange.isFunction())
continue;
@@ -777,7 +592,7 @@ Code::ensureProfilingLabels(bool profilingEnabled) const
if (!getFuncName(codeRange.funcIndex(), &name) || !name.append(" (", 2))
return;
if (const char* filename = metadata().filename.get()) {
if (const char* filename = metadata_->filename.get()) {
if (!name.append(filename, strlen(filename)))
return;
} else {
@@ -816,24 +631,15 @@ Code::profilingLabel(uint32_t funcIndex) const
}
void
Code::addSizeOfMiscIfNotSeen(MallocSizeOf mallocSizeOf,
Metadata::SeenSet* seenMetadata,
ShareableBytes::SeenSet* seenBytes,
Code::SeenSet* seenCode,
size_t* code,
size_t* data) const
Code::addSizeOfMisc(MallocSizeOf mallocSizeOf,
Metadata::SeenSet* seenMetadata,
ShareableBytes::SeenSet* seenBytes,
size_t* code,
size_t* data) const
{
auto p = seenCode->lookupForAdd(this);
if (p)
return;
bool ok = seenCode->add(p, this);
(void)ok; // oh well
*code += segment_->length();
*data += mallocSizeOf(this) +
metadata().sizeOfIncludingThisIfNotSeen(mallocSizeOf, seenMetadata) +
profilingLabels_.lock()->sizeOfExcludingThis(mallocSizeOf);
segment_->addSizeOfMisc(mallocSizeOf, code, data);
metadata_->sizeOfIncludingThisIfNotSeen(mallocSizeOf, seenMetadata);
if (maybeBytecode_)
*data += maybeBytecode_->sizeOfIncludingThisIfNotSeen(mallocSizeOf, seenBytes);
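
CodeSegment::create above follows a strict W^X discipline: the segment is allocated read/write, the compiled code is copied in and statically linked, and only then is the whole page-rounded region flipped to read/execute. A rough POSIX-only sketch of that life cycle (VTune/perf registration and the ARM icache details are omitted; this is not the ExecutableAllocator API):

#include <cstdint>
#include <cstring>
#include <sys/mman.h>
#include <unistd.h>

uint8_t* CreateExecutableCopy(const uint8_t* code, size_t length)
{
    size_t page = size_t(sysconf(_SC_PAGESIZE));
    size_t rounded = (length + page - 1) & ~(page - 1);

    // 1. Map read/write memory; it is not executable yet.
    void* p = mmap(nullptr, rounded, PROT_READ | PROT_WRITE,
                   MAP_PRIVATE | MAP_ANON, -1, 0);
    if (p == MAP_FAILED)
        return nullptr;

    // 2. Copy the machine code and zero the page-rounding padding.
    memcpy(p, code, length);
    memset(static_cast<uint8_t*>(p) + length, 0, rounded - length);

    // 3. (Static linking would patch absolute addresses here.)

    // 4. Flip the whole region to read/execute, so the code is never
    //    writable and executable at the same time.
    if (mprotect(p, rounded, PROT_READ | PROT_EXEC) != 0) {
        munmap(p, rounded);
        return nullptr;
    }
    return static_cast<uint8_t*>(p);
}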

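Code::lookupCallSite and lookupMemoryAccess above both run mozilla::BinarySearch over a vector kept sorted by code offset, using a small adapter struct (CallSiteRetAddrOffset, MemoryAccessOffset) that projects each element to its key. The same shape, with std::lower_bound standing in for BinarySearch:

#include <algorithm>
#include <cstdint>
#include <vector>

struct CallSite {
    uint32_t returnAddressOffset;
    // ... kind, line number, etc.
};

const CallSite* LookupCallSite(const std::vector<CallSite>& sites,
                               const uint8_t* returnAddress,
                               const uint8_t* codeBase)
{
    uint32_t target = uint32_t(returnAddress - codeBase);
    auto it = std::lower_bound(sites.begin(), sites.end(), target,
                               [](const CallSite& cs, uint32_t offset) {
                                   return cs.returnAddressOffset < offset;
                               });
    // BinarySearch reports an exact match or nothing; mirror that.
    if (it == sites.end() || it->returnAddressOffset != target)
        return nullptr;
    return &*it;
}
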
View file

@@ -53,7 +53,6 @@ typedef RefPtr<const ShareableBytes> SharedBytes;
// A wasm CodeSegment owns the allocated executable code for a wasm module.
class CodeSegment;
typedef UniquePtr<CodeSegment> UniqueCodeSegment;
typedef UniquePtr<const CodeSegment> UniqueConstCodeSegment;
class CodeSegment
@@ -71,40 +70,32 @@ class CodeSegment
uint8_t* outOfBoundsCode_;
uint8_t* unalignedAccessCode_;
// This assumes ownership of the codeBytes, and deletes them in the event of error.
bool initialize(uint8_t* codeBase, uint32_t codeLength, const ShareableBytes& bytecode,
const LinkData& linkData, const Metadata& metadata);
CodeSegment(uint8_t* bytes, uint32_t functionLength, uint32_t length, uint8_t* interruptCode,
uint8_t* outOfBoundsCode, uint8_t* unalignedAccessCode)
: bytes_(bytes),
functionLength_(functionLength),
length_(length),
interruptCode_(interruptCode),
outOfBoundsCode_(outOfBoundsCode),
unalignedAccessCode_(unalignedAccessCode)
{
}
protected:
CodeSegment() { PodZero(this); }
template <class> friend struct js::MallocProvider;
// codeBytes must be executable memory.
// This assumes ownership of the codeBytes, and deletes them in the event of error.
static UniqueConstCodeSegment create(uint8_t* codeBytes,
uint32_t codeLength,
const ShareableBytes& bytecode,
const LinkData& linkData,
const Metadata& metadata);
public:
CodeSegment(const CodeSegment&) = delete;
CodeSegment(CodeSegment&&) = delete;
void operator=(const CodeSegment&) = delete;
void operator=(CodeSegment&&) = delete;
CodeSegment()
: bytes_(nullptr),
functionLength_(0),
length_(0),
interruptCode_(nullptr),
outOfBoundsCode_(nullptr),
unalignedAccessCode_(nullptr)
{}
static UniqueConstCodeSegment create(jit::MacroAssembler& masm,
const ShareableBytes& bytecode,
public:
static UniqueConstCodeSegment create(JSContext* cx,
const Bytes& codeBytes,
const SharedBytes& bytecode,
const LinkData& linkData,
const Metadata& metadata);
static UniqueConstCodeSegment create(const Bytes& codeBytes,
const ShareableBytes& bytecode,
const LinkData& linkData,
const Metadata& metadata);
~CodeSegment();
uint8_t* base() const { return bytes_; }
@@ -126,15 +117,6 @@ class CodeSegment
bool containsCodePC(const void* pc) const {
return pc >= base() && pc < (base() + length_);
}
UniqueConstBytes unlinkedBytesForDebugging(const LinkData& linkData) const;
size_t serializedSize() const;
uint8_t* serialize(uint8_t* cursor, const LinkData& linkData) const;
const uint8_t* deserialize(const uint8_t* cursor, const ShareableBytes& bytecode,
const LinkData& linkData, const Metadata& metadata);
void addSizeOfMisc(mozilla::MallocSizeOf mallocSizeOf, size_t* code, size_t* data) const;
};
// A FuncExport represents a single function definition inside a wasm Module
@@ -390,14 +372,12 @@ typedef RefPtr<const Metadata> SharedMetadata;
class Code : public ShareableBase<Code>
{
UniqueConstCodeSegment segment_;
SharedMetadata metadata_;
SharedBytes maybeBytecode_;
ExclusiveData<CacheableCharsVector> profilingLabels_;
const UniqueConstCodeSegment segment_;
const SharedMetadata metadata_;
const SharedBytes maybeBytecode_;
const ExclusiveData<CacheableCharsVector> profilingLabels_;
public:
Code();
Code(UniqueConstCodeSegment segment,
const Metadata& metadata,
const ShareableBytes* maybeBytecode);
@@ -425,25 +405,16 @@ class Code : public ShareableBase<Code>
// about:memory reporting:
void addSizeOfMiscIfNotSeen(MallocSizeOf mallocSizeOf,
Metadata::SeenSet* seenMetadata,
ShareableBytes::SeenSet* seenBytes,
Code::SeenSet* seenCode,
size_t* code,
size_t* data) const;
void addSizeOfMisc(MallocSizeOf mallocSizeOf,
Metadata::SeenSet* seenMetadata,
ShareableBytes::SeenSet* seenBytes,
size_t* code,
size_t* data) const;
// A Code object is serialized as the length and bytes of the machine code
// after statically unlinking it; the Code is then later recreated from the
// machine code and other parts.
size_t serializedSize() const;
uint8_t* serialize(uint8_t* cursor, const LinkData& linkData) const;
const uint8_t* deserialize(const uint8_t* cursor, const SharedBytes& bytecode,
const LinkData& linkData, Metadata* maybeMetadata);
WASM_DECLARE_SERIALIZABLE(Code);
};
typedef RefPtr<const Code> SharedCode;
typedef RefPtr<Code> MutableCode;
} // namespace wasm
} // namespace js

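The CodeSegment class above deletes copy and move and funnels all construction through a fallible static create that returns a UniquePtr, so a segment can never be duplicated and a failed allocation surfaces as nullptr. The idiom in miniature (AllocateExecutable/FreeExecutable are hypothetical stand-ins):

#include <cstdint>
#include <memory>

uint8_t* AllocateExecutable(uint32_t length);         // hypothetical
void FreeExecutable(uint8_t* base, uint32_t length);  // hypothetical

class Segment {
    uint8_t* base_;
    uint32_t length_;

    // Private: only create() may construct one.
    Segment(uint8_t* base, uint32_t length) : base_(base), length_(length) {}

  public:
    Segment(const Segment&) = delete;
    Segment& operator=(const Segment&) = delete;

    static std::unique_ptr<const Segment> create(uint32_t length) {
        uint8_t* base = AllocateExecutable(length);
        if (!base)
            return nullptr;  // fallible, like CodeSegment::create
        return std::unique_ptr<const Segment>(new Segment(base, length));
    }

    ~Segment() { FreeExecutable(base_, length_); }
};
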
View file

@@ -75,22 +75,15 @@ GeneratedSourceMap::searchLineByOffset(JSContext* cx, uint32_t offset, size_t* e
return true;
}
size_t
GeneratedSourceMap::sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const
{
size_t size = exprlocs_.sizeOfExcludingThis(mallocSizeOf);
if (sortedByOffsetExprLocIndices_)
size += sortedByOffsetExprLocIndices_->sizeOfIncludingThis(mallocSizeOf);
return size;
}
DebugState::DebugState(SharedCode code,
const Metadata& metadata,
const ShareableBytes* maybeBytecode)
: code_(Move(code)),
metadata_(&metadata),
maybeBytecode_(maybeBytecode),
enterAndLeaveFrameTrapsCounter_(0)
{
MOZ_ASSERT_IF(debugEnabled(), maybeBytecode);
MOZ_ASSERT_IF(metadata_->debugEnabled, maybeBytecode);
}
const char enabledMessage[] =
@@ -164,7 +157,7 @@ struct LineComparator
bool
DebugState::getLineOffsets(JSContext* cx, size_t lineno, Vector<uint32_t>* offsets)
{
if (!debugEnabled())
if (!metadata_->debugEnabled)
return true;
if (!ensureSourceMap(cx))
@@ -197,7 +190,7 @@ bool
DebugState::getOffsetLocation(JSContext* cx, uint32_t offset, bool* found, size_t* lineno, size_t* column)
{
*found = false;
if (!debugEnabled())
if (!metadata_->debugEnabled)
return true;
if (!ensureSourceMap(cx))
@@ -221,7 +214,7 @@ bool
DebugState::totalSourceLines(JSContext* cx, uint32_t* count)
{
*count = 0;
if (!debugEnabled())
if (!metadata_->debugEnabled)
return true;
if (!ensureSourceMap(cx))
@@ -241,8 +234,8 @@ DebugState::stepModeEnabled(uint32_t funcIndex) const
bool
DebugState::incrementStepModeCount(JSContext* cx, uint32_t funcIndex)
{
MOZ_ASSERT(debugEnabled());
const CodeRange& codeRange = codeRanges()[debugFuncToCodeRange(funcIndex)];
MOZ_ASSERT(metadata_->debugEnabled);
const CodeRange& codeRange = metadata_->codeRanges[metadata_->debugFuncToCodeRange[funcIndex]];
MOZ_ASSERT(codeRange.isFunction());
if (!stepModeCounters_.initialized() && !stepModeCounters_.init()) {
@@ -265,7 +258,7 @@ DebugState::incrementStepModeCount(JSContext* cx, uint32_t funcIndex)
codeRange.end() - codeRange.begin());
AutoFlushICache afc("Code::incrementStepModeCount");
for (const CallSite& callSite : callSites()) {
for (const CallSite& callSite : metadata_->callSites) {
if (callSite.kind() != CallSite::Breakpoint)
continue;
uint32_t offset = callSite.returnAddressOffset();
@@ -278,8 +271,8 @@ DebugState::incrementStepModeCount(JSContext* cx, uint32_t funcIndex)
bool
DebugState::decrementStepModeCount(JSContext* cx, uint32_t funcIndex)
{
MOZ_ASSERT(debugEnabled());
const CodeRange& codeRange = codeRanges()[debugFuncToCodeRange(funcIndex)];
MOZ_ASSERT(metadata_->debugEnabled);
const CodeRange& codeRange = metadata_->codeRanges[metadata_->debugFuncToCodeRange[funcIndex]];
MOZ_ASSERT(codeRange.isFunction());
MOZ_ASSERT(stepModeCounters_.initialized() && !stepModeCounters_.empty());
@@ -294,7 +287,7 @@ DebugState::decrementStepModeCount(JSContext* cx, uint32_t funcIndex)
codeRange.end() - codeRange.begin());
AutoFlushICache afc("Code::decrementStepModeCount");
for (const CallSite& callSite : callSites()) {
for (const CallSite& callSite : metadata_->callSites) {
if (callSite.kind() != CallSite::Breakpoint)
continue;
uint32_t offset = callSite.returnAddressOffset();
@@ -319,16 +312,16 @@ SlowCallSiteSearchByOffset(const Metadata& metadata, uint32_t offset)
bool
DebugState::hasBreakpointTrapAtOffset(uint32_t offset)
{
if (!debugEnabled())
if (!metadata_->debugEnabled)
return false;
return SlowCallSiteSearchByOffset(metadata(), offset);
return SlowCallSiteSearchByOffset(*metadata_, offset);
}
void
DebugState::toggleBreakpointTrap(JSRuntime* rt, uint32_t offset, bool enabled)
{
MOZ_ASSERT(debugEnabled());
const CallSite* callSite = SlowCallSiteSearchByOffset(metadata(), offset);
MOZ_ASSERT(metadata_->debugEnabled);
const CallSite* callSite = SlowCallSiteSearchByOffset(*metadata_, offset);
if (!callSite)
return;
size_t debugTrapOffset = callSite->returnAddressOffset();
@@ -420,7 +413,7 @@ DebugState::toggleDebugTrap(uint32_t offset, bool enabled)
{
MOZ_ASSERT(offset);
uint8_t* trap = code_->segment().base() + offset;
const Uint32Vector& farJumpOffsets = metadata().debugTrapFarJumpOffsets;
const Uint32Vector& farJumpOffsets = metadata_->debugTrapFarJumpOffsets;
if (enabled) {
MOZ_ASSERT(farJumpOffsets.length() > 0);
size_t i = 0;
@@ -439,7 +432,7 @@ DebugState::toggleDebugTrap(uint32_t offset, bool enabled)
void
DebugState::adjustEnterAndLeaveFrameTrapsState(JSContext* cx, bool enabled)
{
MOZ_ASSERT(debugEnabled());
MOZ_ASSERT(metadata_->debugEnabled);
MOZ_ASSERT_IF(!enabled, enterAndLeaveFrameTrapsCounter_ > 0);
bool wasEnabled = enterAndLeaveFrameTrapsCounter_ > 0;
@@ -454,7 +447,7 @@ DebugState::adjustEnterAndLeaveFrameTrapsState(JSContext* cx, bool enabled)
AutoWritableJitCode awjc(cx->runtime(), code_->segment().base(), code_->segment().length());
AutoFlushICache afc("Code::adjustEnterAndLeaveFrameTrapsState");
AutoFlushICache::setRange(uintptr_t(code_->segment().base()), code_->segment().length());
for (const CallSite& callSite : callSites()) {
for (const CallSite& callSite : metadata_->callSites) {
if (callSite.kind() != CallSite::EnterFrame && callSite.kind() != CallSite::LeaveFrame)
continue;
toggleDebugTrap(callSite.returnAddressOffset(), stillEnabled);
@@ -464,28 +457,28 @@ DebugState::adjustEnterAndLeaveFrameTrapsState(JSContext* cx, bool enabled)
bool
DebugState::debugGetLocalTypes(uint32_t funcIndex, ValTypeVector* locals, size_t* argsLength)
{
MOZ_ASSERT(debugEnabled());
MOZ_ASSERT(metadata_->debugEnabled);
const ValTypeVector& args = metadata().debugFuncArgTypes[funcIndex];
const ValTypeVector& args = metadata_->debugFuncArgTypes[funcIndex];
*argsLength = args.length();
if (!locals->appendAll(args))
return false;
// Decode local var types from wasm binary function body.
const CodeRange& range = codeRanges()[debugFuncToCodeRange(funcIndex)];
const CodeRange& range = metadata_->codeRanges[metadata_->debugFuncToCodeRange[funcIndex]];
// In wasm, the Code points to the function start via funcLineOrBytecode.
MOZ_ASSERT(!metadata().isAsmJS() && maybeBytecode_);
MOZ_ASSERT(!metadata_->isAsmJS() && maybeBytecode_);
size_t offsetInModule = range.funcLineOrBytecode();
Decoder d(maybeBytecode_->begin() + offsetInModule, maybeBytecode_->end(),
offsetInModule, /* error = */ nullptr);
return DecodeLocalEntries(d, metadata().kind, locals);
return DecodeLocalEntries(d, metadata_->kind, locals);
}
ExprType
DebugState::debugGetResultType(uint32_t funcIndex)
{
MOZ_ASSERT(debugEnabled());
return metadata().debugFuncReturnTypes[funcIndex];
MOZ_ASSERT(metadata_->debugEnabled);
return metadata_->debugFuncReturnTypes[funcIndex];
}
JSString*
@@ -498,7 +491,7 @@ DebugState::debugDisplayURL(JSContext* cx) const
js::StringBuffer result(cx);
if (!result.append("wasm:"))
return nullptr;
if (const char* filename = metadata().filename.get()) {
if (const char* filename = metadata_->filename.get()) {
js::StringBuffer filenamePrefix(cx);
// EncodeURI returns false due to invalid chars or OOM -- fail only
// during OOM.
@@ -511,7 +504,7 @@ DebugState::debugDisplayURL(JSContext* cx) const
}
}
const ModuleHash& hash = metadata().hash;
const ModuleHash& hash = metadata_->hash;
for (size_t i = 0; i < sizeof(ModuleHash); i++) {
char digit1 = hash[i] / 16, digit2 = hash[i] % 16;
if (!result.append((char)(digit1 < 10 ? digit1 + '0' : digit1 + 'a' - 10)))
@@ -520,19 +513,5 @@ DebugState::debugDisplayURL(JSContext* cx) const
return nullptr;
}
return result.finishString();
}
void
DebugState::addSizeOfMisc(MallocSizeOf mallocSizeOf,
Metadata::SeenSet* seenMetadata,
ShareableBytes::SeenSet* seenBytes,
Code::SeenSet* seenCode,
size_t* code,
size_t* data) const
{
code_->addSizeOfMiscIfNotSeen(mallocSizeOf, seenMetadata, seenBytes, seenCode, code, data);
if (maybeSourceMap_)
*data += maybeSourceMap_->sizeOfExcludingThis(mallocSizeOf);
if (maybeBytecode_)
*data += maybeBytecode_->sizeOfIncludingThisIfNotSeen(mallocSizeOf, seenBytes);
}

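toggleBreakpointTrap and toggleDebugTrap above patch call sites inside code that normally lives on read/execute pages; the real code scopes this with AutoWritableJitCode and AutoFlushICache. A rough POSIX illustration of the same make-writable, patch, flush, re-protect sequence (pageStart/pageLen must be page-aligned and cover the patched bytes):

#include <cstddef>
#include <cstdint>
#include <sys/mman.h>

bool PatchCode(uint8_t* pageStart, size_t pageLen,
               uint8_t* patchAt, const uint8_t* newBytes, size_t n)
{
    // Briefly make the code pages writable (and non-executable).
    if (mprotect(pageStart, pageLen, PROT_READ | PROT_WRITE) != 0)
        return false;

    for (size_t i = 0; i < n; i++)
        patchAt[i] = newBytes[i];

    // On ARM/MIPS the stale instruction cache must be flushed;
    // __builtin___clear_cache covers that with GCC/Clang.
    __builtin___clear_cache(reinterpret_cast<char*>(patchAt),
                            reinterpret_cast<char*>(patchAt + n));

    // Restore read/execute.
    return mprotect(pageStart, pageLen, PROT_READ | PROT_EXEC) == 0;
}
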
View file

@@ -71,8 +71,6 @@ class GeneratedSourceMap
void setTotalLines(uint32_t val) { totalLines_ = val; }
bool searchLineByOffset(JSContext* cx, uint32_t offset, size_t* exprlocIndex);
size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const;
};
typedef UniquePtr<GeneratedSourceMap> UniqueGeneratedSourceMap;
@@ -82,10 +80,11 @@ typedef HashMap<uint32_t, WasmBreakpointSite*, DefaultHasher<uint32_t>, SystemAl
class DebugState
{
const SharedCode code_;
const SharedMetadata metadata_;
const SharedBytes maybeBytecode_;
UniqueGeneratedSourceMap maybeSourceMap_;
// State maintained when debugging is enabled. In this case, the Code is
// not actually shared, but is referenced uniquely by the instance that is
// being debugged.
@@ -98,6 +97,7 @@ class DebugState
public:
DebugState(SharedCode code,
const Metadata& metadata,
const ShareableBytes* maybeBytecode);
// If the source bytecode was saved when this Code was constructed, this
@@ -140,26 +140,6 @@ class DebugState
// Debug URL helpers.
JSString* debugDisplayURL(JSContext* cx) const;
// Accessors for commonly used elements of linked structures.
const Metadata& metadata() const { return code_->metadata(); }
bool debugEnabled() const { return metadata().debugEnabled; }
const CodeRangeVector& codeRanges() const { return metadata().codeRanges; }
const CallSiteVector& callSites() const { return metadata().callSites; }
uint32_t debugFuncToCodeRange(uint32_t funcIndex) const {
return metadata().debugFuncToCodeRange[funcIndex];
}
// about:memory reporting:
void addSizeOfMisc(MallocSizeOf mallocSizeOf,
Metadata::SeenSet* seenMetadata,
ShareableBytes::SeenSet* seenBytes,
Code::SeenSet* seenCode,
size_t* code,
size_t* data) const;
};
typedef UniquePtr<DebugState> UniqueDebugState;

View file

@@ -1124,6 +1124,25 @@ ModuleGenerator::finish(const ShareableBytes& bytecode)
if (!finishCodegen())
return nullptr;
// Round up the code size to page size since this is eventually required by
// the executable-code allocator and for setting memory protection.
uint32_t bytesNeeded = masm_.bytesNeeded();
uint32_t padding = ComputeByteAlignment(bytesNeeded, gc::SystemPageSize());
// Use initLengthUninitialized so there is no round-up allocation nor time
// wasted zeroing memory.
Bytes code;
if (!code.initLengthUninitialized(bytesNeeded + padding))
return nullptr;
// We're not copying into executable memory, so don't flush the icache.
// Note: we may be executing on an arbitrary thread without TlsContext set
// so we can't use AutoFlushICache to inhibit.
masm_.executableCopy(code.begin(), /* flushICache = */ false);
// Zero the padding, since we used initLengthUninitialized above.
memset(code.begin() + bytesNeeded, 0, padding);
// Convert the CallSiteAndTargetVector (needed during generation) to a
// CallSiteVector (what is stored in the Module).
if (!metadata_->callSites.appendAll(masm_.callSites()))
@@ -1183,29 +1202,14 @@ ModuleGenerator::finish(const ShareableBytes& bytecode)
generateBytecodeHash(bytecode);
UniqueConstCodeSegment codeSegment = CodeSegment::create(masm_, bytecode, linkData_, *metadata_);
if (!codeSegment)
return nullptr;
UniqueConstBytes maybeDebuggingBytes;
if (metadata_->debugEnabled) {
maybeDebuggingBytes = codeSegment->unlinkedBytesForDebugging(linkData_);
if (!maybeDebuggingBytes)
return nullptr;
}
SharedCode code = js_new<Code>(Move(codeSegment), *metadata_, &bytecode);
if (!code)
return nullptr;
return SharedModule(js_new<Module>(Move(assumptions_),
*code,
Move(maybeDebuggingBytes),
Move(code),
Move(linkData_),
Move(env_->imports),
Move(env_->exports),
Move(env_->dataSegments),
Move(env_->elemSegments),
*metadata_,
bytecode));
}

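The padding arithmetic in ModuleGenerator::finish above rounds the code image up to a whole number of pages so the executable allocator can set memory protection on it. A sketch equivalent to ComputeByteAlignment for power-of-two alignments:

#include <cassert>
#include <cstdint>

// Bytes needed to round `bytes` up to the next multiple of `alignment`
// (zero when already aligned); `alignment` must be a power of two.
uint32_t PaddingFor(uint32_t bytes, uint32_t alignment)
{
    assert((alignment & (alignment - 1)) == 0);
    return (alignment - (bytes & (alignment - 1))) & (alignment - 1);
}

// Example: with 4096-byte pages, 10000 bytes of code get 2288 bytes of
// zeroed padding, for a 12288-byte (three-page) buffer.
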
View file

@@ -802,15 +802,13 @@ void
Instance::addSizeOfMisc(MallocSizeOf mallocSizeOf,
Metadata::SeenSet* seenMetadata,
ShareableBytes::SeenSet* seenBytes,
Code::SeenSet* seenCode,
Table::SeenSet* seenTables,
size_t* code,
size_t* data) const
{
*data += mallocSizeOf(this) + globals_->sizeOfMisc(mallocSizeOf);
debug_->addSizeOfMisc(mallocSizeOf, seenMetadata, seenBytes, seenCode, code, data);
code_->addSizeOfMiscIfNotSeen(mallocSizeOf, seenMetadata, seenBytes, seenCode, code, data);
code_->addSizeOfMisc(mallocSizeOf, seenMetadata, seenBytes, code, data);
for (const SharedTable& table : tables_)
*data += table->sizeOfIncludingThisIfNotSeen(mallocSizeOf, seenTables);

View file

@@ -62,10 +62,6 @@ typedef UniquePtr<GlobalSegment> UniqueGlobalSegment;
// instances instantiated from the same Module. However, an Instance has no
// direct reference to its source Module which allows a Module to be destroyed
// while it still has live Instances.
//
// The instance's code may be shared among multiple instances provided none of
// those instances are being debugged. Instances that are being debugged own
// their code.
class Instance
{
@@ -162,7 +158,6 @@ class Instance
void addSizeOfMisc(MallocSizeOf mallocSizeOf,
Metadata::SeenSet* seenMetadata,
ShareableBytes::SeenSet* seenBytes,
Code::SeenSet* seenCode,
Table::SeenSet* seenTables,
size_t* code,
size_t* data) const;

View file

@@ -815,7 +815,6 @@ WasmModuleObject::create(JSContext* cx, Module& module, HandleObject proto)
obj->initReservedSlot(MODULE_SLOT, PrivateValue(&module));
module.AddRef();
cx->zone()->updateJitCodeMallocBytes(module.codeLength());
return obj;
}

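updateJitCodeMallocBytes above credits the executable-code allocation to the zone so the garbage collector can factor off-heap code into its triggers. A sketch of what such accounting does (names illustrative, not SpiderMonkey's):

#include <cstddef>

class ZoneAccounting {
    size_t jitCodeBytes_ = 0;
    const size_t triggerBytes_;

  public:
    explicit ZoneAccounting(size_t triggerBytes) : triggerBytes_(triggerBytes) {}

    // Called after each executable-code allocation.
    void updateJitCodeBytes(size_t nbytes) {
        jitCodeBytes_ += nbytes;
        if (jitCodeBytes_ >= triggerBytes_)
            requestGC();  // schedule a collection for this zone
    }

  private:
    void requestGC() { /* ... */ }
};
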
View file

@@ -146,17 +146,18 @@ Module::serializedSize(size_t* maybeBytecodeSize, size_t* maybeCompiledSize) con
// The compiled debug code must not be saved, set compiled size to 0,
// so Module::assumptionsMatch will return false during assumptions
// deserialization.
if (maybeCompiledSize && metadata().debugEnabled)
if (maybeCompiledSize && metadata_->debugEnabled)
*maybeCompiledSize = 0;
if (maybeCompiledSize && !metadata().debugEnabled) {
if (maybeCompiledSize && !metadata_->debugEnabled) {
*maybeCompiledSize = assumptions_.serializedSize() +
SerializedPodVectorSize(code_) +
linkData_.serializedSize() +
SerializedVectorSize(imports_) +
SerializedVectorSize(exports_) +
SerializedPodVectorSize(dataSegments_) +
SerializedVectorSize(elemSegments_) +
code_->serializedSize();
metadata_->serializedSize();
}
}
@@ -179,21 +180,22 @@ Module::serialize(uint8_t* maybeBytecodeBegin, size_t maybeBytecodeSize,
MOZ_RELEASE_ASSERT(bytecodeEnd == maybeBytecodeBegin + maybeBytecodeSize);
}
MOZ_ASSERT_IF(maybeCompiledBegin && metadata().debugEnabled, maybeCompiledSize == 0);
MOZ_ASSERT_IF(maybeCompiledBegin && metadata_->debugEnabled, maybeCompiledSize == 0);
if (maybeCompiledBegin && !metadata().debugEnabled) {
if (maybeCompiledBegin && !metadata_->debugEnabled) {
// Assumption must be serialized at the beginning of the compiled bytes so
// that compiledAssumptionsMatch can detect a build-id mismatch before any
// other decoding occurs.
uint8_t* cursor = maybeCompiledBegin;
cursor = assumptions_.serialize(cursor);
cursor = SerializePodVector(cursor, code_);
cursor = linkData_.serialize(cursor);
cursor = SerializeVector(cursor, imports_);
cursor = SerializeVector(cursor, exports_);
cursor = SerializePodVector(cursor, dataSegments_);
cursor = SerializeVector(cursor, elemSegments_);
cursor = code_->serialize(cursor, linkData_);
cursor = metadata_->serialize(cursor);
MOZ_RELEASE_ASSERT(cursor == maybeCompiledBegin + maybeCompiledSize);
}
}
@@ -224,6 +226,11 @@ Module::deserialize(const uint8_t* bytecodeBegin, size_t bytecodeSize,
if (!cursor)
return nullptr;
Bytes code;
cursor = DeserializePodVector(cursor, &code);
if (!cursor)
return nullptr;
LinkData linkData;
cursor = linkData.deserialize(cursor);
if (!cursor)
@@ -249,22 +256,29 @@ Module::deserialize(const uint8_t* bytecodeBegin, size_t bytecodeSize,
if (!cursor)
return nullptr;
MutableCode code = js_new<Code>();
cursor = code->deserialize(cursor, bytecode, linkData, maybeMetadata);
MutableMetadata metadata;
if (maybeMetadata) {
metadata = maybeMetadata;
} else {
metadata = js_new<Metadata>();
if (!metadata)
return nullptr;
}
cursor = metadata->deserialize(cursor);
if (!cursor)
return nullptr;
MOZ_RELEASE_ASSERT(cursor == compiledBegin + compiledSize);
MOZ_RELEASE_ASSERT(!!maybeMetadata == code->metadata().isAsmJS());
MOZ_RELEASE_ASSERT(!!maybeMetadata == metadata->isAsmJS());
return js_new<Module>(Move(assumptions),
*code,
nullptr, // Serialized code is never debuggable
Move(code),
Move(linkData),
Move(imports),
Move(exports),
Move(dataSegments),
Move(elemSegments),
*metadata,
*bytecode);
}
@@ -363,21 +377,19 @@ wasm::DeserializeModule(PRFileDesc* bytecodeFile, PRFileDesc* maybeCompiledFile,
Module::addSizeOfMisc(MallocSizeOf mallocSizeOf,
Metadata::SeenSet* seenMetadata,
ShareableBytes::SeenSet* seenBytes,
Code::SeenSet* seenCode,
size_t* code,
size_t* data) const
{
code_->addSizeOfMiscIfNotSeen(mallocSizeOf, seenMetadata, seenBytes, seenCode, code, data);
*data += mallocSizeOf(this) +
assumptions_.sizeOfExcludingThis(mallocSizeOf) +
code_.sizeOfExcludingThis(mallocSizeOf) +
linkData_.sizeOfExcludingThis(mallocSizeOf) +
SizeOfVectorExcludingThis(imports_, mallocSizeOf) +
SizeOfVectorExcludingThis(exports_, mallocSizeOf) +
dataSegments_.sizeOfExcludingThis(mallocSizeOf) +
SizeOfVectorExcludingThis(elemSegments_, mallocSizeOf) +
metadata_->sizeOfIncludingThisIfNotSeen(mallocSizeOf, seenMetadata) +
bytecode_->sizeOfIncludingThisIfNotSeen(mallocSizeOf, seenBytes);
if (unlinkedCodeForDebugging_)
*data += unlinkedCodeForDebugging_->sizeOfExcludingThis(mallocSizeOf);
}
@@ -386,17 +398,17 @@ Module::addSizeOfMisc(MallocSizeOf mallocSizeOf,
// contain offsets in the "code" array and basic information about a code
// segment/function body.
bool
Module::extractCode(JSContext* cx, MutableHandleValue vp) const
Module::extractCode(JSContext* cx, MutableHandleValue vp)
{
RootedPlainObject result(cx, NewBuiltinClassInstance<PlainObject>(cx));
if (!result)
return false;
RootedObject code(cx, JS_NewUint8Array(cx, code_->segment().length()));
RootedObject code(cx, JS_NewUint8Array(cx, code_.length()));
if (!code)
return false;
memcpy(code->as<TypedArrayObject>().viewDataUnshared(), code_->segment().base(), code_->segment().length());
memcpy(code->as<TypedArrayObject>().viewDataUnshared(), code_.begin(), code_.length());
RootedValue value(cx, ObjectValue(*code));
if (!JS_DefineProperty(cx, result, "code", value, JSPROP_ENUMERATE))
@@ -406,7 +418,7 @@ Module::extractCode(JSContext* cx, MutableHandleValue vp) const
if (!segments)
return false;
for (const CodeRange& p : metadata().codeRanges) {
for (const CodeRange& p : metadata_->codeRanges) {
RootedObject segment(cx, NewObjectWithGivenProto<PlainObject>(cx, nullptr));
if (!segment)
return false;
@@ -563,12 +575,12 @@ FindImportForFuncImport(const ImportVector& imports, uint32_t funcImportIndex)
bool
Module::instantiateFunctions(JSContext* cx, Handle<FunctionVector> funcImports) const
{
MOZ_ASSERT(funcImports.length() == metadata().funcImports.length());
MOZ_ASSERT(funcImports.length() == metadata_->funcImports.length());
if (metadata().isAsmJS())
return true;
for (size_t i = 0; i < metadata().funcImports.length(); i++) {
for (size_t i = 0; i < metadata_->funcImports.length(); i++) {
HandleFunction f = funcImports[i];
if (!IsExportedFunction(f) || ExportedFunctionToInstance(f).isAsmJS())
continue;
@@ -577,7 +589,7 @@ Module::instantiateFunctions(JSContext* cx, Handle<FunctionVector> funcImports)
Instance& instance = ExportedFunctionToInstance(f);
const FuncExport& funcExport = instance.metadata().lookupFuncExport(funcIndex);
if (funcExport.sig() != metadata().funcImports[i].sig()) {
if (funcExport.sig() != metadata_->funcImports[i].sig()) {
const Import& import = FindImportForFuncImport(imports_, i);
JS_ReportErrorNumberASCII(cx, GetErrorMessage, nullptr, JSMSG_WASM_BAD_IMPORT_SIG,
import.module.get(), import.field.get());
@@ -618,27 +630,27 @@ CheckLimits(JSContext* cx, uint32_t declaredMin, const Maybe<uint32_t>& declared
bool
Module::instantiateMemory(JSContext* cx, MutableHandleWasmMemoryObject memory) const
{
if (!metadata().usesMemory()) {
if (!metadata_->usesMemory()) {
MOZ_ASSERT(!memory);
MOZ_ASSERT(dataSegments_.empty());
return true;
}
uint32_t declaredMin = metadata().minMemoryLength;
Maybe<uint32_t> declaredMax = metadata().maxMemoryLength;
uint32_t declaredMin = metadata_->minMemoryLength;
Maybe<uint32_t> declaredMax = metadata_->maxMemoryLength;
if (memory) {
ArrayBufferObjectMaybeShared& buffer = memory->buffer();
MOZ_ASSERT_IF(metadata().isAsmJS(), buffer.isPreparedForAsmJS());
MOZ_ASSERT_IF(!metadata().isAsmJS(), buffer.as<ArrayBufferObject>().isWasm());
MOZ_ASSERT_IF(metadata_->isAsmJS(), buffer.isPreparedForAsmJS());
MOZ_ASSERT_IF(!metadata_->isAsmJS(), buffer.as<ArrayBufferObject>().isWasm());
if (!CheckLimits(cx, declaredMin, declaredMax, buffer.byteLength(), buffer.wasmMaxSize(),
metadata().isAsmJS(), "Memory")) {
metadata_->isAsmJS(), "Memory")) {
return false;
}
} else {
MOZ_ASSERT(!metadata().isAsmJS());
MOZ_ASSERT(metadata().memoryUsage == MemoryUsage::Unshared);
MOZ_ASSERT(!metadata_->isAsmJS());
MOZ_ASSERT(metadata_->memoryUsage == MemoryUsage::Unshared);
RootedArrayBufferObjectMaybeShared buffer(cx,
ArrayBufferObject::createForWasm(cx, declaredMin, declaredMax));
@@ -660,15 +672,15 @@ Module::instantiateTable(JSContext* cx, MutableHandleWasmTableObject tableObj,
SharedTableVector* tables) const
{
if (tableObj) {
MOZ_ASSERT(!metadata().isAsmJS());
MOZ_ASSERT(!metadata_->isAsmJS());
MOZ_ASSERT(metadata().tables.length() == 1);
const TableDesc& td = metadata().tables[0];
MOZ_ASSERT(metadata_->tables.length() == 1);
const TableDesc& td = metadata_->tables[0];
MOZ_ASSERT(td.external);
Table& table = tableObj->table();
if (!CheckLimits(cx, td.limits.initial, td.limits.maximum, table.length(), table.maximum(),
metadata().isAsmJS(), "Table")) {
metadata_->isAsmJS(), "Table")) {
return false;
}
@@ -677,7 +689,7 @@ Module::instantiateTable(JSContext* cx, MutableHandleWasmTableObject tableObj,
return false;
}
} else {
for (const TableDesc& td : metadata().tables) {
for (const TableDesc& td : metadata_->tables) {
SharedTable table;
if (td.external) {
MOZ_ASSERT(!tableObj);
@@ -869,29 +881,16 @@ Module::instantiate(JSContext* cx,
if (!instantiateTable(cx, &table, &tables))
return false;
// The CodeSegment does not hold on to the bytecode, see comment below.
auto codeSegment = CodeSegment::create(cx, code_, bytecode_, linkData_, *metadata_);
if (!codeSegment)
return false;
auto globalSegment = GlobalSegment::create(linkData_.globalDataLength);
if (!globalSegment)
return false;
SharedCode code(code_);
if (metadata().debugEnabled) {
// The first time through, use the pre-linked code in the module but
// mark it as busy. Subsequently, instantiate the copy of the code
// bytes that we keep around for debugging instead, because the debugger
// may patch the pre-linked code at any time.
if (!codeIsBusy_.compareExchange(false, true)) {
UniqueConstCodeSegment codeSegment = CodeSegment::create(*unlinkedCodeForDebugging_,
*bytecode_, linkData_,
metadata());
if (!codeSegment)
return false;
code = js_new<Code>(Move(codeSegment), metadata(), bytecode_);
if (!code)
return false;
}
}
// To support viewing the source of an instance (Instance::createText), the
// instance must hold onto a ref of the bytecode (keeping it alive). This
// wastes memory for most users, so we try to only save the source when a
@@ -901,17 +900,21 @@ Module::instantiate(JSContext* cx,
// for non-developer builds).
const ShareableBytes* maybeBytecode = nullptr;
if (cx->compartment()->isDebuggee() || metadata().debugEnabled ||
!metadata().funcNames.empty())
if (cx->compartment()->isDebuggee() || metadata_->debugEnabled ||
!metadata_->funcNames.empty())
{
maybeBytecode = bytecode_.get();
}
SharedCode code(js_new<Code>(Move(codeSegment), *metadata_, maybeBytecode));
if (!code)
return false;
// The debug object must be present even when debugging is not enabled: It
// provides the lazily created source text for the program, even if that
// text is a placeholder message when debugging is not enabled.
auto debug = cx->make_unique<DebugState>(code, maybeBytecode);
auto debug = cx->make_unique<DebugState>(code, *metadata_, maybeBytecode);
if (!debug)
return false;
@@ -959,9 +962,9 @@ Module::instantiate(JSContext* cx,
// Note that failure may cause instantiation to throw, but the instance may
// still be live via edges created by initSegments or the start function.
if (metadata().startFuncIndex) {
if (metadata_->startFuncIndex) {
FixedInvokeArgs<0> args(cx);
if (!instance->instance().callExport(cx, *metadata().startFuncIndex, args))
if (!instance->instance().callExport(cx, *metadata_->startFuncIndex, args))
return false;
}

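Module::serialize and deserialize above walk a cursor across one flat buffer: each field writes itself at the cursor and returns the advanced cursor, and deserialization replays exactly the same sequence, returning nullptr on malformed input. A minimal sketch of that cursor style, mirroring WriteScalar/SerializePodVector in shape only:

#include <cstddef>
#include <cstdint>
#include <cstring>
#include <vector>

uint8_t* WriteBytesVec(uint8_t* cursor, const std::vector<uint8_t>& v)
{
    uint32_t length = uint32_t(v.size());
    memcpy(cursor, &length, sizeof(length));  // length prefix
    cursor += sizeof(length);
    memcpy(cursor, v.data(), length);         // raw POD payload
    return cursor + length;
}

const uint8_t* ReadBytesVec(const uint8_t* cursor, const uint8_t* end,
                            std::vector<uint8_t>* v)
{
    uint32_t length;
    if (end - cursor < ptrdiff_t(sizeof(length)))
        return nullptr;                       // truncated input
    memcpy(&length, cursor, sizeof(length));
    cursor += sizeof(length);
    if (end - cursor < ptrdiff_t(length))
        return nullptr;
    v->assign(cursor, cursor + length);
    return cursor + length;
}
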
View file

@@ -83,32 +83,24 @@ typedef UniquePtr<const LinkData> UniqueConstLinkData;
// any number of times such that the serialized bytes can be deserialized later
// to produce a new, equivalent Module.
//
// Fully linked-and-instantiated code (represented by Code and its owned
// CodeSegment) can be shared between instances, provided none of those
// instances are being debugged. If patchable code is needed then each instance
// must have its own Code. Module eagerly creates a new Code and gives it to the
// first instance; it then instantiates new Code objects from a copy of the
// unlinked code that it keeps around for that purpose.
// Since fully linked-and-instantiated code (represented by CodeSegment) cannot
// be shared between instances, Module stores an unlinked, uninstantiated copy
// of the code (represented by the Bytes) and creates a new CodeSegment each
// time it is instantiated. In the future, Module will store a shareable,
// immutable CodeSegment that can be shared by all its instances.
class Module : public JS::WasmModule
{
const Assumptions assumptions_;
const SharedCode code_;
const UniqueConstBytes unlinkedCodeForDebugging_;
const Bytes code_;
const LinkData linkData_;
const ImportVector imports_;
const ExportVector exports_;
const DataSegmentVector dataSegments_;
const ElemSegmentVector elemSegments_;
const SharedMetadata metadata_;
const SharedBytes bytecode_;
// `codeIsBusy_` is set to false initially and then to true when `code_` is
// already being used for an instance and can't be shared because it may be
// patched by the debugger. Subsequent instances must then create copies
// by linking the `unlinkedCodeForDebugging_`.
mutable mozilla::Atomic<bool> codeIsBusy_;
bool instantiateFunctions(JSContext* cx, Handle<FunctionVector> funcImports) const;
bool instantiateMemory(JSContext* cx, MutableHandleWasmMemoryObject memory) const;
bool instantiateTable(JSContext* cx,
@@ -122,34 +114,30 @@ class Module : public JS::WasmModule
public:
Module(Assumptions&& assumptions,
const Code& code,
UniqueConstBytes unlinkedCodeForDebugging,
Bytes&& code,
LinkData&& linkData,
ImportVector&& imports,
ExportVector&& exports,
DataSegmentVector&& dataSegments,
ElemSegmentVector&& elemSegments,
const Metadata& metadata,
const ShareableBytes& bytecode)
: assumptions_(Move(assumptions)),
code_(&code),
unlinkedCodeForDebugging_(Move(unlinkedCodeForDebugging)),
code_(Move(code)),
linkData_(Move(linkData)),
imports_(Move(imports)),
exports_(Move(exports)),
dataSegments_(Move(dataSegments)),
elemSegments_(Move(elemSegments)),
bytecode_(&bytecode),
codeIsBusy_(false)
{
MOZ_ASSERT_IF(metadata().debugEnabled, unlinkedCodeForDebugging_);
}
metadata_(&metadata),
bytecode_(&bytecode)
{}
~Module() override { /* Note: can be called on any thread */ }
const Metadata& metadata() const { return code_->metadata(); }
const Metadata& metadata() const { return *metadata_; }
const ImportVector& imports() const { return imports_; }
const ExportVector& exports() const { return exports_; }
const Bytes& bytecode() const { return bytecode_->bytes; }
uint32_t codeLength() const { return code_->segment().length(); }
// Instantiate this module with the given imports:
@@ -178,12 +166,11 @@ class Module : public JS::WasmModule
void addSizeOfMisc(MallocSizeOf mallocSizeOf,
Metadata::SeenSet* seenMetadata,
ShareableBytes::SeenSet* seenBytes,
Code::SeenSet* seenCode,
size_t* code, size_t* data) const;
// Generated code analysis support:
bool extractCode(JSContext* cx, MutableHandleValue vp) const;
bool extractCode(JSContext* cx, MutableHandleValue vp);
};
typedef RefPtr<Module> SharedModule;

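Per the comment above, Module stores an unlinked copy of the machine code and links a fresh CodeSegment on each instantiation. "Linking" here means walking LinkData's recorded patch offsets and writing absolute addresses into the copied code, as StaticallyLink in WasmCode.cpp does. A sketch of the raw-pointer case:

#include <cstdint>
#include <cstring>
#include <vector>

struct LinkPatch {
    uint32_t patchAtOffset;  // where in the code image to write
    void*    target;         // absolute address to write there
};

void StaticallyLinkSketch(uint8_t* codeBase,
                          const std::vector<LinkPatch>& patches)
{
    for (const LinkPatch& p : patches) {
        // Raw-pointer patch; the real code also handles architecture-
        // specific instruction-immediate encodings via the assembler.
        memcpy(codeBase + p.patchAtOffset, &p.target, sizeof(void*));
    }
}
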
View file

@@ -89,7 +89,6 @@ using mozilla::Unused;
typedef Vector<uint32_t, 0, SystemAllocPolicy> Uint32Vector;
typedef Vector<uint8_t, 0, SystemAllocPolicy> Bytes;
typedef UniquePtr<Bytes> UniqueBytes;
typedef UniquePtr<const Bytes> UniqueConstBytes;
typedef Vector<char, 0, SystemAllocPolicy> UTF8Bytes;
typedef int8_t I8x16[16];