Mirror of https://github.com/mozilla/gecko-dev.git

Bug 1338217 - Make data structures immutable in preparation for sharing Wasm code. r=luke

--HG--
extra : rebase_source : 1554db7a4054cfab3878e2313e2c3353fc29e161

This commit is contained in:
Parent: 5724e57fa5
Commit: 0eec1e4467
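The through-line of the patch: data that will eventually be shared between threads becomes immutable up front (const CodeSegment, SharedCode, SharedBytes, const Code* lookups), and the one lazily built structure, profilingLabels_, moves behind a lock. A minimal standalone sketch of the build-then-freeze half of that idea, using std::shared_ptr in place of SpiderMonkey's RefPtr (the names below are illustrative, not from the patch):

    #include <cstdint>
    #include <memory>
    #include <utility>
    #include <vector>

    using Bytes = std::vector<uint8_t>;

    // One thread builds the bytes through a mutable handle...
    using MutableBytes = std::shared_ptr<Bytes>;
    // ...then everyone else sees only a const view, so concurrent readers need
    // no further synchronization (cf. the MutableBytes/SharedBytes typedefs below).
    using SharedBytes = std::shared_ptr<const Bytes>;

    SharedBytes Freeze(MutableBytes b)
    {
        // shared_ptr<T> converts implicitly to shared_ptr<const T>.
        return SharedBytes(std::move(b));
    }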
@@ -34,6 +34,7 @@
   _(GeckoProfilerStrings, 500) \
   _(ProtectedRegionTree, 500) \
   _(WasmSigIdSet, 500) \
+  _(WasmCodeProfilingLabels, 500) \
   _(ShellOffThreadState, 500) \
   _(SimulatorCacheLock, 500) \
   _(Arm64SimulatorLock, 500) \

@@ -54,16 +54,23 @@ using JS::GenericNaN;
 static Atomic<uint32_t> wasmCodeAllocations(0);
 static const uint32_t MaxWasmCodeAllocations = 16384;
 
+static uint32_t
+RoundupCodeLength(uint32_t codeLength)
+{
+    // codeLength is a multiple of the system's page size, but not necessarily
+    // a multiple of ExecutableCodePageSize.
+    MOZ_ASSERT(codeLength % gc::SystemPageSize() == 0);
+    return JS_ROUNDUP(codeLength, ExecutableCodePageSize);
+}
+
 static uint8_t*
 AllocateCodeSegment(JSContext* cx, uint32_t codeLength)
 {
+    codeLength = RoundupCodeLength(codeLength);
+
     if (wasmCodeAllocations >= MaxWasmCodeAllocations)
         return nullptr;
 
-    // codeLength is a multiple of the system's page size, but not necessarily
-    // a multiple of ExecutableCodePageSize.
-    codeLength = JS_ROUNDUP(codeLength, ExecutableCodePageSize);
-
     void* p = AllocateExecutableMemory(codeLength, ProtectionSetting::Writable);
 
     // If the allocation failed and the embedding gives us a last-ditch attempt

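RoundupCodeLength gives AllocateCodeSegment and FreeCodeSegment one shared definition of the rounding both sides must agree on. As a standalone illustration: rounding up to a power-of-two granularity such as an executable-code page size can be done with a mask (the power-of-two requirement is an assumption of this sketch; JS_ROUNDUP is written with division, so it also handles other granularities):

    #include <cassert>
    #include <cstdint>

    // Round n up to the next multiple of granularity, which must be a power
    // of two for the mask trick to be valid.
    static uint32_t
    RoundUpPow2(uint32_t n, uint32_t granularity)
    {
        assert(granularity != 0 && (granularity & (granularity - 1)) == 0);
        return (n + granularity - 1) & ~(granularity - 1);
    }

    // E.g. with 4 KiB system pages and 64 KiB executable-code pages:
    // RoundUpPow2(4096, 65536) == 65536; RoundUpPow2(65536, 65536) == 65536.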
@@ -87,8 +94,18 @@ AllocateCodeSegment(JSContext* cx, uint32_t codeLength)
     return (uint8_t*)p;
 }
 
+static void
+FreeCodeSegment(uint8_t* bytes, uint32_t codeLength)
+{
+    codeLength = RoundupCodeLength(codeLength);
+#ifdef MOZ_VTUNE
+    vtune::UnmarkBytes(bytes, codeLength);
+#endif
+    DeallocateExecutableMemory(bytes, codeLength);
+}
+
 static bool
-StaticallyLink(CodeSegment& cs, const LinkData& linkData)
+StaticallyLink(const CodeSegment& cs, const LinkData& linkData)
 {
     for (LinkData::InternalLink link : linkData.internalLinks) {
         uint8_t* patchAt = cs.base() + link.patchAtOffset;

@@ -120,7 +137,7 @@ StaticallyLink(CodeSegment& cs, const LinkData& linkData)
 }
 
 static void
-SendCodeRangesToProfiler(CodeSegment& cs, const Bytes& bytecode, const Metadata& metadata)
+SendCodeRangesToProfiler(const CodeSegment& cs, const Bytes& bytecode, const Metadata& metadata)
 {
     bool enabled = false;
 #ifdef JS_ION_PERF

@@ -167,55 +184,52 @@ SendCodeRangesToProfiler(CodeSegment& cs, const Bytes& bytecode, const Metadata&
         return;
 }
 
-/* static */ UniqueCodeSegment
+/* static */ UniqueConstCodeSegment
 CodeSegment::create(JSContext* cx,
-                    const Bytes& bytecode,
+                    const Bytes& codeBytes,
+                    const SharedBytes& bytecode,
                     const LinkData& linkData,
-                    const Metadata& metadata,
-                    HandleWasmMemoryObject memory)
+                    const Metadata& metadata)
 {
-    MOZ_ASSERT(bytecode.length() % gc::SystemPageSize() == 0);
-    MOZ_ASSERT(linkData.functionCodeLength < bytecode.length());
+    MOZ_ASSERT(codeBytes.length() % gc::SystemPageSize() == 0);
+    MOZ_ASSERT(linkData.functionCodeLength < codeBytes.length());
 
     // These should always exist and should never be first in the code segment.
     MOZ_ASSERT(linkData.interruptOffset != 0);
     MOZ_ASSERT(linkData.outOfBoundsOffset != 0);
     MOZ_ASSERT(linkData.unalignedAccessOffset != 0);
 
-    auto cs = cx->make_unique<CodeSegment>();
-    if (!cs)
+    uint8_t* codeBase = AllocateCodeSegment(cx, codeBytes.length());
+    if (!codeBase)
         return nullptr;
 
-    cs->bytes_ = AllocateCodeSegment(cx, bytecode.length());
-    if (!cs->bytes_)
+    auto cs = cx->make_unique<const CodeSegment>(codeBase, linkData.functionCodeLength,
+                                                 codeBytes.length(),
+                                                 codeBase + linkData.interruptOffset,
+                                                 codeBase + linkData.outOfBoundsOffset,
+                                                 codeBase + linkData.unalignedAccessOffset);
+    if (!cs) {
+        FreeCodeSegment(codeBase, codeBytes.length());
         return nullptr;
-
-    uint8_t* codeBase = cs->base();
-
-    cs->functionLength_ = linkData.functionCodeLength;
-    cs->length_ = bytecode.length();
-    cs->interruptCode_ = codeBase + linkData.interruptOffset;
-    cs->outOfBoundsCode_ = codeBase + linkData.outOfBoundsOffset;
-    cs->unalignedAccessCode_ = codeBase + linkData.unalignedAccessOffset;
+    }
 
     {
         JitContext jcx(CompileRuntime::get(cx->compartment()->runtimeFromAnyThread()));
         AutoFlushICache afc("CodeSegment::create");
         AutoFlushICache::setRange(uintptr_t(codeBase), cs->length());
 
-        memcpy(codeBase, bytecode.begin(), bytecode.length());
+        memcpy(codeBase, codeBytes.begin(), codeBytes.length());
         if (!StaticallyLink(*cs, linkData))
             return nullptr;
     }
 
     // Reprotect the whole region to avoid having separate RW and RX mappings.
-    uint32_t size = JS_ROUNDUP(cs->length(), ExecutableCodePageSize);
-    if (!ExecutableAllocator::makeExecutable(codeBase, size)) {
+    if (!ExecutableAllocator::makeExecutable(codeBase, RoundupCodeLength(cs->length()))) {
        ReportOutOfMemory(cx);
         return nullptr;
     }
 
-    SendCodeRangesToProfiler(*cs, bytecode, metadata);
+    SendCodeRangesToProfiler(*cs, bytecode->bytes, metadata);
 
     return cs;
 }

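CodeSegment::create keeps a strict write-xor-execute discipline: the region is mapped writable, the code is copied and patched, and only then is the whole region flipped to executable, with FreeCodeSegment reclaiming it on the failure paths. A POSIX sketch of that allocate → copy → reprotect sequence, assuming length is page-aligned (the real code goes through ExecutableAllocator and also flushes instruction caches):

    #include <cstring>
    #include <sys/mman.h>

    static void*
    InstallCode(const void* code, size_t length)
    {
        // Map read-write first; the region is never writable and executable at once.
        void* p = mmap(nullptr, length, PROT_READ | PROT_WRITE,
                       MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
        if (p == MAP_FAILED)
            return nullptr;

        memcpy(p, code, length);  // copy (and, in the real code, link/patch)

        // Reprotect the whole region to read-execute.
        if (mprotect(p, length, PROT_READ | PROT_EXEC) != 0) {
            munmap(p, length);
            return nullptr;
        }
        return p;
    }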
@@ -230,12 +244,7 @@ CodeSegment::~CodeSegment()
 
     MOZ_ASSERT(length() > 0);
 
-    // Match AllocateCodeSegment.
-    uint32_t size = JS_ROUNDUP(length(), ExecutableCodePageSize);
-#ifdef MOZ_VTUNE
-    vtune::UnmarkBytes(bytes_, size);
-#endif
-    DeallocateExecutableMemory(bytes_, size);
+    FreeCodeSegment(bytes_, length());
 }
 
 size_t

@@ -471,12 +480,13 @@ Metadata::getFuncName(const Bytes* maybeBytecode, uint32_t funcIndex, UTF8Bytes*
     name->append(afterFuncIndex, strlen(afterFuncIndex));
 }
 
-Code::Code(UniqueCodeSegment segment,
+Code::Code(UniqueConstCodeSegment segment,
            const Metadata& metadata,
           const ShareableBytes* maybeBytecode)
   : segment_(Move(segment)),
     metadata_(&metadata),
-    maybeBytecode_(maybeBytecode)
+    maybeBytecode_(maybeBytecode),
+    profilingLabels_(mutexid::WasmCodeProfilingLabels, CacheableCharsVector())
 {
     MOZ_ASSERT_IF(metadata_->debugEnabled, maybeBytecode);
 }

@@ -558,14 +568,16 @@ Code::getFuncAtom(JSContext* cx, uint32_t funcIndex) const
 // since, once we start sampling, we'll be in a signal-handing context where we
 // cannot malloc.
 void
-Code::ensureProfilingLabels(bool profilingEnabled)
+Code::ensureProfilingLabels(bool profilingEnabled) const
 {
+    auto labels = profilingLabels_.lock();
+
     if (!profilingEnabled) {
-        profilingLabels_.clear();
+        labels->clear();
         return;
     }
 
-    if (!profilingLabels_.empty())
+    if (!labels->empty())
         return;
 
     for (const CodeRange& codeRange : metadata_->codeRanges) {

@@ -599,21 +611,23 @@ Code::ensureProfilingLabels(bool profilingEnabled)
         if (!label)
             return;
 
-        if (codeRange.funcIndex() >= profilingLabels_.length()) {
-            if (!profilingLabels_.resize(codeRange.funcIndex() + 1))
+        if (codeRange.funcIndex() >= labels->length()) {
+            if (!labels->resize(codeRange.funcIndex() + 1))
                 return;
         }
 
-        profilingLabels_[codeRange.funcIndex()] = Move(label);
+        ((CacheableCharsVector&)labels)[codeRange.funcIndex()] = Move(label);
     }
 }
 
 const char*
 Code::profilingLabel(uint32_t funcIndex) const
 {
-    if (funcIndex >= profilingLabels_.length() || !profilingLabels_[funcIndex])
+    auto labels = profilingLabels_.lock();
+
+    if (funcIndex >= labels->length() || !((CacheableCharsVector&)labels)[funcIndex])
         return "?";
-    return profilingLabels_[funcIndex].get();
+    return ((CacheableCharsVector&)labels)[funcIndex].get();
 }
 
 void

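Because profilingLabels_ is wrapped in ExclusiveData, the vector is reachable only through the guard returned by lock(), which is what lets both ensureProfilingLabels and profilingLabel be const: the mutation is confined behind the mutex. A rough standalone sketch of that wrapper shape built on std::mutex (SpiderMonkey's ExclusiveData differs in detail, e.g. it participates in the mutexid lock-ordering checks seen in the first hunk):

    #include <mutex>
    #include <utility>

    template <typename T>
    class Exclusive
    {
        mutable std::mutex lock_;
        mutable T value_;

      public:
        explicit Exclusive(T init) : value_(std::move(init)) {}

        // The value is reachable only while the guard (and thus the lock) lives.
        class Guard
        {
            std::unique_lock<std::mutex> held_;
            T& value_;

          public:
            Guard(std::mutex& m, T& v) : held_(m), value_(v) {}
            T* operator->() { return &value_; }
            T& operator*() { return value_; }
        };

        Guard lock() const { return Guard(lock_, value_); }
    };

Usage then mirrors the patch: auto labels = labelsExclusive.lock(); labels->clear();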
@@ -20,6 +20,7 @@
 #define wasm_code_h
 
 #include "js/HashTable.h"
+#include "threading/ExclusiveData.h"
 #include "wasm/WasmTypes.h"
 
 namespace js {

@@ -33,10 +34,26 @@ struct LinkData;
 struct Metadata;
 class FrameIterator;
 
+// ShareableBytes is a reference-counted Vector of bytes.
+
+struct ShareableBytes : ShareableBase<ShareableBytes>
+{
+    // Vector is 'final', so instead make Vector a member and add boilerplate.
+    Bytes bytes;
+    size_t sizeOfExcludingThis(MallocSizeOf m) const { return bytes.sizeOfExcludingThis(m); }
+    const uint8_t* begin() const { return bytes.begin(); }
+    const uint8_t* end() const { return bytes.end(); }
+    size_t length() const { return bytes.length(); }
+    bool append(const uint8_t *p, uint32_t ct) { return bytes.append(p, ct); }
+};
+
+typedef RefPtr<ShareableBytes> MutableBytes;
+typedef RefPtr<const ShareableBytes> SharedBytes;
+
 // A wasm CodeSegment owns the allocated executable code for a wasm module.
 
 class CodeSegment;
-typedef UniquePtr<CodeSegment> UniqueCodeSegment;
+typedef UniquePtr<const CodeSegment> UniqueConstCodeSegment;
 
 class CodeSegment
 {

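One nuance of the new UniqueConstCodeSegment typedef: a unique pointer to const still owns, and will delete, its object; the const only forbids mutation through the handle. With the standard-library analogue:

    #include <memory>

    struct S { int x = 0; };

    // Ownership and destruction are unaffected by the const.
    std::unique_ptr<const S> p = std::make_unique<S>();
    // p->x = 1;  // does not compile: the pointee is const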
@@ -53,6 +70,17 @@ class CodeSegment
     uint8_t* outOfBoundsCode_;
     uint8_t* unalignedAccessCode_;
 
+    CodeSegment(uint8_t* bytes, uint32_t functionLength, uint32_t length, uint8_t* interruptCode,
+                uint8_t* outOfBoundsCode, uint8_t* unalignedAccessCode)
+      : bytes_(bytes),
+        functionLength_(functionLength),
+        length_(length),
+        interruptCode_(interruptCode),
+        outOfBoundsCode_(outOfBoundsCode),
+        unalignedAccessCode_(unalignedAccessCode)
+    {
+    }
+
   protected:
     CodeSegment() { PodZero(this); }
     template <class> friend struct js::MallocProvider;

@@ -63,11 +91,11 @@ class CodeSegment
     void operator=(CodeSegment&&) = delete;
 
   public:
-    static UniqueCodeSegment create(JSContext* cx,
-                                    const Bytes& code,
-                                    const LinkData& linkData,
-                                    const Metadata& metadata,
-                                    HandleWasmMemoryObject memory);
+    static UniqueConstCodeSegment create(JSContext* cx,
+                                         const Bytes& codeBytes,
+                                         const SharedBytes& bytecode,
+                                         const LinkData& linkData,
+                                         const Metadata& metadata);
     ~CodeSegment();
 
     uint8_t* base() const { return bytes_; }

@@ -91,23 +119,6 @@ class CodeSegment
     }
 };
 
-// ShareableBytes is a ref-counted vector of bytes which are incrementally built
-// during compilation and then immutably shared.
-
-struct ShareableBytes : ShareableBase<ShareableBytes>
-{
-    // Vector is 'final', so instead make Vector a member and add boilerplate.
-    Bytes bytes;
-    size_t sizeOfExcludingThis(MallocSizeOf m) const { return bytes.sizeOfExcludingThis(m); }
-    const uint8_t* begin() const { return bytes.begin(); }
-    const uint8_t* end() const { return bytes.end(); }
-    size_t length() const { return bytes.length(); }
-    bool append(const uint8_t *p, uint32_t ct) { return bytes.append(p, ct); }
-};
-
-typedef RefPtr<ShareableBytes> MutableBytes;
-typedef RefPtr<const ShareableBytes> SharedBytes;
-
 // A FuncExport represents a single function definition inside a wasm Module
 // that has been exported one or more times. A FuncExport represents an
 // internal entry point that can be called via function definition index by

@@ -356,22 +367,21 @@ typedef RefPtr<const Metadata> SharedMetadata;
 // moment, Code objects are owned uniquely by instances since CodeSegments are
 // not shareable. However, once this restriction is removed, a single Code
 // object will be shared between a module and all its instances.
 //
+// profilingLabels_ is lazily initialized, but behind a lock.
 
 class Code : public ShareableBase<Code>
 {
-    const UniqueCodeSegment segment_;
-    const SharedMetadata metadata_;
-    const SharedBytes maybeBytecode_;
-
-    // Mutated at runtime:
-    CacheableCharsVector profilingLabels_;
+    const UniqueConstCodeSegment segment_;
+    const SharedMetadata metadata_;
+    const SharedBytes maybeBytecode_;
+    const ExclusiveData<CacheableCharsVector> profilingLabels_;
 
   public:
-    Code(UniqueCodeSegment segment,
+    Code(UniqueConstCodeSegment segment,
         const Metadata& metadata,
         const ShareableBytes* maybeBytecode);
 
-    CodeSegment& segment() { return *segment_; }
+    const CodeSegment& segment() const { return *segment_; }
     const Metadata& metadata() const { return *metadata_; }
 

@@ -390,7 +400,7 @@ class Code : public ShareableBase<Code>
     // To save memory, profilingLabels_ are generated lazily when profiling mode
     // is enabled.
 
-    void ensureProfilingLabels(bool profilingEnabled);
+    void ensureProfilingLabels(bool profilingEnabled) const;
     const char* profilingLabel(uint32_t funcIndex) const;
 
     // about:memory reporting:

@@ -404,7 +414,7 @@ class Code : public ShareableBase<Code>
     WASM_DECLARE_SERIALIZABLE(Code);
 };
 
-typedef RefPtr<Code> MutableCode;
+typedef RefPtr<const Code> SharedCode;
 
 } // namespace wasm
 } // namespace js

@@ -109,7 +109,7 @@ struct PCComparator
     }
 };
 
-Code*
+const Code*
 Compartment::lookupCode(const void* pc) const
 {
     // lookupCode() can be called asynchronously from the interrupt signal

@@ -77,7 +77,7 @@ class Compartment
     // This methods returns the wasm::Code containing the given pc, if any
     // exists in the compartment.
 
-    Code* lookupCode(const void* pc) const;
+    const Code* lookupCode(const void* pc) const;
 
     // Ensure all Instances in this JSCompartment have profiling labels created.
 

@@ -75,7 +75,7 @@ GeneratedSourceMap::searchLineByOffset(JSContext* cx, uint32_t offset, size_t* e
     return true;
 }
 
-DebugState::DebugState(MutableCode code,
+DebugState::DebugState(SharedCode code,
                        const Metadata& metadata,
                        const ShareableBytes* maybeBytecode)
   : code_(Move(code)),

@@ -79,7 +79,7 @@ typedef HashMap<uint32_t, WasmBreakpointSite*, DefaultHasher<uint32_t>, SystemAl
 
 class DebugState
 {
-    MutableCode code_;
+    const SharedCode code_;
     const SharedMetadata metadata_;
     const SharedBytes maybeBytecode_;
     UniqueGeneratedSourceMap maybeSourceMap_;

@@ -96,7 +96,7 @@ class DebugState
     bool ensureSourceMap(JSContext* cx);
 
   public:
-    DebugState(MutableCode code,
+    DebugState(SharedCode code,
               const Metadata& metadata,
               const ShareableBytes* maybeBytecode);
 

@@ -490,7 +490,7 @@ static inline void
 AssertMatchesCallSite(const WasmActivation& activation, void* callerPC, void* callerFP)
 {
 #ifdef DEBUG
-    Code* code = activation.compartment()->wasm.lookupCode(callerPC);
+    const Code* code = activation.compartment()->wasm.lookupCode(callerPC);
     MOZ_ASSERT(code);
 
     const CodeRange* callerCodeRange = code->lookupRange(callerPC);

@@ -934,7 +934,7 @@ wasm::LookupFaultingInstance(WasmActivation* activation, void* pc, void* fp)
     // simulators which call this function at every load/store before even
     // knowing whether there is a fault.
 
-    Code* code = activation->compartment()->wasm.lookupCode(pc);
+    const Code* code = activation->compartment()->wasm.lookupCode(pc);
     if (!code)
         return nullptr;
 

@@ -319,7 +319,7 @@ Instance::currentMemory_i32(Instance* instance)
 
 Instance::Instance(JSContext* cx,
                    Handle<WasmInstanceObject*> object,
-                   MutableCode code,
+                   SharedCode code,
                    UniqueDebugState debug,
                    UniqueGlobalSegment globals,
                    HandleWasmMemoryObject memory,

@@ -67,7 +67,7 @@ class Instance
 {
     JSCompartment* const compartment_;
     ReadBarrieredWasmInstanceObject object_;
-    const MutableCode code_;
+    const SharedCode code_;
     const UniqueDebugState debug_;
     const UniqueGlobalSegment globals_;
     GCPtrWasmMemoryObject memory_;

@@ -89,7 +89,7 @@ class Instance
   public:
     Instance(JSContext* cx,
              HandleWasmInstanceObject object,
-             MutableCode code,
+             SharedCode code,
             UniqueDebugState debug,
             UniqueGlobalSegment globals,
             HandleWasmMemoryObject memory,

@@ -102,7 +102,6 @@ class Instance
 
     JSContext* cx() const { return tlsData()->cx; }
     JSCompartment* compartment() const { return compartment_; }
-    Code& code() { return *code_; }
     const Code& code() const { return *code_; }
     DebugState& debug() { return *debug_; }
     const DebugState& debug() const { return *debug_; }

@@ -976,7 +976,7 @@ WasmInstanceObject::trace(JSTracer* trc, JSObject* obj)
 
 /* static */ WasmInstanceObject*
 WasmInstanceObject::create(JSContext* cx,
-                           MutableCode code,
+                           SharedCode code,
                           UniqueDebugState debug,
                           UniqueGlobalSegment globals,
                           HandleWasmMemoryObject memory,

@@ -184,7 +184,7 @@ class WasmInstanceObject : public NativeObject
     static bool construct(JSContext*, unsigned, Value*);
 
     static WasmInstanceObject* create(JSContext* cx,
-                                      RefPtr<wasm::Code> code,
+                                      RefPtr<const wasm::Code> code,
                                      UniquePtr<wasm::DebugState> debug,
                                      UniquePtr<wasm::GlobalSegment> globals,
                                      HandleWasmMemoryObject memory,

@@ -881,6 +881,16 @@ Module::instantiate(JSContext* cx,
             if (!instantiateTable(cx, &table, &tables))
                 return false;
 
+    // The CodeSegment does not hold on to the bytecode, see comment below.
+
+    auto codeSegment = CodeSegment::create(cx, code_, bytecode_, linkData_, *metadata_);
+    if (!codeSegment)
+        return false;
+
+    auto globalSegment = GlobalSegment::create(linkData_.globalDataLength);
+    if (!globalSegment)
+        return false;
+
     // To support viewing the source of an instance (Instance::createText), the
     // instance must hold onto a ref of the bytecode (keeping it alive). This
     // wastes memory for most users, so we try to only save the source when a

@@ -896,21 +906,14 @@ Module::instantiate(JSContext* cx,
         maybeBytecode = bytecode_.get();
     }
 
-    auto codeSegment = CodeSegment::create(cx, code_, linkData_, *metadata_, memory);
-    if (!codeSegment)
-        return false;
-
-    auto globalSegment = GlobalSegment::create(linkData_.globalDataLength);
-    if (!globalSegment)
-        return false;
-
-    MutableCode code(js_new<Code>(Move(codeSegment), *metadata_, maybeBytecode));
+    SharedCode code(js_new<Code>(Move(codeSegment), *metadata_, maybeBytecode));
     if (!code)
         return false;
 
     // The debug object must be present even when debugging is not enabled: It
     // provides the lazily created source text for the program, even if that
     // text is a placeholder message when debugging is not enabled.
 
     auto debug = cx->make_unique<DebugState>(code, *metadata_, maybeBytecode);
     if (!debug)
         return false;

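Module::instantiate can now create the CodeSegment before deciding whether to retain the bytecode: the segment reads the bytecode only transiently (for the profiler ranges) and no longer keeps it alive, so retention for debugging is a separate, caller-side choice. A standalone sketch of that ownership split (types and names here are illustrative):

    #include <cstdint>
    #include <memory>
    #include <vector>

    using Bytes = std::vector<uint8_t>;
    using SharedBytes = std::shared_ptr<const Bytes>;

    struct Segment { Bytes machineCode; };  // owns executable code only

    std::unique_ptr<const Segment>
    CreateSegment(const Bytes& codeBytes, const SharedBytes& bytecode)
    {
        (void)bytecode;  // consulted only during creation; no reference is kept
        return std::unique_ptr<const Segment>(new Segment{codeBytes});
    }

    // The caller decides independently whether to keep `bytecode` alive,
    // e.g. for a debugger that needs source text later.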
@@ -834,7 +834,7 @@ HandleFault(PEXCEPTION_POINTERS exception)
     if (!activation)
         return false;
 
-    Code* code = activation->compartment()->wasm.lookupCode(pc);
+    const Code* code = activation->compartment()->wasm.lookupCode(pc);
     if (!code)
         return false;
 