mirror of https://github.com/mozilla/gecko-dev.git
Bug 1614622 part 5 - Remove most optimization tracking storage code. r=djvj
Differential Revision: https://phabricator.services.mozilla.com/D62429

--HG--
extra : moz-landing-system : lando
Parent: 79a35a52a6
Commit: ae08c642db
@@ -176,17 +176,13 @@ class MOZ_STACK_CLASS ProfiledFrameHandle {
  void* canonicalAddr_;
  const char* label_;
  uint32_t depth_;
  mozilla::Maybe<uint8_t> optsIndex_;

  ProfiledFrameHandle(JSRuntime* rt, js::jit::JitcodeGlobalEntry& entry,
                      void* addr, const char* label, uint32_t depth);

  void updateHasTrackedOptimizations();

 public:
  const char* label() const { return label_; }
  uint32_t depth() const { return depth_; }
  bool hasTrackedOptimizations() const { return optsIndex_.isSome(); }
  void* canonicalAddress() const { return canonicalAddr_; }

  JS_PUBLIC_API ProfilingFrameIterator::FrameKind frameKind() const;
@@ -303,8 +303,6 @@ void js::gc::GCRuntime::traceRuntimeForMinorGC(JSTracer* trc,
  // the verifier for the last time.
  gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::MARK_ROOTS);

  jit::JitRuntime::TraceJitcodeGlobalTableForMinorGC(trc);

  traceRuntimeCommon(trc, TraceRuntime);
}
@@ -6222,28 +6222,6 @@ void CodeGenerator::emitDebugForceBailing(LInstruction* lir) {
}
#endif

static void DumpTrackedSite(const BytecodeSite* site) {
  if (!JitSpewEnabled(JitSpew_OptimizationTracking)) {
    return;
  }

#ifdef JS_JITSPEW
  unsigned column = 0;
  unsigned lineNumber = PCToLineNumber(site->script(), site->pc(), &column);
  JitSpew(JitSpew_OptimizationTracking, "Types for %s at %s:%u:%u",
          CodeName(JSOp(*site->pc())), site->script()->filename(), lineNumber,
          column);
#endif
}

static void DumpTrackedOptimizations(TrackedOptimizations* optimizations) {
  if (!JitSpewEnabled(JitSpew_OptimizationTracking)) {
    return;
  }

  optimizations->spew(JitSpew_OptimizationTracking);
}

bool CodeGenerator::generateBody() {
  JitSpew(JitSpew_Codegen, "==== BEGIN CodeGenerator::generateBody ====\n");
  IonScriptCounts* counts = maybeCreateScriptCounts();
@@ -6295,7 +6273,6 @@ bool CodeGenerator::generateBody() {
      return false;
    }
  }
  TrackedOptimizations* last = nullptr;

#if defined(JS_ION_PERF)
  if (!perfSpewer->startBasicBlock(current->mir(), masm)) {
@@ -6334,19 +6311,6 @@ bool CodeGenerator::generateBody() {
          return false;
        }
      }

      // Track the start native offset of optimizations.
      if (iter->mirRaw()->trackedOptimizations()) {
        if (last != iter->mirRaw()->trackedOptimizations()) {
          DumpTrackedSite(iter->mirRaw()->trackedSite());
          DumpTrackedOptimizations(iter->mirRaw()->trackedOptimizations());
          last = iter->mirRaw()->trackedOptimizations();
        }
        if (!addTrackedOptimizationsEntry(
                iter->mirRaw()->trackedOptimizations())) {
          return false;
        }
      }
    }

    setElement(*iter);  // needed to encode correct snapshot location.
@@ -6369,11 +6333,6 @@ bool CodeGenerator::generateBody() {
        MOZ_CRASH("Invalid LIR op");
    }

    // Track the end native offset of optimizations.
    if (iter->mirRaw() && iter->mirRaw()->trackedOptimizations()) {
      extendTrackedOptimizationsEntry(iter->mirRaw()->trackedOptimizations());
    }

#ifdef DEBUG
    if (!counts) {
      emitDebugResultChecks(*iter);
@@ -10747,32 +10706,6 @@ bool CodeGenerator::link(JSContext* cx, CompilerConstraintList* constraints) {
  // nativeToBytecodeScriptList_ is no longer needed.
  js_free(nativeToBytecodeScriptList_);

  // Generate the tracked optimizations map.
  if (isOptimizationTrackingEnabled()) {
    // Treat OOMs and failures as if optimization tracking were turned off.
    IonTrackedTypeVector* allTypes = cx->new_<IonTrackedTypeVector>();
    if (allTypes &&
        generateCompactTrackedOptimizationsMap(cx, code, allTypes)) {
      const uint8_t* optsRegionTableAddr =
          trackedOptimizationsMap_ + trackedOptimizationsRegionTableOffset_;
      const IonTrackedOptimizationsRegionTable* optsRegionTable =
          (const IonTrackedOptimizationsRegionTable*)optsRegionTableAddr;
      const uint8_t* optsTypesTableAddr =
          trackedOptimizationsMap_ + trackedOptimizationsTypesTableOffset_;
      const IonTrackedOptimizationsTypesTable* optsTypesTable =
          (const IonTrackedOptimizationsTypesTable*)optsTypesTableAddr;
      const uint8_t* optsAttemptsTableAddr =
          trackedOptimizationsMap_ + trackedOptimizationsAttemptsTableOffset_;
      const IonTrackedOptimizationsAttemptsTable* optsAttemptsTable =
          (const IonTrackedOptimizationsAttemptsTable*)optsAttemptsTableAddr;
      entry.initTrackedOptimizations(optsRegionTable, optsTypesTable,
                                     optsAttemptsTable, allTypes);
    } else {
      cx->recoverFromOutOfMemory();
      js_delete(allTypes);
    }
  }

  // Add entry to the global table.
  JitcodeGlobalTable* globalTable =
      cx->runtime()->jitRuntime()->getJitcodeGlobalTable();
@@ -508,15 +508,6 @@ void JitRuntime::Trace(JSTracer* trc, const AutoAccessAtomsZone& access) {
  }
}

/* static */
void JitRuntime::TraceJitcodeGlobalTableForMinorGC(JSTracer* trc) {
  if (trc->runtime()->geckoProfiler().enabled() &&
      trc->runtime()->hasJitRuntime() &&
      trc->runtime()->jitRuntime()->hasJitcodeGlobalTable()) {
    trc->runtime()->jitRuntime()->getJitcodeGlobalTable()->traceForMinorGC(trc);
  }
}

/* static */
bool JitRuntime::MarkJitcodeGlobalTableIteratively(GCMarker* marker) {
  if (marker->runtime()->hasJitRuntime() &&
@@ -303,7 +303,6 @@ class JitRuntime {
  MOZ_MUST_USE bool initialize(JSContext* cx);

  static void Trace(JSTracer* trc, const js::AutoAccessAtomsZone& access);
  static void TraceJitcodeGlobalTableForMinorGC(JSTracer* trc);
  static MOZ_MUST_USE bool MarkJitcodeGlobalTableIteratively(GCMarker* marker);
  static void TraceWeakJitcodeGlobalTable(JSRuntime* rt, JSTracer* trc);
@@ -132,19 +132,6 @@ void JitcodeGlobalEntry::IonEntry::destroy() {
  // Free the script list
  js_free(scriptList_);
  scriptList_ = nullptr;

  // The optimizations region and attempts table is in the same block of
  // memory, the beginning of which is pointed to by
  // optimizationsRegionTable_->payloadStart().
  if (optsRegionTable_) {
    MOZ_ASSERT(optsAttemptsTable_);
    js_free((void*)optsRegionTable_->payloadStart());
  }
  optsRegionTable_ = nullptr;
  optsTypesTable_ = nullptr;
  optsAttemptsTable_ = nullptr;
  js_delete(optsAllTypes_);
  optsAllTypes_ = nullptr;
}

void* JitcodeGlobalEntry::BaselineEntry::canonicalNativeAddrFor(
@@ -383,12 +370,6 @@ bool JitcodeGlobalTable::addEntry(const JitcodeGlobalEntry& entry) {
  skiplistSize_++;
  // verifySkiplist(); - disabled for release.

  // Any entries that may directly contain nursery pointers must be marked
  // during a minor GC to update those pointers.
  if (entry.canHoldNurseryPointers()) {
    addToNurseryList(&newEntry->ionEntry());
  }

  return true;
}
@@ -396,10 +377,6 @@ void JitcodeGlobalTable::removeEntry(JitcodeGlobalEntry& entry,
                                     JitcodeGlobalEntry** prevTower) {
  MOZ_ASSERT(!TlsContext.get()->isProfilerSamplingEnabled());

  if (entry.canHoldNurseryPointers()) {
    removeFromNurseryList(&entry.ionEntry());
  }

  // Unlink query entry.
  for (int level = entry.tower_->height() - 1; level >= 0; level--) {
    JitcodeGlobalEntry* prevTowerEntry = prevTower[level];
@@ -578,9 +555,6 @@ void JitcodeGlobalTable::setAllEntriesAsExpired() {
  AutoSuppressProfilerSampling suppressSampling(TlsContext.get());
  for (Range r(*this); !r.empty(); r.popFront()) {
    auto entry = r.front();
    if (entry->canHoldNurseryPointers()) {
      removeFromNurseryList(&entry->ionEntry());
    }
    entry->setAsExpired();
  }
}
@@ -592,23 +566,6 @@ struct Unconditionally {
  }
};

void JitcodeGlobalTable::traceForMinorGC(JSTracer* trc) {
  // Trace only entries that can directly contain nursery pointers.

  MOZ_ASSERT(trc->runtime()->geckoProfiler().enabled());
  MOZ_ASSERT(JS::RuntimeHeapIsMinorCollecting());

  JSContext* cx = trc->runtime()->mainContextFromOwnThread();
  AutoSuppressProfilerSampling suppressSampling(cx);
  JitcodeGlobalEntry::IonEntry* entry = nurseryEntries_;
  while (entry) {
    entry->trace<Unconditionally>(trc);
    JitcodeGlobalEntry::IonEntry* prev = entry;
    entry = entry->nextNursery_;
    removeFromNurseryList(prev);
  }
}

struct IfUnmarked {
  template <typename T>
  static bool ShouldTrace(JSRuntime* rt, T* thingp) {
@@ -666,9 +623,6 @@ bool JitcodeGlobalTable::markIteratively(GCMarker* marker) {
    // types used by optimizations and scripts used for pc to line number
    // mapping, alive as well.
    if (!rangeStart || !entry->isSampled(*rangeStart)) {
      if (entry->canHoldNurseryPointers()) {
        removeFromNurseryList(&entry->ionEntry());
      }
      entry->setAsExpired();
      if (!entry->baseEntry().isJitcodeMarkedFromAnyThread(marker->runtime())) {
        continue;
@@ -753,31 +707,6 @@ bool JitcodeGlobalEntry::IonEntry::trace(JSTracer* trc) {
    }
  }

  if (!optsAllTypes_) {
    return tracedAny;
  }

  for (IonTrackedTypeWithAddendum* iter = optsAllTypes_->begin();
       iter != optsAllTypes_->end(); iter++) {
    if (ShouldTraceProvider::ShouldTrace(rt, &iter->type)) {
      iter->type.trace(trc);
      tracedAny = true;
    }
    if (iter->hasAllocationSite() &&
        ShouldTraceProvider::ShouldTrace(rt, &iter->script)) {
      TraceManuallyBarrieredEdge(
          trc, &iter->script,
          "jitcodeglobaltable-ionentry-type-addendum-script");
      tracedAny = true;
    } else if (iter->hasConstructor() &&
               ShouldTraceProvider::ShouldTrace(rt, &iter->constructor)) {
      TraceManuallyBarrieredEdge(
          trc, &iter->constructor,
          "jitcodeglobaltable-ionentry-type-addendum-constructor");
      tracedAny = true;
    }
  }

  return tracedAny;
}
@@ -786,22 +715,6 @@ void JitcodeGlobalEntry::IonEntry::sweepChildren() {
    MOZ_ALWAYS_FALSE(
        IsAboutToBeFinalizedUnbarriered(&sizedScriptList()->pairs[i].script));
  }

  if (!optsAllTypes_) {
    return;
  }

  for (IonTrackedTypeWithAddendum* iter = optsAllTypes_->begin();
       iter != optsAllTypes_->end(); iter++) {
    // Types may move under compacting GC. This method is only called on
    // entries that are sampled, and thus are not about to be finalized.
    MOZ_ALWAYS_FALSE(TypeSet::IsTypeAboutToBeFinalized(&iter->type));
    if (iter->hasAllocationSite()) {
      MOZ_ALWAYS_FALSE(IsAboutToBeFinalizedUnbarriered(&iter->script));
    } else if (iter->hasConstructor()) {
      MOZ_ALWAYS_FALSE(IsAboutToBeFinalizedUnbarriered(&iter->constructor));
    }
  }
}

bool JitcodeGlobalEntry::IonEntry::isMarkedFromAnyThread(JSRuntime* rt) {
@@ -811,17 +724,6 @@ bool JitcodeGlobalEntry::IonEntry::isMarkedFromAnyThread(JSRuntime* rt) {
    }
  }

  if (!optsAllTypes_) {
    return true;
  }

  for (IonTrackedTypeWithAddendum* iter = optsAllTypes_->begin();
       iter != optsAllTypes_->end(); iter++) {
    if (!TypeSet::IsTypeMarked(rt, &iter->type)) {
      return false;
    }
  }

  return true;
}
@@ -1418,14 +1320,8 @@ JS::ProfiledFrameHandle::ProfiledFrameHandle(JSRuntime* rt,
      addr_(addr),
      canonicalAddr_(nullptr),
      label_(label),
      depth_(depth),
      optsIndex_() {
  updateHasTrackedOptimizations();

      depth_(depth) {
  if (!canonicalAddr_) {
    // If the entry has tracked optimizations, updateHasTrackedOptimizations
    // would have updated the canonical address.
    MOZ_ASSERT_IF(entry_.isIon(), !hasTrackedOptimizations());
    canonicalAddr_ = entry_.canonicalNativeAddrFor(rt_, addr_);
  }
}
@@ -214,29 +214,6 @@ class JitcodeGlobalEntry {
    // of the memory space.
    JitcodeIonTable* regionTable_;

    // optsRegionTable_ points to the table within the compact
    // optimizations map indexing all regions that have tracked
    // optimization attempts. optsTypesTable_ is the tracked typed info
    // associated with the attempts vectors; it is the same length as the
    // attempts table. optsAttemptsTable_ is the table indexing those
    // attempts vectors.
    //
    // All pointers point into the same block of memory; the beginning of
    // the block is optRegionTable_->payloadStart().
    const IonTrackedOptimizationsRegionTable* optsRegionTable_;
    const IonTrackedOptimizationsTypesTable* optsTypesTable_;
    const IonTrackedOptimizationsAttemptsTable* optsAttemptsTable_;

    // The types table above records type sets, which have been gathered
    // into one vector here.
    IonTrackedTypeVector* optsAllTypes_;

    // Linked list pointers to allow traversing through all entries that
    // could possibly contain nursery pointers. Note that the contained
    // pointers can be mutated into nursery pointers at any time.
    IonEntry* prevNursery_;
    IonEntry* nextNursery_;

    struct ScriptNamePair {
      JSScript* script;
      char* str;
@@ -267,22 +244,6 @@ class JitcodeGlobalEntry {
      BaseEntry::init(Ion, code, nativeStartAddr, nativeEndAddr);
      regionTable_ = regionTable;
      scriptList_ = scriptList;
      optsRegionTable_ = nullptr;
      optsTypesTable_ = nullptr;
      optsAllTypes_ = nullptr;
      optsAttemptsTable_ = nullptr;
      prevNursery_ = nextNursery_ = nullptr;
    }

    void initTrackedOptimizations(
        const IonTrackedOptimizationsRegionTable* regionTable,
        const IonTrackedOptimizationsTypesTable* typesTable,
        const IonTrackedOptimizationsAttemptsTable* attemptsTable,
        IonTrackedTypeVector* allTypes) {
      optsRegionTable_ = regionTable;
      optsTypesTable_ = typesTable;
      optsAttemptsTable_ = attemptsTable;
      optsAllTypes_ = allTypes;
    }

    SizedScriptList* sizedScriptList() const { return scriptList_; }
@@ -324,37 +285,6 @@ class JitcodeGlobalEntry {

    uint64_t lookupRealmID(void* ptr) const;

    bool hasTrackedOptimizations() const { return !!optsRegionTable_; }

    const IonTrackedOptimizationsRegionTable* trackedOptimizationsRegionTable()
        const {
      MOZ_ASSERT(hasTrackedOptimizations());
      return optsRegionTable_;
    }

    uint8_t numOptimizationAttempts() const {
      MOZ_ASSERT(hasTrackedOptimizations());
      return optsAttemptsTable_->numEntries();
    }

    IonTrackedOptimizationsAttempts trackedOptimizationAttempts(uint8_t index) {
      MOZ_ASSERT(hasTrackedOptimizations());
      return optsAttemptsTable_->entry(index);
    }

    IonTrackedOptimizationsTypeInfo trackedOptimizationTypeInfo(uint8_t index) {
      MOZ_ASSERT(hasTrackedOptimizations());
      return optsTypesTable_->entry(index);
    }

    const IonTrackedTypeVector* allTrackedTypes() {
      MOZ_ASSERT(hasTrackedOptimizations());
      return optsAllTypes_;
    }

    mozilla::Maybe<uint8_t> trackedOptimizationIndexAtAddr(
        void* ptr, uint32_t* entryOffsetOut);

    template <class ShouldTraceProvider>
    bool trace(JSTracer* trc);
    void sweepChildren();
@@ -724,37 +654,6 @@ class JitcodeGlobalEntry {
    return compare(*this, other);
  }

  bool hasTrackedOptimizations() const {
    switch (kind()) {
      case Ion:
        return ionEntry().hasTrackedOptimizations();
      case Baseline:
      case Dummy:
        break;
      default:
        MOZ_CRASH("Invalid JitcodeGlobalEntry kind.");
    }
    return false;
  }

  bool canHoldNurseryPointers() const {
    return isIon() && ionEntry().hasTrackedOptimizations();
  }

  mozilla::Maybe<uint8_t> trackedOptimizationIndexAtAddr(
      JSRuntime* rt, void* addr, uint32_t* entryOffsetOut) {
    switch (kind()) {
      case Ion:
        return ionEntry().trackedOptimizationIndexAtAddr(addr, entryOffsetOut);
      case Baseline:
      case Dummy:
        break;
      default:
        MOZ_CRASH("Invalid JitcodeGlobalEntry kind.");
    }
    return mozilla::Nothing();
  }

  Zone* zone() { return baseEntry().jitcode()->zone(); }

  template <class ShouldTraceProvider>
@@ -851,7 +750,6 @@ class JitcodeGlobalTable {
  JitcodeGlobalEntry* freeEntries_;
  uint32_t rand_;
  uint32_t skiplistSize_;
  JitcodeGlobalEntry::IonEntry* nurseryEntries_;

  JitcodeGlobalEntry* startTower_[JitcodeSkiplistTower::MAX_HEIGHT];
  JitcodeSkiplistTower* freeTowers_[JitcodeSkiplistTower::MAX_HEIGHT];
@@ -861,8 +759,7 @@ class JitcodeGlobalTable {
      : alloc_(LIFO_CHUNK_SIZE),
        freeEntries_(nullptr),
        rand_(0),
        skiplistSize_(0),
        nurseryEntries_(nullptr) {
        skiplistSize_(0) {
    for (unsigned i = 0; i < JitcodeSkiplistTower::MAX_HEIGHT; i++) {
      startTower_[i] = nullptr;
    }
@@ -898,7 +795,6 @@ class JitcodeGlobalTable {
                                  JSRuntime* rt);

  void setAllEntriesAsExpired();
  void traceForMinorGC(JSTracer* trc);
  MOZ_MUST_USE bool markIteratively(GCMarker* marker);
  void traceWeak(JSRuntime* rt, JSTracer* trc);
@@ -931,33 +827,6 @@ class JitcodeGlobalTable {
  void verifySkiplist() {}
#endif

  void addToNurseryList(JitcodeGlobalEntry::IonEntry* entry) {
    MOZ_ASSERT(entry->prevNursery_ == nullptr);
    MOZ_ASSERT(entry->nextNursery_ == nullptr);

    entry->nextNursery_ = nurseryEntries_;
    if (nurseryEntries_) {
      nurseryEntries_->prevNursery_ = entry;
    }
    nurseryEntries_ = entry;
  }

  void removeFromNurseryList(JitcodeGlobalEntry::IonEntry* entry) {
    // Splice out of list to be scanned on a minor GC.
    if (entry->prevNursery_) {
      entry->prevNursery_->nextNursery_ = entry->nextNursery_;
    }
    if (entry->nextNursery_) {
      entry->nextNursery_->prevNursery_ = entry->prevNursery_;
    }

    if (nurseryEntries_ == entry) {
      nurseryEntries_ = entry->nextNursery_;
    }

    entry->prevNursery_ = entry->nextNursery_ = nullptr;
  }

 public:
  class Range {
   protected:
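The addToNurseryList/removeFromNurseryList pair above is a standard intrusive doubly-linked list push and splice. A minimal self-contained sketch of the same pattern, with a hypothetical Node type standing in for IonEntry (illustration only, not code from the tree):

#include <cassert>

struct Node {
  Node* prev = nullptr;
  Node* next = nullptr;
};

// Push onto the list head, as addToNurseryList does.
void Push(Node*& head, Node* n) {
  assert(!n->prev && !n->next);
  n->next = head;
  if (head) {
    head->prev = n;
  }
  head = n;
}

// Splice out of the list, as removeFromNurseryList does.
void Remove(Node*& head, Node* n) {
  if (n->prev) {
    n->prev->next = n->next;
  }
  if (n->next) {
    n->next->prev = n->prev;
  }
  if (head == n) {
    head = n->next;
  }
  n->prev = n->next = nullptr;
}

int main() {
  Node a, b;
  Node* head = nullptr;
  Push(head, &a);
  Push(head, &b);  // list is now b -> a
  Remove(head, &a);
  assert(head == &b && !b.prev && !b.next);
  return 0;
}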
@@ -371,583 +371,6 @@ bool UniqueTrackedTypes::enumerate(TypeSet::TypeList* types) const {
  return types->append(list_.begin(), list_.end());
}

void IonTrackedOptimizationsRegion::unpackHeader() {
  CompactBufferReader reader(start_, end_);
  startOffset_ = reader.readUnsigned();
  endOffset_ = reader.readUnsigned();
  rangesStart_ = reader.currentPosition();
  MOZ_ASSERT(startOffset_ < endOffset_);
}

void IonTrackedOptimizationsRegion::RangeIterator::readNext(
    uint32_t* startOffset, uint32_t* endOffset, uint8_t* index) {
  MOZ_ASSERT(more());

  CompactBufferReader reader(cur_, end_);

  // The very first entry isn't delta-encoded.
  if (cur_ == start_) {
    *startOffset = firstStartOffset_;
    *endOffset = prevEndOffset_ = reader.readUnsigned();
    *index = reader.readByte();
    cur_ = reader.currentPosition();
    MOZ_ASSERT(cur_ <= end_);
    return;
  }

  // Otherwise, read a delta.
  uint32_t startDelta, length;
  ReadDelta(reader, &startDelta, &length, index);
  *startOffset = prevEndOffset_ + startDelta;
  *endOffset = prevEndOffset_ = *startOffset + length;
  cur_ = reader.currentPosition();
  MOZ_ASSERT(cur_ <= end_);
}

Maybe<uint8_t> JitcodeGlobalEntry::IonEntry::trackedOptimizationIndexAtAddr(
    void* ptr, uint32_t* entryOffsetOut) {
  MOZ_ASSERT(hasTrackedOptimizations());
  MOZ_ASSERT(containsPointer(ptr));
  uint32_t ptrOffset = ((uint8_t*)ptr) - ((uint8_t*)nativeStartAddr());
  Maybe<IonTrackedOptimizationsRegion> region =
      optsRegionTable_->findRegion(ptrOffset);
  if (region.isNothing()) {
    return Nothing();
  }
  return region->findIndex(ptrOffset, entryOffsetOut);
}

Maybe<uint8_t> IonTrackedOptimizationsRegion::findIndex(
    uint32_t offset, uint32_t* entryOffsetOut) const {
  if (offset <= startOffset_ || offset > endOffset_) {
    return Nothing();
  }

  // Linear search through the run.
  RangeIterator iter = ranges();
  while (iter.more()) {
    uint32_t startOffset, endOffset;
    uint8_t index;
    iter.readNext(&startOffset, &endOffset, &index);
    if (startOffset < offset && offset <= endOffset) {
      *entryOffsetOut = endOffset;
      return Some(index);
    }
  }
  return Nothing();
}

Maybe<IonTrackedOptimizationsRegion>
IonTrackedOptimizationsRegionTable::findRegion(uint32_t offset) const {
  // For two contiguous regions, e.g., [i, j] and [j, k], an offset exactly
  // at j will be associated with [i, j] instead of [j, k]. An offset
  // exactly at j is often a return address from a younger frame, in which
  // case the next region, despite starting at j, has not yet logically
  // started execution.

  static const uint32_t LINEAR_SEARCH_THRESHOLD = 8;
  uint32_t regions = numEntries();
  MOZ_ASSERT(regions > 0);

  // For small numbers of regions, do linear search.
  if (regions <= LINEAR_SEARCH_THRESHOLD) {
    for (uint32_t i = 0; i < regions; i++) {
      IonTrackedOptimizationsRegion region = entry(i);
      if (region.startOffset() < offset && offset <= region.endOffset()) {
        return Some(entry(i));
      }
    }
    return Nothing();
  }

  // Otherwise, do binary search.
  uint32_t i = 0;
  while (regions > 1) {
    uint32_t step = regions / 2;
    uint32_t mid = i + step;
    IonTrackedOptimizationsRegion region = entry(mid);

    if (offset <= region.startOffset()) {
      // Entry is below mid.
      regions = step;
    } else if (offset > region.endOffset()) {
      // Entry is above mid.
      i = mid;
      regions -= step;
    } else {
      // Entry is in mid.
      return Some(entry(i));
    }
  }
  return Nothing();
}

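The search above treats each region as the half-open interval (startOffset, endOffset], so an offset equal to a shared boundary resolves to the lower region, matching the return-address rationale in the comment. A self-contained sketch of the same split logic over a plain sorted vector (hypothetical standalone code, not from the patch):

#include <cstdint>
#include <utility>
#include <vector>

// Returns the index of the (start, end] range containing offset, or -1.
int FindRange(const std::vector<std::pair<uint32_t, uint32_t>>& ranges,
              uint32_t offset) {
  if (ranges.empty()) {
    return -1;
  }
  size_t i = 0;
  size_t count = ranges.size();
  while (count > 1) {
    size_t step = count / 2;
    size_t mid = i + step;
    if (offset <= ranges[mid].first) {
      count = step;  // at or below mid's start: search the lower half
    } else if (offset > ranges[mid].second) {
      i = mid;  // past mid's end: search the upper half
      count -= step;
    } else {
      return int(mid);  // mid's (start, end] contains offset
    }
  }
  return (offset > ranges[i].first && offset <= ranges[i].second) ? int(i)
                                                                  : -1;
}

int main() {
  std::vector<std::pair<uint32_t, uint32_t>> r = {{0, 10}, {10, 25}, {25, 40}};
  // Offset 10 is a shared boundary and resolves to the lower range.
  return (FindRange(r, 10) == 0 && FindRange(r, 11) == 1) ? 0 : 1;
}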
/* static */
uint32_t IonTrackedOptimizationsRegion::ExpectedRunLength(
    const NativeToTrackedOptimizations* start,
    const NativeToTrackedOptimizations* end) {
  MOZ_ASSERT(start < end);

  // A run always has at least 1 entry, which is not delta encoded.
  uint32_t runLength = 1;
  uint32_t prevEndOffset = start->endOffset.offset();

  for (const NativeToTrackedOptimizations* entry = start + 1; entry != end;
       entry++) {
    uint32_t startOffset = entry->startOffset.offset();
    uint32_t endOffset = entry->endOffset.offset();
    uint32_t startDelta = startOffset - prevEndOffset;
    uint32_t length = endOffset - startOffset;

    if (!IsDeltaEncodeable(startDelta, length)) {
      break;
    }

    runLength++;
    if (runLength == MAX_RUN_LENGTH) {
      break;
    }

    prevEndOffset = endOffset;
  }

  return runLength;
}

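A compilable model of this partitioning rule, using the ENC4 limits and run cap from the header below but otherwise invented offsets:

#include <cassert>
#include <cstdint>
#include <utility>
#include <vector>

int main() {
  const uint32_t kStartDeltaMax = 0x7fff;  // ENC4_START_DELTA_MAX
  const uint32_t kLengthMax = 0x3fff;      // ENC4_LENGTH_MAX
  const uint32_t kMaxRun = 100;            // MAX_RUN_LENGTH

  // (startOffset, endOffset) pairs; the huge gap before the third entry
  // makes its startDelta unencodeable and ends the run.
  std::vector<std::pair<uint32_t, uint32_t>> entries = {
      {0, 10}, {12, 20}, {100000, 100004}};

  uint32_t runLength = 1;  // the first entry is never delta-encoded
  uint32_t prevEnd = entries[0].second;
  for (size_t i = 1; i < entries.size() && runLength < kMaxRun; i++) {
    uint32_t startDelta = entries[i].first - prevEnd;
    uint32_t length = entries[i].second - entries[i].first;
    if (startDelta > kStartDeltaMax || length > kLengthMax) {
      break;
    }
    runLength++;
    prevEnd = entries[i].second;
  }
  assert(runLength == 2);
  return 0;
}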
void OptimizationAttempt::writeCompact(CompactBufferWriter& writer) const {
  writer.writeUnsigned((uint32_t)strategy_);
  writer.writeUnsigned((uint32_t)outcome_);
}

bool OptimizationTypeInfo::writeCompact(CompactBufferWriter& writer,
                                        UniqueTrackedTypes& uniqueTypes) const {
  writer.writeUnsigned((uint32_t)site_);
  writer.writeUnsigned((uint32_t)mirType_);
  writer.writeUnsigned(types_.length());
  for (uint32_t i = 0; i < types_.length(); i++) {
    uint8_t index;
    if (!uniqueTypes.getIndexOf(types_[i], &index)) {
      return false;
    }
    writer.writeByte(index);
  }
  return true;
}

/* static */
void IonTrackedOptimizationsRegion::ReadDelta(CompactBufferReader& reader,
                                              uint32_t* startDelta,
                                              uint32_t* length,
                                              uint8_t* index) {
  // 2 bytes
  // SSSS-SSSL LLLL-LII0
  const uint32_t firstByte = reader.readByte();
  const uint32_t secondByte = reader.readByte();
  if ((firstByte & ENC1_MASK) == ENC1_MASK_VAL) {
    uint32_t encVal = firstByte | secondByte << 8;
    *startDelta = encVal >> ENC1_START_DELTA_SHIFT;
    *length = (encVal >> ENC1_LENGTH_SHIFT) & ENC1_LENGTH_MAX;
    *index = (encVal >> ENC1_INDEX_SHIFT) & ENC1_INDEX_MAX;
    MOZ_ASSERT(length != 0);
    return;
  }

  // 3 bytes
  // SSSS-SSSS SSSS-LLLL LLII-II01
  const uint32_t thirdByte = reader.readByte();
  if ((firstByte & ENC2_MASK) == ENC2_MASK_VAL) {
    uint32_t encVal = firstByte | secondByte << 8 | thirdByte << 16;
    *startDelta = encVal >> ENC2_START_DELTA_SHIFT;
    *length = (encVal >> ENC2_LENGTH_SHIFT) & ENC2_LENGTH_MAX;
    *index = (encVal >> ENC2_INDEX_SHIFT) & ENC2_INDEX_MAX;
    MOZ_ASSERT(length != 0);
    return;
  }

  // 4 bytes
  // SSSS-SSSS SSSL-LLLL LLLL-LIII IIII-I011
  const uint32_t fourthByte = reader.readByte();
  if ((firstByte & ENC3_MASK) == ENC3_MASK_VAL) {
    uint32_t encVal =
        firstByte | secondByte << 8 | thirdByte << 16 | fourthByte << 24;
    *startDelta = encVal >> ENC3_START_DELTA_SHIFT;
    *length = (encVal >> ENC3_LENGTH_SHIFT) & ENC3_LENGTH_MAX;
    *index = (encVal >> ENC3_INDEX_SHIFT) & ENC3_INDEX_MAX;
    MOZ_ASSERT(length != 0);
    return;
  }

  // 5 bytes
  // SSSS-SSSS SSSS-SSSL LLLL-LLLL LLLL-LIII IIII-I111
  MOZ_ASSERT((firstByte & ENC4_MASK) == ENC4_MASK_VAL);
  uint64_t fifthByte = reader.readByte();
  uint64_t encVal = firstByte | secondByte << 8 | thirdByte << 16 |
                    fourthByte << 24 | fifthByte << 32;
  *startDelta = encVal >> ENC4_START_DELTA_SHIFT;
  *length = (encVal >> ENC4_LENGTH_SHIFT) & ENC4_LENGTH_MAX;
  *index = (encVal >> ENC4_INDEX_SHIFT) & ENC4_INDEX_MAX;
  MOZ_ASSERT(length != 0);
}

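As a worked example of the 2-byte branch: the bytes 0x1a and 0x0a have a clear low tag bit, so they decode as ENC1 (the byte values are invented for illustration):

#include <cassert>
#include <cstdint>

int main() {
  const uint32_t firstByte = 0x1a;   // low byte; tag bit 0 is clear => ENC1
  const uint32_t secondByte = 0x0a;  // high byte

  uint32_t encVal = firstByte | secondByte << 8;  // 0x0a1a
  assert((encVal & 0x1) == 0x0);  // ENC1_MASK / ENC1_MASK_VAL

  uint32_t startDelta = encVal >> 9;       // ENC1_START_DELTA_SHIFT
  uint32_t length = (encVal >> 3) & 0x3f;  // ENC1_LENGTH_SHIFT / _MAX
  uint32_t index = (encVal >> 1) & 0x3;    // ENC1_INDEX_SHIFT / _MAX

  assert(startDelta == 5 && length == 3 && index == 1);
  return 0;
}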
/* static */
void IonTrackedOptimizationsRegion::WriteDelta(CompactBufferWriter& writer,
                                               uint32_t startDelta,
                                               uint32_t length, uint8_t index) {
  // 2 bytes
  // SSSS-SSSL LLLL-LII0
  if (startDelta <= ENC1_START_DELTA_MAX && length <= ENC1_LENGTH_MAX &&
      index <= ENC1_INDEX_MAX) {
    uint16_t val = ENC1_MASK_VAL | (startDelta << ENC1_START_DELTA_SHIFT) |
                   (length << ENC1_LENGTH_SHIFT) | (index << ENC1_INDEX_SHIFT);
    writer.writeByte(val & 0xff);
    writer.writeByte((val >> 8) & 0xff);
    return;
  }

  // 3 bytes
  // SSSS-SSSS SSSS-LLLL LLII-II01
  if (startDelta <= ENC2_START_DELTA_MAX && length <= ENC2_LENGTH_MAX &&
      index <= ENC2_INDEX_MAX) {
    uint32_t val = ENC2_MASK_VAL | (startDelta << ENC2_START_DELTA_SHIFT) |
                   (length << ENC2_LENGTH_SHIFT) | (index << ENC2_INDEX_SHIFT);
    writer.writeByte(val & 0xff);
    writer.writeByte((val >> 8) & 0xff);
    writer.writeByte((val >> 16) & 0xff);
    return;
  }

  // 4 bytes
  // SSSS-SSSS SSSL-LLLL LLLL-LIII IIII-I011
  if (startDelta <= ENC3_START_DELTA_MAX && length <= ENC3_LENGTH_MAX) {
    // index always fits because it's an uint8_t; change this if
    // ENC3_INDEX_MAX changes.
    static_assert(ENC3_INDEX_MAX == UINT8_MAX);
    uint32_t val = ENC3_MASK_VAL | (startDelta << ENC3_START_DELTA_SHIFT) |
                   (length << ENC3_LENGTH_SHIFT) | (index << ENC3_INDEX_SHIFT);
    writer.writeByte(val & 0xff);
    writer.writeByte((val >> 8) & 0xff);
    writer.writeByte((val >> 16) & 0xff);
    writer.writeByte((val >> 24) & 0xff);
    return;
  }

  // 5 bytes
  // SSSS-SSSS SSSS-SSSL LLLL-LLLL LLLL-LIII IIII-I111
  if (startDelta <= ENC4_START_DELTA_MAX && length <= ENC4_LENGTH_MAX) {
    // index always fits because it's an uint8_t; change this if
    // ENC4_INDEX_MAX changes.
    static_assert(ENC4_INDEX_MAX == UINT8_MAX);
    uint64_t val = ENC4_MASK_VAL |
                   (((uint64_t)startDelta) << ENC4_START_DELTA_SHIFT) |
                   (((uint64_t)length) << ENC4_LENGTH_SHIFT) |
                   (((uint64_t)index) << ENC4_INDEX_SHIFT);
    writer.writeByte(val & 0xff);
    writer.writeByte((val >> 8) & 0xff);
    writer.writeByte((val >> 16) & 0xff);
    writer.writeByte((val >> 24) & 0xff);
    writer.writeByte((val >> 32) & 0xff);
    return;
  }

  MOZ_CRASH("startDelta,length,index triple too large to encode.");
}

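The trailing tag patterns (0, 01, 011, 111) are what let ReadDelta recover the width from the first byte alone. Encoding the triple from the decode example above round-trips to the same two bytes (again illustrative):

#include <cassert>
#include <cstdint>

int main() {
  // ENC1 branch for (startDelta=5, length=3, index=1); ENC1_MASK_VAL
  // contributes a zero tag bit.
  uint16_t val = (5u << 9) | (3u << 3) | (1u << 1);
  assert((val & 0xff) == 0x1a);         // first byte written
  assert(((val >> 8) & 0xff) == 0x0a);  // second byte written
  return 0;
}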
/* static */
bool IonTrackedOptimizationsRegion::WriteRun(
    CompactBufferWriter& writer, const NativeToTrackedOptimizations* start,
    const NativeToTrackedOptimizations* end,
    const UniqueTrackedOptimizations& unique) {
  // Write the header, which is the range that this whole run encompasses.
  JitSpew(JitSpew_OptimizationTrackingExtended, "  Header: [%zu, %zu]",
          start->startOffset.offset(), (end - 1)->endOffset.offset());
  writer.writeUnsigned(start->startOffset.offset());
  writer.writeUnsigned((end - 1)->endOffset.offset());

  // Write the first entry of the run, which is not delta-encoded.
  JitSpew(JitSpew_OptimizationTrackingExtended,
          "  [%6zu, %6zu] vector %3u, offset %4zu",
          start->startOffset.offset(), start->endOffset.offset(),
          unique.indexOf(start->optimizations), writer.length());
  uint32_t prevEndOffset = start->endOffset.offset();
  writer.writeUnsigned(prevEndOffset);
  writer.writeByte(unique.indexOf(start->optimizations));

  // Delta encode the run.
  for (const NativeToTrackedOptimizations* entry = start + 1; entry != end;
       entry++) {
    uint32_t startOffset = entry->startOffset.offset();
    uint32_t endOffset = entry->endOffset.offset();

    uint32_t startDelta = startOffset - prevEndOffset;
    uint32_t length = endOffset - startOffset;
    uint8_t index = unique.indexOf(entry->optimizations);

    JitSpew(JitSpew_OptimizationTrackingExtended,
            "  [%6u, %6u] delta [+%5u, +%5u] vector %3u, offset %4zu",
            startOffset, endOffset, startDelta, length, index, writer.length());

    WriteDelta(writer, startDelta, length, index);

    prevEndOffset = endOffset;
  }

  if (writer.oom()) {
    return false;
  }

  return true;
}

static bool WriteOffsetsTable(CompactBufferWriter& writer,
                              const Vector<uint32_t, 16>& offsets,
                              uint32_t* tableOffsetp) {
  // 4-byte align for the uint32s.
  uint32_t padding = sizeof(uint32_t) - (writer.length() % sizeof(uint32_t));
  if (padding == sizeof(uint32_t)) {
    padding = 0;
  }
  JitSpew(JitSpew_OptimizationTrackingExtended, "  Padding %u byte%s", padding,
          padding == 1 ? "" : "s");
  for (uint32_t i = 0; i < padding; i++) {
    writer.writeByte(0);
  }

  // Record the start of the table to compute reverse offsets for entries.
  uint32_t tableOffset = writer.length();

  // Write how many bytes were padded and numEntries.
  writer.writeNativeEndianUint32_t(padding);
  writer.writeNativeEndianUint32_t(offsets.length());

  // Write entry offset table.
  for (size_t i = 0; i < offsets.length(); i++) {
    JitSpew(JitSpew_OptimizationTrackingExtended,
            "  Entry %zu reverse offset %u", i,
            tableOffset - padding - offsets[i]);
    writer.writeNativeEndianUint32_t(tableOffset - padding - offsets[i]);
  }

  if (writer.oom()) {
    return false;
  }

  *tableOffsetp = tableOffset;
  return true;
}

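The alignment step above pads the stream out to the next uint32_t boundary and writes nothing when it is already aligned. A standalone check of that arithmetic:

#include <cassert>
#include <cstdint>

int main() {
  for (uint32_t length = 0; length < 16; length++) {
    uint32_t padding = sizeof(uint32_t) - (length % sizeof(uint32_t));
    if (padding == sizeof(uint32_t)) {
      padding = 0;  // already 4-byte aligned
    }
    assert(padding < sizeof(uint32_t));
    assert((length + padding) % sizeof(uint32_t) == 0);
  }
  return 0;
}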
static JSFunction* MaybeConstructorFromType(TypeSet::Type ty) {
  if (ty.isUnknown() || ty.isAnyObject() || !ty.isGroup()) {
    return nullptr;
  }
  ObjectGroup* obj = ty.group();
  AutoSweepObjectGroup sweep(obj);
  TypeNewScript* newScript = obj->newScript(sweep);
  return newScript ? newScript->function() : nullptr;
}

static void InterpretedFunctionFilenameAndLineNumber(JSFunction* fun,
                                                     const char** filename,
                                                     Maybe<unsigned>* lineno) {
  if (fun->hasBaseScript()) {
    *filename = fun->baseScript()->maybeForwardedScriptSource()->filename();
    *lineno = Some((unsigned)fun->baseScript()->lineno());
  } else {
    *filename = "(self-hosted builtin)";
    *lineno = Nothing();
  }
}

static void SpewConstructor(TypeSet::Type ty, JSFunction* constructor) {
#ifdef JS_JITSPEW
  if (!constructor->isInterpreted()) {
    JitSpew(JitSpew_OptimizationTrackingExtended,
            "   Unique type %s has native constructor",
            TypeSet::TypeString(ty).get());
    return;
  }

  char buf[512];
  if (constructor->displayAtom()) {
    PutEscapedString(buf, 512, constructor->displayAtom(), 0);
  } else {
    snprintf(buf, mozilla::ArrayLength(buf), "??");
  }

  const char* filename;
  Maybe<unsigned> lineno;
  InterpretedFunctionFilenameAndLineNumber(constructor, &filename, &lineno);

  JitSpew(JitSpew_OptimizationTrackingExtended,
          "   Unique type %s has constructor %s (%s:%u)",
          TypeSet::TypeString(ty).get(), buf, filename,
          lineno.isSome() ? *lineno : 0);
#endif
}

static void SpewAllocationSite(TypeSet::Type ty, JSScript* script,
                               uint32_t offset) {
#ifdef JS_JITSPEW
  if (!JitSpewEnabled(JitSpew_OptimizationTrackingExtended)) {
    return;
  }

  JitSpew(JitSpew_OptimizationTrackingExtended,
          "   Unique type %s has alloc site %s:%u",
          TypeSet::TypeString(ty).get(), script->filename(),
          PCToLineNumber(script, script->offsetToPC(offset)));
#endif
}

bool jit::WriteIonTrackedOptimizationsTable(
    JSContext* cx, CompactBufferWriter& writer,
    const NativeToTrackedOptimizations* start,
    const NativeToTrackedOptimizations* end,
    const UniqueTrackedOptimizations& unique, uint32_t* numRegions,
    uint32_t* regionTableOffsetp, uint32_t* typesTableOffsetp,
    uint32_t* optimizationTableOffsetp, IonTrackedTypeVector* allTypes) {
  MOZ_ASSERT(unique.sorted());

#ifdef JS_JITSPEW
  // Spew training data, which may be fed into a script to determine a good
  // encoding strategy.
  if (JitSpewEnabled(JitSpew_OptimizationTrackingExtended)) {
    JitSpewStart(JitSpew_OptimizationTrackingExtended, "=> Training data: ");
    for (const NativeToTrackedOptimizations* entry = start; entry != end;
         entry++) {
      JitSpewCont(JitSpew_OptimizationTrackingExtended, "%zu,%zu,%u ",
                  entry->startOffset.offset(), entry->endOffset.offset(),
                  unique.indexOf(entry->optimizations));
    }
    JitSpewFin(JitSpew_OptimizationTrackingExtended);
  }
#endif

  Vector<uint32_t, 16> offsets(cx);
  const NativeToTrackedOptimizations* entry = start;

  // Write out region offloads, partitioned into runs.
  JitSpew(JitSpew_Profiling, "=> Writing regions");
  while (entry != end) {
    uint32_t runLength =
        IonTrackedOptimizationsRegion::ExpectedRunLength(entry, end);
    JitSpew(JitSpew_OptimizationTrackingExtended,
            "  Run at entry %zu, length %" PRIu32 ", offset %zu",
            size_t(entry - start), runLength, writer.length());

    if (!offsets.append(writer.length())) {
      return false;
    }

    if (!IonTrackedOptimizationsRegion::WriteRun(writer, entry,
                                                 entry + runLength, unique)) {
      return false;
    }

    entry += runLength;
  }

  // Write out the table indexing into the payloads. 4-byte align for the
  // uint32s.
  if (!WriteOffsetsTable(writer, offsets, regionTableOffsetp)) {
    return false;
  }

  *numRegions = offsets.length();

  // Clear offsets so that it may be reused below for the unique
  // optimizations table.
  offsets.clear();

  const UniqueTrackedOptimizations::SortedVector& vec = unique.sortedVector();
  JitSpew(JitSpew_OptimizationTrackingExtended,
          "=> Writing unique optimizations table with %zu entr%s", vec.length(),
          vec.length() == 1 ? "y" : "ies");

  // Write out type info payloads.
  UniqueTrackedTypes uniqueTypes(cx);

  for (const UniqueTrackedOptimizations::SortEntry* p = vec.begin();
       p != vec.end(); p++) {
    const TempOptimizationTypeInfoVector* v = p->types;
    JitSpew(JitSpew_OptimizationTrackingExtended,
            "  Type info entry %zu of length %zu, offset %zu",
            size_t(p - vec.begin()), v->length(), writer.length());
    SpewTempOptimizationTypeInfoVector(JitSpew_OptimizationTrackingExtended, v,
                                       "  ");

    if (!offsets.append(writer.length())) {
      return false;
    }

    for (const OptimizationTypeInfo* t = v->begin(); t != v->end(); t++) {
      if (!t->writeCompact(writer, uniqueTypes)) {
        return false;
      }
    }
  }

  // Enumerate the unique types, and pull out any 'new' script constructor
  // functions and allocation site information. We do this during linking
  // instead of during profiling to avoid touching compartment tables during
  // profiling. Additionally, TypeNewScript is subject to GC in the
  // meantime.
  TypeSet::TypeList uniqueTypeList;
  if (!uniqueTypes.enumerate(&uniqueTypeList)) {
    return false;
  }
  for (uint32_t i = 0; i < uniqueTypeList.length(); i++) {
    TypeSet::Type ty = uniqueTypeList[i];
    if (JSFunction* constructor = MaybeConstructorFromType(ty)) {
      if (!allTypes->append(IonTrackedTypeWithAddendum(ty, constructor))) {
        return false;
      }
      SpewConstructor(ty, constructor);
    } else {
      JSScript* script;
      uint32_t offset;
      if (!ty.isUnknown() && !ty.isAnyObject() && ty.isGroup() &&
          ObjectGroup::findAllocationSite(cx, ty.group(), &script, &offset)) {
        if (!allTypes->append(IonTrackedTypeWithAddendum(ty, script, offset))) {
          return false;
        }
        SpewAllocationSite(ty, script, offset);
      } else {
        if (!allTypes->append(IonTrackedTypeWithAddendum(ty))) {
          return false;
        }
      }
    }
  }

  if (!WriteOffsetsTable(writer, offsets, typesTableOffsetp)) {
    return false;
  }
  offsets.clear();

  // Write out attempts payloads.
  for (const UniqueTrackedOptimizations::SortEntry* p = vec.begin();
       p != vec.end(); p++) {
    const TempOptimizationAttemptsVector* v = p->attempts;
    if (JitSpewEnabled(JitSpew_OptimizationTrackingExtended)) {
      JitSpew(JitSpew_OptimizationTrackingExtended,
              "  Attempts entry %zu of length %zu, offset %zu",
              size_t(p - vec.begin()), v->length(), writer.length());
      SpewTempOptimizationAttemptsVector(JitSpew_OptimizationTrackingExtended,
                                         v, "  ");
    }

    if (!offsets.append(writer.length())) {
      return false;
    }

    for (const OptimizationAttempt* a = v->begin(); a != v->end(); a++) {
      a->writeCompact(writer);
    }
  }

  return WriteOffsetsTable(writer, offsets, optimizationTableOffsetp);
}

BytecodeSite* IonBuilder::maybeTrackedOptimizationSite(jsbytecode* pc) {
  // BytecodeSites that track optimizations need to be 1-1 with the pc
  // when optimization tracking is enabled, so that all MIR generated by
@@ -1069,40 +492,3 @@ void IonBuilder::trackInlineSuccessUnchecked(InliningStatus status) {
    trackOptimizationOutcome(TrackedOutcome::Inlined);
  }
}

static JSFunction* FunctionFromTrackedType(
    const IonTrackedTypeWithAddendum& tracked) {
  if (tracked.hasConstructor()) {
    return tracked.constructor;
  }

  TypeSet::Type ty = tracked.type;

  if (ty.isSingleton()) {
    JSObject* obj = ty.singleton();
    return obj->is<JSFunction>() ? &obj->as<JSFunction>() : nullptr;
  }

  return ty.group()->maybeInterpretedFunction();
}

typedef JS::ProfiledFrameHandle FrameHandle;

void FrameHandle::updateHasTrackedOptimizations() {
  // All inlined frames will have the same optimization information by
  // virtue of sharing the JitcodeGlobalEntry, but such information is
  // only interpretable on the youngest frame.
  if (depth() != 0) {
    return;
  }
  if (!entry_.hasTrackedOptimizations()) {
    return;
  }

  uint32_t entryOffset;
  optsIndex_ = entry_.trackedOptimizationIndexAtAddr(rt_, addr_, &entryOffset);
  if (optsIndex_.isSome()) {
    canonicalAddr_ =
        (void*)(((uint8_t*)entry_.nativeStartAddr()) + entryOffset);
  }
}
@@ -180,355 +180,6 @@ class UniqueTrackedOptimizations {
  uint8_t indexOf(const TrackedOptimizations* optimizations) const;
};

/* clang-format off */
// A compact table of tracked optimization information. Pictorially,
//
//    +------------------------------------------------+
//    | Region 1                                       |  |
//    |------------------------------------------------|  |
//    | Region 2                                       |  |
//    |------------------------------------------------|  |-- PayloadR of list-of-list of
//    |      ...                                       |  |   range triples (see below)
//    |------------------------------------------------|  |
//    | Region M                                       |  |
//    +================================================+ <- IonTrackedOptimizationsRegionTable
//    | uint32_t numRegions_ = M                       |  |
//    +------------------------------------------------+  |
//    | Region 1                                       |  |
//    |   uint32_t regionOffset = size(PayloadR)       |  |
//    +------------------------------------------------+  |-- Table
//    |      ...                                       |  |
//    +------------------------------------------------+  |
//    | Region M                                       |  |
//    |   uint32_t regionOffset                        |  |
//    +================================================+
//    | Optimization type info 1                       |  |
//    |------------------------------------------------|  |
//    | Optimization type info 2                       |  |-- PayloadT of list of
//    |------------------------------------------------|  |   OptimizationTypeInfo in
//    |      ...                                       |  |   order of decreasing frequency
//    |------------------------------------------------|  |
//    | Optimization type info N                       |  |
//    +================================================+ <- IonTrackedOptimizationsTypesTable
//    | uint32_t numEntries_ = N                       |  |
//    +------------------------------------------------+  |
//    | Optimization type info 1                       |  |
//    |   uint32_t entryOffset = size(PayloadT)        |  |
//    +------------------------------------------------+  |-- Table
//    |      ...                                       |  |
//    +------------------------------------------------+  |
//    | Optimization type info N                       |  |
//    |   uint32_t entryOffset                         |  |
//    +================================================+
//    | Optimization attempts 1                        |  |
//    |------------------------------------------------|  |
//    | Optimization attempts 2                        |  |-- PayloadA of list of
//    |------------------------------------------------|  |   OptimizationAttempts in
//    |      ...                                       |  |   order of decreasing frequency
//    |------------------------------------------------|  |
//    | Optimization attempts N                        |  |
//    +================================================+ <- IonTrackedOptimizationsAttemptsTable
//    | uint32_t numEntries_ = N                       |  |
//    +------------------------------------------------+  |
//    | Optimization attempts 1                        |  |
//    |   uint32_t entryOffset = size(PayloadA)        |  |
//    +------------------------------------------------+  |-- Table
//    |      ...                                       |  |
//    +------------------------------------------------+  |
//    | Optimization attempts N                        |  |
//    |   uint32_t entryOffset                         |  |
//    +------------------------------------------------+
/* clang-format on */
//
// Abstractly, each region in the PayloadR section is a list of triples of the
// following, in order of ascending startOffset:
//
//   (startOffset, endOffset, optimization attempts index)
//
// The range of [startOffset, endOffset) is the native machine code offsets
// for which the optimization attempts referred to by the index applies.
//
// Concretely, each region starts with a header of:
//
//   { startOffset : 32, endOffset : 32 }
//
// followed by an (endOffset, index) pair, then by delta-encoded variants
// triples described below.
//
// Each list of type infos in the PayloadT section is a list of triples:
//
//   (kind, MIR type, type set)
//
// The type set is separately in another vector, and what is encoded instead
// is the (offset, length) pair needed to index into that vector.
//
// Each list of optimization attempts in the PayloadA section is a list of
// pairs:
//
//   (strategy, outcome)
//
// Both tail tables for PayloadR and PayloadA use reverse offsets from the
// table pointers.

class IonTrackedOptimizationsRegion {
  const uint8_t* start_;
  const uint8_t* end_;

  // Unpacked state.
  uint32_t startOffset_;
  uint32_t endOffset_;
  const uint8_t* rangesStart_;

  void unpackHeader();

 public:
  IonTrackedOptimizationsRegion(const uint8_t* start, const uint8_t* end)
      : start_(start),
        end_(end),
        startOffset_(0),
        endOffset_(0),
        rangesStart_(nullptr) {
    MOZ_ASSERT(start < end);
    unpackHeader();
  }

  // Offsets for the entire range that this region covers.
  //
  // This, as well as the offsets for the deltas, is open at the ending
  // address: [startOffset, endOffset).
  uint32_t startOffset() const { return startOffset_; }
  uint32_t endOffset() const { return endOffset_; }

  class RangeIterator {
    const uint8_t* cur_;
    const uint8_t* start_;
    const uint8_t* end_;

    uint32_t firstStartOffset_;
    uint32_t prevEndOffset_;

   public:
    RangeIterator(const uint8_t* start, const uint8_t* end,
                  uint32_t startOffset)
        : cur_(start),
          start_(start),
          end_(end),
          firstStartOffset_(startOffset),
          prevEndOffset_(0) {}

    bool more() const { return cur_ < end_; }
    void readNext(uint32_t* startOffset, uint32_t* endOffset, uint8_t* index);
  };

  RangeIterator ranges() const {
    return RangeIterator(rangesStart_, end_, startOffset_);
  }

  // Find the index of tracked optimization info (e.g., type info and
  // attempts) at a native code offset.
  mozilla::Maybe<uint8_t> findIndex(uint32_t offset,
                                    uint32_t* entryOffsetOut) const;

  // For the variants below, S stands for startDelta, L for length, and I
  // for index. These were automatically generated from training on the
  // Octane benchmark.
  //
  //   byte 1    byte 0
  //   SSSS-SSSL LLLL-LII0
  //   startDelta max 127, length max 63, index max 3

  static const uint32_t ENC1_MASK = 0x1;
  static const uint32_t ENC1_MASK_VAL = 0x0;

  static const uint32_t ENC1_START_DELTA_MAX = 0x7f;
  static const uint32_t ENC1_START_DELTA_SHIFT = 9;

  static const uint32_t ENC1_LENGTH_MAX = 0x3f;
  static const uint32_t ENC1_LENGTH_SHIFT = 3;

  static const uint32_t ENC1_INDEX_MAX = 0x3;
  static const uint32_t ENC1_INDEX_SHIFT = 1;

  //   byte 2    byte 1    byte 0
  //   SSSS-SSSS SSSS-LLLL LLII-II01
  //   startDelta max 4095, length max 63, index max 15

  static const uint32_t ENC2_MASK = 0x3;
  static const uint32_t ENC2_MASK_VAL = 0x1;

  static const uint32_t ENC2_START_DELTA_MAX = 0xfff;
  static const uint32_t ENC2_START_DELTA_SHIFT = 12;

  static const uint32_t ENC2_LENGTH_MAX = 0x3f;
  static const uint32_t ENC2_LENGTH_SHIFT = 6;

  static const uint32_t ENC2_INDEX_MAX = 0xf;
  static const uint32_t ENC2_INDEX_SHIFT = 2;

  //   byte 3    byte 2    byte 1    byte 0
  //   SSSS-SSSS SSSL-LLLL LLLL-LIII IIII-I011
  //   startDelta max 2047, length max 1023, index max 255

  static const uint32_t ENC3_MASK = 0x7;
  static const uint32_t ENC3_MASK_VAL = 0x3;

  static const uint32_t ENC3_START_DELTA_MAX = 0x7ff;
  static const uint32_t ENC3_START_DELTA_SHIFT = 21;

  static const uint32_t ENC3_LENGTH_MAX = 0x3ff;
  static const uint32_t ENC3_LENGTH_SHIFT = 11;

  static const uint32_t ENC3_INDEX_MAX = 0xff;
  static const uint32_t ENC3_INDEX_SHIFT = 3;

  //   byte 4    byte 3    byte 2    byte 1    byte 0
  //   SSSS-SSSS SSSS-SSSL LLLL-LLLL LLLL-LIII IIII-I111
  //   startDelta max 32767, length max 16383, index max 255

  static const uint32_t ENC4_MASK = 0x7;
  static const uint32_t ENC4_MASK_VAL = 0x7;

  static const uint32_t ENC4_START_DELTA_MAX = 0x7fff;
  static const uint32_t ENC4_START_DELTA_SHIFT = 25;

  static const uint32_t ENC4_LENGTH_MAX = 0x3fff;
  static const uint32_t ENC4_LENGTH_SHIFT = 11;

  static const uint32_t ENC4_INDEX_MAX = 0xff;
  static const uint32_t ENC4_INDEX_SHIFT = 3;

  static bool IsDeltaEncodeable(uint32_t startDelta, uint32_t length) {
    MOZ_ASSERT(length != 0);
    return startDelta <= ENC4_START_DELTA_MAX && length <= ENC4_LENGTH_MAX;
  }

  static const uint32_t MAX_RUN_LENGTH = 100;

  static uint32_t ExpectedRunLength(const NativeToTrackedOptimizations* start,
                                    const NativeToTrackedOptimizations* end);

  static void ReadDelta(CompactBufferReader& reader, uint32_t* startDelta,
                        uint32_t* length, uint8_t* index);
  static void WriteDelta(CompactBufferWriter& writer, uint32_t startDelta,
                         uint32_t length, uint8_t index);
  static MOZ_MUST_USE bool WriteRun(CompactBufferWriter& writer,
                                    const NativeToTrackedOptimizations* start,
                                    const NativeToTrackedOptimizations* end,
                                    const UniqueTrackedOptimizations& unique);
};
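Each encoding exactly fills its byte budget: the tag, index, length, and startDelta widths implied by the masks above sum to 16, 24, 32, and 40 bits. A compile-time check of that invariant (standalone sketch, not part of the header):

#include <cstdint>

constexpr int Bits(uint32_t max) {  // width of a contiguous low bit mask
  int n = 0;
  while (max) {
    n++;
    max >>= 1;
  }
  return n;
}

static_assert(1 + Bits(0x3) + Bits(0x3f) + Bits(0x7f) == 16, "ENC1");
static_assert(2 + Bits(0xf) + Bits(0x3f) + Bits(0xfff) == 24, "ENC2");
static_assert(3 + Bits(0xff) + Bits(0x3ff) + Bits(0x7ff) == 32, "ENC3");
static_assert(3 + Bits(0xff) + Bits(0x3fff) + Bits(0x7fff) == 40, "ENC4");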

class IonTrackedOptimizationsAttempts {
  const uint8_t* start_;
  const uint8_t* end_;

 public:
  IonTrackedOptimizationsAttempts(const uint8_t* start, const uint8_t* end)
      : start_(start), end_(end) {
    // Cannot be empty.
    MOZ_ASSERT(start < end);
  }
};

struct IonTrackedTypeWithAddendum {
  TypeSet::Type type;

  enum HasAddendum { HasNothing, HasAllocationSite, HasConstructor };
  HasAddendum hasAddendum;

  // If type is a type object and is tied to a site, the script and pc are
  // resolved early and stored below. This is done to avoid accessing the
  // compartment during profiling time.
  union {
    struct {
      JSScript* script;
      uint32_t offset;
    };
    JSFunction* constructor;
  };

  explicit IonTrackedTypeWithAddendum(TypeSet::Type type)
      : type(type), hasAddendum(HasNothing), script(nullptr), offset(0) {}

  IonTrackedTypeWithAddendum(TypeSet::Type type, JSScript* script,
                             uint32_t offset)
      : type(type),
        hasAddendum(HasAllocationSite),
        script(script),
        offset(offset) {}

  IonTrackedTypeWithAddendum(TypeSet::Type type, JSFunction* constructor)
      : type(type), hasAddendum(HasConstructor), constructor(constructor) {}

  bool hasAllocationSite() const { return hasAddendum == HasAllocationSite; }
  bool hasConstructor() const { return hasAddendum == HasConstructor; }
};

typedef Vector<IonTrackedTypeWithAddendum, 1, SystemAllocPolicy>
    IonTrackedTypeVector;

class IonTrackedOptimizationsTypeInfo {
  const uint8_t* start_;
  const uint8_t* end_;

 public:
  IonTrackedOptimizationsTypeInfo(const uint8_t* start, const uint8_t* end)
      : start_(start), end_(end) {
    // Can be empty; i.e., no type info was tracked.
  }

  bool empty() const { return start_ == end_; }
};

template <class Entry>
class IonTrackedOptimizationsOffsetsTable {
  uint32_t padding_;
  uint32_t numEntries_;
  uint32_t entryOffsets_[1];

 protected:
  const uint8_t* payloadEnd() const { return (uint8_t*)(this) - padding_; }

 public:
  uint32_t numEntries() const { return numEntries_; }
  uint32_t entryOffset(uint32_t index) const {
    MOZ_ASSERT(index < numEntries());
    return entryOffsets_[index];
  }

  Entry entry(uint32_t index) const {
    const uint8_t* start = payloadEnd() - entryOffset(index);
    const uint8_t* end = payloadEnd();
    if (index < numEntries() - 1) {
      end -= entryOffset(index + 1);
    }
    return Entry(start, end);
  }
};
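To make the reverse-offset scheme concrete, here is the arithmetic that entry() undoes, traced with invented numbers (three payload entries at stream offsets 0, 10, and 25, with the table written after 33 payload bytes):

#include <cassert>
#include <cstdint>

int main() {
  const uint32_t offsets[] = {0, 10, 25};  // payload entry start offsets
  const uint32_t length = 33;              // stream length before the table
  const uint32_t padding = (4 - length % 4) % 4;  // == 3, as WriteOffsetsTable
  const uint32_t tableOffset = length + padding;  // == 36

  // Reverse offsets as written: distance back from the end of the payload.
  uint32_t rev[3];
  for (int i = 0; i < 3; i++) {
    rev[i] = tableOffset - padding - offsets[i];
  }
  assert(rev[0] == 33 && rev[1] == 23 && rev[2] == 8);

  // entry(i) recovers [start, end): payloadEnd() is the table address minus
  // padding_, i.e. stream offset 33 here.
  uint32_t payloadEnd = tableOffset - padding;
  assert(payloadEnd - rev[0] == 0 && payloadEnd - rev[1] == 10);   // entry 0
  assert(payloadEnd - rev[1] == 10 && payloadEnd - rev[2] == 25);  // entry 1
  return 0;
}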

class IonTrackedOptimizationsRegionTable
    : public IonTrackedOptimizationsOffsetsTable<
          IonTrackedOptimizationsRegion> {
 public:
  mozilla::Maybe<IonTrackedOptimizationsRegion> findRegion(
      uint32_t offset) const;

  const uint8_t* payloadStart() const { return payloadEnd() - entryOffset(0); }
};

typedef IonTrackedOptimizationsOffsetsTable<IonTrackedOptimizationsAttempts>
    IonTrackedOptimizationsAttemptsTable;

typedef IonTrackedOptimizationsOffsetsTable<IonTrackedOptimizationsTypeInfo>
    IonTrackedOptimizationsTypesTable;

MOZ_MUST_USE bool WriteIonTrackedOptimizationsTable(
    JSContext* cx, CompactBufferWriter& writer,
    const NativeToTrackedOptimizations* start,
    const NativeToTrackedOptimizations* end,
    const UniqueTrackedOptimizations& unique, uint32_t* numRegions,
    uint32_t* regionTableOffsetp, uint32_t* typesTableOffsetp,
    uint32_t* attemptsTableOffsetp, IonTrackedTypeVector* allTypes);

}  // namespace jit
}  // namespace js
@@ -66,11 +66,6 @@ CodeGeneratorShared::CodeGeneratorShared(MIRGenerator* gen, LIRGraph* graph,
      nativeToBytecodeNumRegions_(0),
      nativeToBytecodeScriptList_(nullptr),
      nativeToBytecodeScriptListLength_(0),
      trackedOptimizationsMap_(nullptr),
      trackedOptimizationsMapSize_(0),
      trackedOptimizationsRegionTableOffset_(0),
      trackedOptimizationsTypesTableOffset_(0),
      trackedOptimizationsAttemptsTableOffset_(0),
      osrEntryOffset_(0),
      skipArgCheckEntryOffset_(0),
#ifdef CHECK_OSIPOINT_REGISTERS
@@ -327,55 +322,6 @@ void CodeGeneratorShared::dumpNativeToBytecodeEntry(uint32_t idx) {
#endif
}

bool CodeGeneratorShared::addTrackedOptimizationsEntry(
    const TrackedOptimizations* optimizations) {
  if (!isOptimizationTrackingEnabled()) {
    return true;
  }

  MOZ_ASSERT(optimizations);

  uint32_t nativeOffset = masm.currentOffset();

  if (!trackedOptimizations_.empty()) {
    NativeToTrackedOptimizations& lastEntry = trackedOptimizations_.back();
    MOZ_ASSERT_IF(!masm.oom(), nativeOffset >= lastEntry.endOffset.offset());

    // If we're still generating code for the same set of optimizations,
    // we are done.
    if (lastEntry.optimizations == optimizations) {
      return true;
    }
  }

  // If we're generating code for a new set of optimizations, add a new
  // entry.
  NativeToTrackedOptimizations entry;
  entry.startOffset = CodeOffset(nativeOffset);
  entry.endOffset = CodeOffset(nativeOffset);
  entry.optimizations = optimizations;
  return trackedOptimizations_.append(entry);
}

void CodeGeneratorShared::extendTrackedOptimizationsEntry(
    const TrackedOptimizations* optimizations) {
  if (!isOptimizationTrackingEnabled()) {
    return;
  }

  uint32_t nativeOffset = masm.currentOffset();
  NativeToTrackedOptimizations& entry = trackedOptimizations_.back();
  MOZ_ASSERT(entry.optimizations == optimizations);
  MOZ_ASSERT_IF(!masm.oom(), nativeOffset >= entry.endOffset.offset());

  entry.endOffset = CodeOffset(nativeOffset);

  // If we generated no code, remove the last entry.
  if (nativeOffset == entry.startOffset.offset()) {
    trackedOptimizations_.popBack();
  }
}
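The two helpers above implement a simple run-coalescing scheme: addTrackedOptimizationsEntry opens a run (or keeps the current one when consecutive instructions share the same TrackedOptimizations set), and extendTrackedOptimizationsEntry closes it at the current native offset, discarding runs that emitted no code. A minimal standalone sketch of the same pattern (illustrative names and types, not SpiderMonkey API):

#include <cstdint>
#include <vector>

struct Run {
  uint32_t start;
  uint32_t end;
  const void* key;  // stands in for TrackedOptimizations*
};

struct RunBuilder {
  std::vector<Run> runs;

  // Called before emitting an instruction (cf. addTrackedOptimizationsEntry).
  void begin(uint32_t offset, const void* key) {
    if (!runs.empty() && runs.back().key == key) {
      return;  // still generating code for the same set
    }
    runs.push_back({offset, offset, key});
  }

  // Called after emitting it (cf. extendTrackedOptimizationsEntry).
  void extend(uint32_t offset) {
    runs.back().end = offset;
    if (runs.back().start == runs.back().end) {
      runs.pop_back();  // the instruction emitted no code
    }
  }
};

With offsets 0, 8, and 12 and the same key throughout, three instructions collapse into a single [0, 12) run.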

// see OffsetOfFrameSlot
static inline int32_t ToStackIndex(LAllocation* a) {
  if (a->isStackSlot()) {

@@ -887,155 +833,6 @@ void CodeGeneratorShared::verifyCompactNativeToBytecodeMap(JitCode* code) {
#endif  // DEBUG
}

bool CodeGeneratorShared::generateCompactTrackedOptimizationsMap(
    JSContext* cx, JitCode* code, IonTrackedTypeVector* allTypes) {
  MOZ_ASSERT(trackedOptimizationsMap_ == nullptr);
  MOZ_ASSERT(trackedOptimizationsMapSize_ == 0);
  MOZ_ASSERT(trackedOptimizationsRegionTableOffset_ == 0);
  MOZ_ASSERT(trackedOptimizationsTypesTableOffset_ == 0);
  MOZ_ASSERT(trackedOptimizationsAttemptsTableOffset_ == 0);

  if (trackedOptimizations_.empty()) {
    return true;
  }

  UniqueTrackedOptimizations unique(cx);

  // Iterate through all entries to deduplicate their optimization attempts.
  for (size_t i = 0; i < trackedOptimizations_.length(); i++) {
    NativeToTrackedOptimizations& entry = trackedOptimizations_[i];
    if (!unique.add(entry.optimizations)) {
      return false;
    }
  }

  // Sort the unique optimization attempts by frequency to stabilize the
  // attempts' indices in the compact table we will write later.
  if (!unique.sortByFrequency(cx)) {
    return false;
  }

  // Write out the ranges and the table.
  CompactBufferWriter writer;
  uint32_t numRegions;
  uint32_t regionTableOffset;
  uint32_t typesTableOffset;
  uint32_t attemptsTableOffset;
  if (!WriteIonTrackedOptimizationsTable(
          cx, writer, trackedOptimizations_.begin(),
          trackedOptimizations_.end(), unique, &numRegions, &regionTableOffset,
          &typesTableOffset, &attemptsTableOffset, allTypes)) {
    return false;
  }

  MOZ_ASSERT(regionTableOffset > 0);
  MOZ_ASSERT(typesTableOffset > 0);
  MOZ_ASSERT(attemptsTableOffset > 0);
  MOZ_ASSERT(typesTableOffset > regionTableOffset);
  MOZ_ASSERT(attemptsTableOffset > typesTableOffset);

  // Copy over the table out of the writer's buffer.
  uint8_t* data = cx->pod_malloc<uint8_t>(writer.length());
  if (!data) {
    return false;
  }

  memcpy(data, writer.buffer(), writer.length());
  trackedOptimizationsMap_ = data;
  trackedOptimizationsMapSize_ = writer.length();
  trackedOptimizationsRegionTableOffset_ = regionTableOffset;
  trackedOptimizationsTypesTableOffset_ = typesTableOffset;
  trackedOptimizationsAttemptsTableOffset_ = attemptsTableOffset;

  verifyCompactTrackedOptimizationsMap(code, numRegions, unique, allTypes);

  JitSpew(JitSpew_OptimizationTrackingExtended,
          "== Compact Native To Optimizations Map [%p-%p] size %u", data,
          data + trackedOptimizationsMapSize_, trackedOptimizationsMapSize_);
  JitSpew(JitSpew_OptimizationTrackingExtended,
          "  with type list of length %zu, size %zu", allTypes->length(),
          allTypes->length() * sizeof(IonTrackedTypeWithAddendum));

  return true;
}
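The deduplicate-and-sort step above is what keeps the map compact: every entry referring to the same optimization set shares one table index, and sorting by frequency both stabilizes the ordering and gives the most common sets the smallest indices. A standalone sketch of that index assignment (illustrative only; the real UniqueTrackedOptimizations hashes full attempt vectors, not raw pointers):

#include <algorithm>
#include <cstddef>
#include <map>
#include <utility>
#include <vector>

std::map<const void*, size_t> assignIndices(
    const std::vector<const void*>& perEntrySets) {
  // Count how often each unique set is referenced.
  std::map<const void*, size_t> freq;
  for (const void* s : perEntrySets) {
    freq[s]++;
  }

  // Order unique sets by descending frequency.
  std::vector<std::pair<const void*, size_t>> order(freq.begin(), freq.end());
  std::sort(order.begin(), order.end(),
            [](const auto& a, const auto& b) { return a.second > b.second; });

  // The most frequent set gets index 0, and so on.
  std::map<const void*, size_t> index;
  for (size_t i = 0; i < order.size(); i++) {
    index[order[i].first] = i;
  }
  return index;
}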

void CodeGeneratorShared::verifyCompactTrackedOptimizationsMap(
    JitCode* code, uint32_t numRegions,
    const UniqueTrackedOptimizations& unique,
    const IonTrackedTypeVector* allTypes) {
#ifdef DEBUG
  MOZ_ASSERT(trackedOptimizationsMap_ != nullptr);
  MOZ_ASSERT(trackedOptimizationsMapSize_ > 0);
  MOZ_ASSERT(trackedOptimizationsRegionTableOffset_ > 0);
  MOZ_ASSERT(trackedOptimizationsTypesTableOffset_ > 0);
  MOZ_ASSERT(trackedOptimizationsAttemptsTableOffset_ > 0);

  // Table pointers must all be 4-byte aligned.
  const uint8_t* regionTableAddr =
      trackedOptimizationsMap_ + trackedOptimizationsRegionTableOffset_;
  const uint8_t* typesTableAddr =
      trackedOptimizationsMap_ + trackedOptimizationsTypesTableOffset_;
  const uint8_t* attemptsTableAddr =
      trackedOptimizationsMap_ + trackedOptimizationsAttemptsTableOffset_;
  MOZ_ASSERT(uintptr_t(regionTableAddr) % sizeof(uint32_t) == 0);
  MOZ_ASSERT(uintptr_t(typesTableAddr) % sizeof(uint32_t) == 0);
  MOZ_ASSERT(uintptr_t(attemptsTableAddr) % sizeof(uint32_t) == 0);

  // Assert that the number of entries matches up for the tables.
  const IonTrackedOptimizationsRegionTable* regionTable =
      (const IonTrackedOptimizationsRegionTable*)regionTableAddr;
  MOZ_ASSERT(regionTable->numEntries() == numRegions);
  const IonTrackedOptimizationsTypesTable* typesTable =
      (const IonTrackedOptimizationsTypesTable*)typesTableAddr;
  MOZ_ASSERT(typesTable->numEntries() == unique.count());
  const IonTrackedOptimizationsAttemptsTable* attemptsTable =
      (const IonTrackedOptimizationsAttemptsTable*)attemptsTableAddr;
  MOZ_ASSERT(attemptsTable->numEntries() == unique.count());

  // Verify each region.
  uint32_t trackedIdx = 0;
  for (uint32_t regionIdx = 0; regionIdx < regionTable->numEntries();
       regionIdx++) {
    // Check reverse offsets are within bounds.
    MOZ_ASSERT(regionTable->entryOffset(regionIdx) <=
               trackedOptimizationsRegionTableOffset_);
    MOZ_ASSERT_IF(regionIdx > 0, regionTable->entryOffset(regionIdx) <
                                     regionTable->entryOffset(regionIdx - 1));

    IonTrackedOptimizationsRegion region = regionTable->entry(regionIdx);

    // Check the region range is covered by jitcode.
    MOZ_ASSERT(region.startOffset() <= code->instructionsSize());
    MOZ_ASSERT(region.endOffset() <= code->instructionsSize());

    IonTrackedOptimizationsRegion::RangeIterator iter = region.ranges();
    while (iter.more()) {
      // Assert that the offsets are correctly decoded from the delta.
      uint32_t startOffset, endOffset;
      uint8_t index;
      iter.readNext(&startOffset, &endOffset, &index);
      NativeToTrackedOptimizations& entry = trackedOptimizations_[trackedIdx++];
      MOZ_ASSERT(startOffset == entry.startOffset.offset());
      MOZ_ASSERT(endOffset == entry.endOffset.offset());
      MOZ_ASSERT(index == unique.indexOf(entry.optimizations));

      // Assert that the type info and attempts vectors are correctly
      // decoded. This is disabled for now if the types table might
      // contain nursery pointers, in which case the types might not
      // match, see bug 1175761.
      JSRuntime* rt = code->runtimeFromMainThread();
      if (!rt->gc.storeBuffer().cancelIonCompilations()) {
        IonTrackedOptimizationsTypeInfo typeInfo = typesTable->entry(index);
        TempOptimizationTypeInfoVector tvec(alloc());
      }

      IonTrackedOptimizationsAttempts attempts = attemptsTable->entry(index);
      TempOptimizationAttemptsVector avec(alloc());
    }
  }
#endif
}
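The verifier above re-decodes the freshly written buffer and cross-checks it against the in-memory trackedOptimizations_ vector; the alignment assertions hold because each table starts with uint32_t fields and the writer places it at a 4-byte-aligned offset. A minimal sketch of the align-up computation such a writer would use (illustrative payload sizes, not the real encoder):

#include <cassert>
#include <cstdint>

// Round an offset up to 4-byte alignment so a table's uint32_t header
// fields land on aligned addresses.
static uint32_t alignUp4(uint32_t offset) {
  return (offset + 3) & ~uint32_t(3);
}

int main() {
  uint32_t regionTableOffset = alignUp4(/* payload bytes */ 7);    // -> 8
  uint32_t typesTableOffset = alignUp4(regionTableOffset + 13);    // -> 24
  uint32_t attemptsTableOffset = alignUp4(typesTableOffset + 10);  // -> 36
  assert(regionTableOffset % 4 == 0 && typesTableOffset % 4 == 0 &&
         attemptsTableOffset % 4 == 0);
  assert(typesTableOffset > regionTableOffset);    // mirrors the MOZ_ASSERTs
  assert(attemptsTableOffset > typesTableOffset);  // in the generator above
  return 0;
}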

void CodeGeneratorShared::markSafepoint(LInstruction* ins) {
  markSafepointAt(masm.currentOffset(), ins);
}

@@ -111,14 +111,6 @@ class CodeGeneratorShared : public LElementVisitor {

  bool stringsCanBeInNursery() const { return gen->stringsCanBeInNursery(); }

  js::Vector<NativeToTrackedOptimizations, 0, SystemAllocPolicy>
      trackedOptimizations_;
  uint8_t* trackedOptimizationsMap_;
  uint32_t trackedOptimizationsMapSize_;
  uint32_t trackedOptimizationsRegionTableOffset_;
  uint32_t trackedOptimizationsTypesTableOffset_;
  uint32_t trackedOptimizationsAttemptsTableOffset_;

  bool isOptimizationTrackingEnabled() {
    return gen->isOptimizationTrackingEnabled();
  }

@@ -191,10 +183,6 @@ class CodeGeneratorShared : public LElementVisitor {
  void dumpNativeToBytecodeEntries();
  void dumpNativeToBytecodeEntry(uint32_t idx);

  bool addTrackedOptimizationsEntry(const TrackedOptimizations* optimizations);
  void extendTrackedOptimizationsEntry(
      const TrackedOptimizations* optimizations);

 public:
  MIRGenerator& mirGen() const { return *gen; }

@@ -264,13 +252,6 @@ class CodeGeneratorShared : public LElementVisitor {
  bool generateCompactNativeToBytecodeMap(JSContext* cx, JitCode* code);
  void verifyCompactNativeToBytecodeMap(JitCode* code);

  bool generateCompactTrackedOptimizationsMap(JSContext* cx, JitCode* code,
                                              IonTrackedTypeVector* allTypes);
  void verifyCompactTrackedOptimizationsMap(
      JitCode* code, uint32_t numRegions,
      const UniqueTrackedOptimizations& unique,
      const IonTrackedTypeVector* allTypes);

  // Mark the safepoint on |ins| as corresponding to the current assembler
  // location. The location should be just after a call.
  void markSafepoint(LInstruction* ins);