Bug 1384808 - Implement a linear cache for searching the shape lineage r=djvj

Linearly searching the shape lineage can be expensive: traversing a linked list causes branch misses and cache misses. Since these searches happen frequently, it is worthwhile to cache their results. This revision lazily allocates a small linear cache after the first linear search on a shape, and each subsequent linear search places its result into the cache. If the jsid being searched for is looked up frequently, a quick scan of the cache yields a hit; otherwise we fall back to a linear search and append the new entry to the cache. Once the cache is full, it is transformed into a shape hash table, as in the previous approach.
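
Sketched below is the core idea, using hypothetical stand-in types rather than the actual ShapeIC/ShapeCachePtr classes this patch adds: a tiny array that is scanned linearly on lookup and reports when it is full so the caller can convert it into a hash table.

#include <cstdint>

// Minimal sketch of the linear cache idea; PropertyId and Shape are
// hypothetical stand-ins for SpiderMonkey's jsid and js::Shape.
using PropertyId = uintptr_t;
struct Shape;  // only used through pointers here

class LinearShapeCache {
 public:
  static const uint8_t MAX_SIZE = 7;  // mirrors ShapeIC::MAX_SIZE in the patch

  // Scan the filled prefix of the array. Contiguous storage avoids the
  // branch and cache misses of walking the shape lineage's linked list.
  bool search(PropertyId id, Shape** foundShape) const {
    for (uint8_t i = 0; i < nextFreeIndex_; i++) {
      if (ids_[i] == id) {
        *foundShape = shapes_[i];
        return true;
      }
    }
    return false;
  }

  // Record the result of a fallback linear search. Returning false means
  // the cache is full; the caller then converts it into a hash table.
  bool append(PropertyId id, Shape* shape) {
    if (nextFreeIndex_ == MAX_SIZE) {
      return false;
    }
    ids_[nextFreeIndex_] = id;
    shapes_[nextFreeIndex_] = shape;
    nextFreeIndex_++;
    return true;
  }

 private:
  PropertyId ids_[MAX_SIZE] = {};
  Shape* shapes_[MAX_SIZE] = {};
  uint8_t nextFreeIndex_ = 0;
};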

Differential Revision: https://phabricator.services.mozilla.com/D12155

--HG--
extra : moz-landing-system : lando
Denis Palmeiro 2019-02-03 00:03:35 +00:00
Parent fd48fcd309
Commit ad385bf56a
11 changed files: 483 additions and 131 deletions
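
The Shape.h hunks below store the cache as a single tagged word (ShapeCachePtr), so BaseShape pays for one pointer regardless of which variant is live. A rough sketch of that tagging scheme, with hypothetical names standing in for the patch's ShapeIC and ShapeTable:

#include <cassert>
#include <cstdint>

struct ShapeIC;     // small linear cache (stand-in declaration)
struct ShapeTable;  // multiplicative hash table (stand-in declaration)

// Sketch of the ShapeCachePtr idea: both cache types are at least 4-byte
// aligned, so the low two bits of the pointer are free to hold a type tag.
class TaggedCachePtr {
  static const uintptr_t kICTag = 0x1;
  static const uintptr_t kTableTag = 0x2;
  static const uintptr_t kTagMask = 0x3;
  uintptr_t p_ = 0;

 public:
  bool isIC() const { return (p_ & kTagMask) == kICTag; }
  bool isTable() const { return (p_ & kTagMask) == kTableTag; }

  void setIC(ShapeIC* ic) {
    assert((uintptr_t(ic) & kTagMask) == 0);  // alignment keeps tag bits free
    p_ = uintptr_t(ic) | kICTag;
  }

  void setTable(ShapeTable* table) {
    assert((uintptr_t(table) & kTagMask) == 0);
    p_ = uintptr_t(table) | kTableTag;
  }

  ShapeIC* asIC() const {
    assert(isIC());
    return reinterpret_cast<ShapeIC*>(p_ & ~kTagMask);
  }

  ShapeTable* asTable() const {
    assert(isTable());
    return reinterpret_cast<ShapeTable*>(p_ & ~kTagMask);
  }
};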


@@ -4238,15 +4238,15 @@ static void RelazifyFunctionsForShrinkingGC(JSRuntime* rt) {
}
}
static void PurgeShapeTablesForShrinkingGC(JSRuntime* rt) {
gcstats::AutoPhase ap(rt->gc.stats(), gcstats::PhaseKind::PURGE_SHAPE_TABLES);
static void PurgeShapeCachesForShrinkingGC(JSRuntime* rt) {
gcstats::AutoPhase ap(rt->gc.stats(), gcstats::PhaseKind::PURGE_SHAPE_CACHES);
for (GCZonesIter zone(rt); !zone.done(); zone.next()) {
if (!CanRelocateZone(zone) || zone->keepShapeTables()) {
if (!CanRelocateZone(zone) || zone->keepShapeCaches()) {
continue;
}
for (auto baseShape = zone->cellIter<BaseShape>(); !baseShape.done();
baseShape.next()) {
baseShape->maybePurgeTable();
baseShape->maybePurgeCache();
}
}
}
@@ -4337,7 +4337,7 @@ bool GCRuntime::beginMarkPhase(JS::GCReason reason, AutoGCSession& session) {
*/
if (invocationKind == GC_SHRINK) {
RelazifyFunctionsForShrinkingGC(rt);
PurgeShapeTablesForShrinkingGC(rt);
PurgeShapeCachesForShrinkingGC(rt);
}
/*
@@ -8456,9 +8456,8 @@ void js::gc::CheckHashTablesAfterMovingGC(JSRuntime* rt) {
JS::AutoCheckCannotGC nogc;
for (auto baseShape = zone->cellIter<BaseShape>(); !baseShape.done();
baseShape.next()) {
if (ShapeTable* table = baseShape->maybeTable(nogc)) {
table->checkAfterMovingGC();
}
ShapeCachePtr p = baseShape->getCache(nogc);
p.checkAfterMovingGC();
}
}


@@ -89,7 +89,7 @@ PhaseKindGraphRoots = [
PhaseKind("MARK_DISCARD_CODE", "Mark Discard Code", 3),
PhaseKind("RELAZIFY_FUNCTIONS", "Relazify Functions", 4),
PhaseKind("PURGE", "Purge", 5),
PhaseKind("PURGE_SHAPE_TABLES", "Purge ShapeTables", 60),
PhaseKind("PURGE_SHAPE_CACHES", "Purge ShapeCaches", 60),
JoinParallelTasksPhaseKind
]),
PhaseKind("MARK", "Mark", 6, [


@@ -1096,8 +1096,8 @@ inline void js::GCMarker::eagerlyMarkChildren(Shape* shape) {
BaseShape* base = shape->base();
CheckTraversedEdge(shape, base);
if (mark(base)) {
MOZ_ASSERT(base->canSkipMarkingShapeTable(shape));
base->traceChildrenSkipShapeTable(this);
MOZ_ASSERT(base->canSkipMarkingShapeCache(shape));
base->traceChildrenSkipShapeCache(this);
}
traverseEdge(shape, shape->propidRef().get());


@@ -70,7 +70,7 @@ JS::Zone::Zone(JSRuntime* rt)
gcScheduled_(false),
gcScheduledSaved_(false),
gcPreserveCode_(false),
keepShapeTables_(this, false),
keepShapeCaches_(this, false),
listNext_(NotOnList) {
/* Ensure that there are no vtables to mess us up here. */
MOZ_ASSERT(reinterpret_cast<JS::shadow::Zone*>(this) ==


@@ -203,7 +203,7 @@ class Zone : public JS::shadow::Zone,
size_t* typePool, size_t* regexpZone,
size_t* jitZone, size_t* baselineStubsOptimized,
size_t* cachedCFG, size_t* uniqueIdMap,
size_t* shapeTables, size_t* atomsMarkBitmaps,
size_t* shapeCaches, size_t* atomsMarkBitmaps,
size_t* compartmentObjects,
size_t* crossCompartmentWrappersTables,
size_t* compartmentsPrivateData);
@@ -645,8 +645,8 @@ class Zone : public JS::shadow::Zone,
void checkUniqueIdTableAfterMovingGC();
#endif
bool keepShapeTables() const { return keepShapeTables_; }
void setKeepShapeTables(bool b) { keepShapeTables_ = b; }
bool keepShapeCaches() const { return keepShapeCaches_; }
void setKeepShapeCaches(bool b) { keepShapeCaches_ = b; }
// Delete an empty compartment after its contents have been merged.
void deleteEmptyCompartment(JS::Compartment* comp);
@@ -664,7 +664,7 @@ class Zone : public JS::shadow::Zone,
js::MainThreadData<bool> gcScheduled_;
js::MainThreadData<bool> gcScheduledSaved_;
js::MainThreadData<bool> gcPreserveCode_;
js::ZoneData<bool> keepShapeTables_;
js::ZoneData<bool> keepShapeCaches_;
// Allow zones to be linked into a list
friend class js::gc::ZoneList;


@@ -3654,6 +3654,9 @@ void JSObject::dump(js::GenericPrinter& out) const {
if (nobj->hasShapeTable()) {
out.put(" hasShapeTable");
}
if (nobj->hasShapeIC()) {
out.put(" hasShapeCache");
}
if (nobj->hadElementsAccess()) {
out.put(" had_elements_access");
}


@@ -506,6 +506,8 @@ class NativeObject : public ShapedObject {
bool hasShapeTable() const { return lastProperty()->hasTable(); }
bool hasShapeIC() const { return lastProperty()->hasIC(); }
HeapSlotArray getDenseElements() {
return HeapSlotArray(elements_, !getElementsHeader()->isCopyOnWrite());
}
@@ -878,7 +880,7 @@ class NativeObject : public ShapedObject {
static MOZ_ALWAYS_INLINE bool maybeConvertToOrGrowDictionaryForAdd(
JSContext* cx, HandleNativeObject obj, HandleId id, ShapeTable** table,
ShapeTable::Entry** entry, const AutoKeepShapeTables& keep);
ShapeTable::Entry** entry, const AutoKeepShapeCaches& keep);
static bool maybeToDictionaryModeForPut(JSContext* cx, HandleNativeObject obj,
MutableHandleShape shape);
@@ -933,12 +935,12 @@ class NativeObject : public ShapedObject {
HandleId id, uint32_t slot,
unsigned attrs, ShapeTable* table,
ShapeTable::Entry* entry,
const AutoKeepShapeTables& keep);
const AutoKeepShapeCaches& keep);
static Shape* addAccessorPropertyInternal(
JSContext* cx, HandleNativeObject obj, HandleId id, JSGetterOp getter,
JSSetterOp setter, unsigned attrs, ShapeTable* table,
ShapeTable::Entry* entry, const AutoKeepShapeTables& keep);
ShapeTable::Entry* entry, const AutoKeepShapeCaches& keep);
static MOZ_MUST_USE bool fillInAfterSwap(JSContext* cx,
HandleNativeObject obj,


@@ -22,13 +22,13 @@
namespace js {
inline AutoKeepShapeTables::AutoKeepShapeTables(JSContext* cx)
: cx_(cx), prev_(cx->zone()->keepShapeTables()) {
cx->zone()->setKeepShapeTables(true);
inline AutoKeepShapeCaches::AutoKeepShapeCaches(JSContext* cx)
: cx_(cx), prev_(cx->zone()->keepShapeCaches()) {
cx->zone()->setKeepShapeCaches(true);
}
inline AutoKeepShapeTables::~AutoKeepShapeTables() {
cx_->zone()->setKeepShapeTables(prev_);
inline AutoKeepShapeCaches::~AutoKeepShapeCaches() {
cx_->zone()->setKeepShapeCaches(prev_);
}
inline StackBaseShape::StackBaseShape(const Class* clasp, uint32_t objectFlags)
@@ -38,8 +38,8 @@ MOZ_ALWAYS_INLINE Shape* Shape::search(JSContext* cx, jsid id) {
return search(cx, this, id);
}
MOZ_ALWAYS_INLINE bool Shape::maybeCreateTableForLookup(JSContext* cx) {
if (hasTable()) {
MOZ_ALWAYS_INLINE bool Shape::maybeCreateCacheForLookup(JSContext* cx) {
if (hasTable() || hasIC()) {
return true;
}
@@ -52,12 +52,12 @@ MOZ_ALWAYS_INLINE bool Shape::maybeCreateTableForLookup(JSContext* cx) {
return true;
}
return Shape::hashify(cx, this);
return Shape::cachify(cx, this);
}
template <MaybeAdding Adding>
/* static */ inline bool Shape::search(JSContext* cx, Shape* start, jsid id,
const AutoKeepShapeTables& keep,
const AutoKeepShapeCaches& keep,
Shape** pshape, ShapeTable** ptable,
ShapeTable::Entry** pentry) {
if (start->inDictionary()) {
@@ -80,18 +80,31 @@ template <MaybeAdding Adding>
template <MaybeAdding Adding>
/* static */ MOZ_ALWAYS_INLINE Shape* Shape::search(JSContext* cx, Shape* start,
jsid id) {
if (start->maybeCreateTableForLookup(cx)) {
Shape* foundShape = nullptr;
if (start->maybeCreateCacheForLookup(cx)) {
JS::AutoCheckCannotGC nogc;
if (ShapeTable* table = start->maybeTable(nogc)) {
ShapeTable::Entry& entry = table->search<Adding>(id, nogc);
return entry.shape();
ShapeCachePtr cache = start->getCache(nogc);
if (cache.search<Adding>(id, start, &foundShape)) {
return foundShape;
}
} else {
// Just do a linear search.
cx->recoverFromOutOfMemory();
}
return start->searchLinear(id);
foundShape = start->searchLinear(id);
if (start->hasIC()) {
JS::AutoCheckCannotGC nogc;
if (!start->appendShapeToIC(id, foundShape, nogc)) {
// Failure indicates that the cache is full, which means we missed
// the cache ShapeIC::MAX_SIZE times. This indicates the cache is no
// longer useful, so convert it into a ShapeTable.
if (!Shape::hashify(cx, start)) {
cx->recoverFromOutOfMemory();
}
}
}
return foundShape;
}
inline Shape* Shape::new_(JSContext* cx, Handle<StackShape> other,
@@ -343,7 +356,7 @@ MOZ_ALWAYS_INLINE ShapeTable::Entry& ShapeTable::searchUnchecked(jsid id) {
template <MaybeAdding Adding>
MOZ_ALWAYS_INLINE ShapeTable::Entry& ShapeTable::search(
jsid id, const AutoKeepShapeTables&) {
jsid id, const AutoKeepShapeCaches&) {
return searchUnchecked<Adding>(id);
}
@@ -359,16 +372,17 @@ MOZ_ALWAYS_INLINE ShapeTable::Entry& ShapeTable::search(
*/
MOZ_ALWAYS_INLINE Shape* Shape::searchNoHashify(Shape* start, jsid id) {
/*
* If we have a table, search in the shape table, else do a linear
* search. We never hashify into a table in parallel.
* If we have a cache, search in the shape cache, else do a linear
* search. We never hashify or cachify into a table in parallel.
*/
Shape* foundShape;
JS::AutoCheckCannotGC nogc;
if (ShapeTable* table = start->maybeTable(nogc)) {
ShapeTable::Entry& entry = table->search<MaybeAdding::NotAdding>(id, nogc);
return entry.shape();
ShapeCachePtr cache = start->getCache(nogc);
if (!cache.search<MaybeAdding::NotAdding>(id, start, &foundShape)) {
foundShape = start->searchLinear(id);
}
return start->searchLinear(id);
return foundShape;
}
/* static */ MOZ_ALWAYS_INLINE Shape* NativeObject::addDataProperty(
@@ -378,7 +392,7 @@ MOZ_ALWAYS_INLINE Shape* Shape::searchNoHashify(Shape* start, jsid id) {
MOZ_ASSERT(obj->uninlinedNonProxyIsExtensible());
MOZ_ASSERT(!obj->containsPure(id));
AutoKeepShapeTables keep(cx);
AutoKeepShapeCaches keep(cx);
ShapeTable* table = nullptr;
ShapeTable::Entry* entry = nullptr;
if (obj->inDictionaryMode()) {
@@ -399,7 +413,7 @@ MOZ_ALWAYS_INLINE Shape* Shape::searchNoHashify(Shape* start, jsid id) {
MOZ_ASSERT(obj->uninlinedNonProxyIsExtensible());
MOZ_ASSERT(!obj->containsPure(id));
AutoKeepShapeTables keep(cx);
AutoKeepShapeCaches keep(cx);
ShapeTable* table = nullptr;
ShapeTable::Entry* entry = nullptr;
if (obj->inDictionaryMode()) {


@@ -38,6 +38,12 @@ using JS::AutoCheckCannotGC;
Shape* const ShapeTable::Entry::SHAPE_REMOVED =
(Shape*)ShapeTable::Entry::SHAPE_COLLISION;
bool ShapeIC::init(JSContext* cx) {
size_ = MAX_SIZE;
entries_.reset(cx->pod_calloc<Entry>(size_));
return (!entries_) ? false : true;
}
bool ShapeTable::init(JSContext* cx, Shape* lastProp) {
uint32_t sizeLog2 = CeilingLog2Size(entryCount_);
uint32_t size = JS_BIT(sizeLog2);
@@ -49,7 +55,7 @@ bool ShapeTable::init(JSContext* cx, Shape* lastProp) {
}
size = JS_BIT(sizeLog2);
entries_ = cx->pod_calloc<Entry>(size);
entries_.reset(cx->pod_calloc<Entry>(size));
if (!entries_) {
return false;
}
@@ -155,17 +161,37 @@ void Shape::handoffTableTo(Shape* shape) {
return false;
}
ShapeTable* table = cx->new_<ShapeTable>(shape->entryCount());
UniquePtr<ShapeTable> table =
cx->make_unique<ShapeTable>(shape->entryCount());
if (!table) {
return false;
}
if (!table->init(cx, shape)) {
js_free(table);
return false;
}
shape->base()->setTable(table);
shape->base()->setTable(table.release());
return true;
}
/* static */ bool Shape::cachify(JSContext* cx, Shape* shape) {
MOZ_ASSERT(!shape->hasTable() && !shape->hasIC());
if (!shape->ensureOwnBaseShape(cx)) {
return false;
}
UniquePtr<ShapeIC> ic = cx->make_unique<ShapeIC>();
if (!ic) {
return false;
}
if (!ic->init(cx)) {
return false;
}
shape->base()->setIC(ic.release());
return true;
}
@@ -189,8 +215,8 @@ bool ShapeTable::change(JSContext* cx, int log2Delta) {
MOZ_ASSERT(newLog2 <= HASH_BITS);
hashShift_ = HASH_BITS - newLog2;
removedCount_ = 0;
Entry* oldTable = entries_;
entries_ = newTable;
Entry* oldTable = entries_.release();
entries_.reset(newTable);
/* Copy only live entries, leaving removed and free ones behind. */
AutoCheckCannotGC nogc;
@@ -228,6 +254,23 @@ bool ShapeTable::grow(JSContext* cx) {
return true;
}
void ShapeCachePtr::trace(JSTracer* trc) {
if (isIC()) {
getICPointer()->trace(trc);
} else if (isTable()) {
getTablePointer()->trace(trc);
}
}
void ShapeIC::trace(JSTracer* trc) {
for (size_t i = 0; i < entryCount(); i++) {
Entry& entry = entries_[i];
if (entry.shape_) {
TraceManuallyBarrieredEdge(trc, &entry.shape_, "ShapeIC shape");
}
}
}
void ShapeTable::trace(JSTracer* trc) {
for (size_t i = 0; i < capacity(); i++) {
Entry& entry = getEntry(i);
@@ -243,6 +286,24 @@ void ShapeTable::trace(JSTracer* trc) {
#ifdef JSGC_HASH_TABLE_CHECKS
void ShapeCachePtr::checkAfterMovingGC() {
if (isIC()) {
getICPointer()->checkAfterMovingGC();
} else if (isTable()) {
getTablePointer()->checkAfterMovingGC();
}
}
void ShapeIC::checkAfterMovingGC() {
for (size_t i = 0; i < entryCount(); i++) {
Entry& entry = entries_[i];
Shape* shape = entry.shape_;
if (shape) {
CheckGCThingAfterMovingGC(shape);
}
}
}
void ShapeTable::checkAfterMovingGC() {
for (size_t i = 0; i < capacity(); i++) {
Entry& entry = getEntry(i);
@@ -500,7 +561,7 @@ class MOZ_RAII AutoCheckShapeConsistency {
/* static */ MOZ_ALWAYS_INLINE bool
NativeObject::maybeConvertToOrGrowDictionaryForAdd(
JSContext* cx, HandleNativeObject obj, HandleId id, ShapeTable** table,
ShapeTable::Entry** entry, const AutoKeepShapeTables& keep) {
ShapeTable::Entry** entry, const AutoKeepShapeCaches& keep) {
MOZ_ASSERT(!!*table == !!*entry);
// The code below deals with either converting obj to dictionary mode or
@@ -529,7 +590,7 @@ NativeObject::maybeConvertToOrGrowDictionaryForAdd(
MOZ_ALWAYS_INLINE void Shape::updateDictionaryTable(
ShapeTable* table, ShapeTable::Entry* entry,
const AutoKeepShapeTables& keep) {
const AutoKeepShapeCaches& keep) {
MOZ_ASSERT(table);
MOZ_ASSERT(entry);
MOZ_ASSERT(inDictionary());
@@ -559,7 +620,7 @@ static void AssertValidPropertyOp(NativeObject* obj, GetterOp getter,
/* static */ Shape* NativeObject::addAccessorPropertyInternal(
JSContext* cx, HandleNativeObject obj, HandleId id, GetterOp getter,
SetterOp setter, unsigned attrs, ShapeTable* table,
ShapeTable::Entry* entry, const AutoKeepShapeTables& keep) {
ShapeTable::Entry* entry, const AutoKeepShapeCaches& keep) {
AutoCheckShapeConsistency check(obj);
AutoRooterGetterSetter gsRoot(cx, attrs, &getter, &setter);
@@ -600,7 +661,7 @@ static void AssertValidPropertyOp(NativeObject* obj, GetterOp getter,
/* static */ Shape* NativeObject::addDataPropertyInternal(
JSContext* cx, HandleNativeObject obj, HandleId id, uint32_t slot,
unsigned attrs, ShapeTable* table, ShapeTable::Entry* entry,
const AutoKeepShapeTables& keep) {
const AutoKeepShapeCaches& keep) {
AutoCheckShapeConsistency check(obj);
// The slot, if any, must be a reserved slot.
@@ -708,7 +769,7 @@ static MOZ_ALWAYS_INLINE Shape* PropertyTreeReadBarrier(Shape* parent,
return kid;
} while (0);
AutoKeepShapeTables keep(cx);
AutoKeepShapeCaches keep(cx);
ShapeTable* table = nullptr;
ShapeTable::Entry* entry = nullptr;
@@ -900,7 +961,7 @@ static void AssertValidArrayIndex(NativeObject* obj, jsid id) {
AssertValidArrayIndex(obj, id);
// Search for id in order to claim its entry if table has been allocated.
AutoKeepShapeTables keep(cx);
AutoKeepShapeCaches keep(cx);
RootedShape shape(cx);
{
ShapeTable* table;
@@ -1016,7 +1077,7 @@ static void AssertValidArrayIndex(NativeObject* obj, jsid id) {
AutoRooterGetterSetter gsRoot(cx, attrs, &getter, &setter);
// Search for id in order to claim its entry if table has been allocated.
AutoKeepShapeTables keep(cx);
AutoKeepShapeCaches keep(cx);
RootedShape shape(cx);
{
ShapeTable* table;
@@ -1154,7 +1215,7 @@ static void AssertValidArrayIndex(NativeObject* obj, jsid id) {
jsid id_) {
RootedId id(cx, id_);
AutoKeepShapeTables keep(cx);
AutoKeepShapeCaches keep(cx);
ShapeTable* table;
ShapeTable::Entry* entry;
RootedShape shape(cx);
@@ -1463,11 +1524,7 @@ static void AssertValidArrayIndex(NativeObject* obj, jsid id) {
}
inline BaseShape::BaseShape(const StackBaseShape& base)
: clasp_(base.clasp),
flags(base.flags),
slotSpan_(0),
unowned_(nullptr),
table_(nullptr) {}
: clasp_(base.clasp), flags(base.flags), slotSpan_(0), unowned_(nullptr) {}
/* static */ void BaseShape::copyFromUnowned(BaseShape& dest,
UnownedBaseShape& src) {
@@ -1525,11 +1582,11 @@ void BaseShape::assertConsistency() {
}
void BaseShape::traceChildren(JSTracer* trc) {
traceChildrenSkipShapeTable(trc);
traceShapeTable(trc);
traceChildrenSkipShapeCache(trc);
traceShapeCache(trc);
}
void BaseShape::traceChildrenSkipShapeTable(JSTracer* trc) {
void BaseShape::traceChildrenSkipShapeCache(JSTracer* trc) {
if (isOwned()) {
TraceEdge(trc, &unowned_, "base");
}
@@ -1537,21 +1594,18 @@ void BaseShape::traceChildrenSkipShapeTable(JSTracer* trc) {
assertConsistency();
}
void BaseShape::traceShapeTable(JSTracer* trc) {
void BaseShape::traceShapeCache(JSTracer* trc) {
AutoCheckCannotGC nogc;
if (ShapeTable* table = maybeTable(nogc)) {
table->trace(trc);
}
cache_.trace(trc);
}
#ifdef DEBUG
bool BaseShape::canSkipMarkingShapeTable(Shape* lastShape) {
bool BaseShape::canSkipMarkingShapeCache(Shape* lastShape) {
// Check that every shape in the shape table will be marked by marking
// |lastShape|.
AutoCheckCannotGC nogc;
ShapeTable* table = maybeTable(nogc);
if (!table) {
ShapeCachePtr cache = getCache(nogc);
if (!cache.isTable()) {
return true;
}
@@ -1559,13 +1613,14 @@ bool BaseShape::canSkipMarkingShapeTable(Shape* lastShape) {
for (Shape::Range<NoGC> r(lastShape); !r.empty(); r.popFront()) {
Shape* shape = &r.front();
ShapeTable::Entry& entry =
table->search<MaybeAdding::NotAdding>(shape->propid(), nogc);
cache.getTablePointer()->search<MaybeAdding::NotAdding>(shape->propid(),
nogc);
if (entry.isLive()) {
count++;
}
}
return count == table->entryCount();
return count == cache.getTablePointer()->entryCount();
}
#endif
@@ -1584,9 +1639,8 @@ void Zone::checkBaseShapeTableAfterMovingGC() {
#endif // JSGC_HASH_TABLE_CHECKS
void BaseShape::finalize(FreeOp* fop) {
if (table_) {
fop->delete_(table_);
table_ = nullptr;
if (cache_.isInitialized()) {
cache_.destroy(fop);
}
}


@@ -21,6 +21,7 @@
#include "NamespaceImports.h"
#include "gc/Barrier.h"
#include "gc/FreeOp.h"
#include "gc/Heap.h"
#include "gc/Rooting.h"
#include "js/HashTable.h"
@@ -92,9 +93,9 @@
* Shapes can grow and shrink.
*
* To save memory, shape tables can be discarded on GC and recreated when
* needed. AutoKeepShapeTables can be used to avoid discarding shape tables
* needed. AutoKeepShapeCaches can be used to avoid discarding shape tables
* for a particular zone. Methods operating on ShapeTables take either an
* AutoCheckCannotGC or AutoKeepShapeTables argument, to help ensure tables
* AutoCheckCannotGC or AutoKeepShapeCaches argument, to help ensure tables
* are not purged while we're using them.
*
* There used to be a long, math-heavy comment here explaining why property
@@ -211,10 +212,75 @@ static const uint32_t SHAPE_MAXIMUM_SLOT = JS_BIT(24) - 2;
enum class MaybeAdding { Adding = true, NotAdding = false };
class AutoKeepShapeTables;
class AutoKeepShapeCaches;
/*
* Shapes use multiplicative hashing, but specialized to
* ShapeIC uses a small array that is linearly searched.
*/
class ShapeIC {
public:
friend class NativeObject;
friend class BaseShape;
friend class Shape;
ShapeIC() : size_(0), nextFreeIndex_(0), entries_(nullptr) {}
~ShapeIC() = default;
bool isFull() const {
MOZ_ASSERT(nextFreeIndex_ <= size_);
return size_ == nextFreeIndex_;
}
size_t sizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf) const {
return mallocSizeOf(this) + mallocSizeOf(entries_.get());
}
uint32_t entryCount() { return nextFreeIndex_; }
bool init(JSContext* cx);
void trace(JSTracer* trc);
#ifdef JSGC_HASH_TABLE_CHECKS
void checkAfterMovingGC();
#endif
MOZ_ALWAYS_INLINE bool search(jsid id, Shape** foundShape);
MOZ_ALWAYS_INLINE bool appendEntry(jsid id, Shape* shape) {
MOZ_ASSERT(nextFreeIndex_ <= size_);
if (nextFreeIndex_ == size_) {
return false;
}
entries_[nextFreeIndex_].id_ = id;
entries_[nextFreeIndex_].shape_ = shape;
nextFreeIndex_++;
return true;
}
private:
static const uint32_t MAX_SIZE = 7;
class Entry {
public:
jsid id_;
Shape* shape_;
Entry() = delete;
Entry(const Entry&) = delete;
Entry& operator=(const Entry&) = delete;
};
uint8_t size_;
uint8_t nextFreeIndex_;
/* table of ptrs to {jsid,Shape*} pairs */
UniquePtr<Entry[], JS::FreePolicy> entries_;
};
/*
* ShapeTable uses multiplicative hashing, but specialized to
* minimize footprint.
*/
class ShapeTable {
@@ -222,7 +288,7 @@ class ShapeTable {
friend class NativeObject;
friend class BaseShape;
friend class Shape;
static const uint32_t MIN_ENTRIES = 11;
friend class ShapeCachePtr;
class Entry {
// js::Shape pointer tag bit indicating a collision.
@@ -282,7 +348,8 @@ class ShapeTable {
freelist in owning dictionary-mode
object */
Entry* entries_; /* table of ptrs to shared tree nodes */
UniquePtr<Entry[], JS::FreePolicy>
entries_; /* table of ptrs to shared tree nodes */
template <MaybeAdding Adding>
MOZ_ALWAYS_INLINE Entry& searchUnchecked(jsid id);
@@ -297,7 +364,7 @@ class ShapeTable {
/* NB: entries is set by init, which must be called. */
}
~ShapeTable() { js_free(entries_); }
~ShapeTable() = default;
uint32_t entryCount() const { return entryCount_; }
@@ -309,7 +376,7 @@ class ShapeTable {
* heap-allocated) and its |entries| array.
*/
size_t sizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf) const {
return mallocSizeOf(this) + mallocSizeOf(entries_);
return mallocSizeOf(this) + mallocSizeOf(entries_.get());
}
// init() is fallible and reports OOM to the context.
@@ -319,7 +386,7 @@ class ShapeTable {
bool change(JSContext* cx, int log2Delta);
template <MaybeAdding Adding>
MOZ_ALWAYS_INLINE Entry& search(jsid id, const AutoKeepShapeTables&);
MOZ_ALWAYS_INLINE Entry& search(jsid id, const AutoKeepShapeCaches&);
template <MaybeAdding Adding>
MOZ_ALWAYS_INLINE Entry& search(jsid id, const JS::AutoCheckCannotGC&);
@@ -363,17 +430,151 @@ class ShapeTable {
bool grow(JSContext* cx);
};
/*
* Wrapper class to either ShapeTable or ShapeIC optimization.
*
* Shapes are initially cached in a linear cache from the ShapeIC class that is
* lazily initialized after LINEAR_SEARCHES_MAX searches have been reached, and
* the Shape has at least MIN_ENTRIES parents in the lineage.
*
* We use the population of the cache as an indicator of whether the ShapeIC is
* working or not. Once it is full, it is destroyed and a ShapeTable is
* created instead.
*
* For dictionaries, the linear cache is skipped entirely and hashify is used
* to generate the ShapeTable immediately.
*/
class ShapeCachePtr {
// To reduce impact on memory usage, p is the only data member for this class.
uintptr_t p;
enum class CacheType {
IC = 0x1,
Table = 0x2,
};
static const uint32_t MASK_BITS = 0x3;
static const uintptr_t CACHETYPE_MASK = 0x3;
void* getPointer() const {
uintptr_t ptrVal = p & ~CACHETYPE_MASK;
return reinterpret_cast<void*>(ptrVal);
}
CacheType getType() const {
return static_cast<CacheType>(p & CACHETYPE_MASK);
}
public:
static const uint32_t MIN_ENTRIES = 3;
ShapeCachePtr() : p(0) {}
template <MaybeAdding Adding>
MOZ_ALWAYS_INLINE bool search(jsid id, Shape* start, Shape** foundShape);
bool isIC() const { return (getType() == CacheType::IC); }
bool isTable() const { return (getType() == CacheType::Table); }
bool isInitialized() const { return isTable() || isIC(); }
ShapeTable* getTablePointer() const {
MOZ_ASSERT(isTable());
return reinterpret_cast<ShapeTable*>(getPointer());
}
ShapeIC* getICPointer() const {
MOZ_ASSERT(isIC());
return reinterpret_cast<ShapeIC*>(getPointer());
}
// Use ShapeTable implementation.
// This will clobber an existing IC implementation.
void initializeTable(ShapeTable* table) {
MOZ_ASSERT(!isTable());
maybePurgeCache();
uintptr_t tableptr = uintptr_t(table);
// Double check that pointer is 4 byte aligned.
MOZ_ASSERT((tableptr & CACHETYPE_MASK) == 0);
tableptr |= static_cast<uintptr_t>(CacheType::Table);
p = tableptr;
}
// Use ShapeIC implementation.
// This cannot clobber an existing Table implementation.
void initializeIC(ShapeIC* ic) {
MOZ_ASSERT(!isTable() && !isIC());
uintptr_t icptr = uintptr_t(ic);
// Double check that pointer is 4 byte aligned.
MOZ_ASSERT((icptr & CACHETYPE_MASK) == 0);
icptr |= static_cast<uintptr_t>(CacheType::IC);
p = icptr;
}
void destroy(FreeOp* fop) {
if (isTable()) {
fop->delete_<ShapeTable>(getTablePointer());
} else if (isIC()) {
fop->delete_<ShapeIC>(getICPointer());
}
p = 0;
}
void maybePurgeCache() {
if (isTable()) {
ShapeTable* table = getTablePointer();
if (table->freeList() == SHAPE_INVALID_SLOT) {
js_delete<ShapeTable>(getTablePointer());
p = 0;
}
} else if (isIC()) {
js_delete<ShapeIC>(getICPointer());
p = 0;
}
}
void trace(JSTracer* trc);
size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const {
size_t size = 0;
if (isIC()) {
size = getICPointer()->sizeOfIncludingThis(mallocSizeOf);
} else if (isTable()) {
size = getTablePointer()->sizeOfIncludingThis(mallocSizeOf);
}
return size;
}
uint32_t entryCount() {
uint32_t count = 0;
if (isIC()) {
count = getICPointer()->entryCount();
} else if (isTable()) {
count = getTablePointer()->entryCount();
}
return count;
}
#ifdef JSGC_HASH_TABLE_CHECKS
void checkAfterMovingGC();
#endif
};
// Ensures no shape tables are purged in the current zone.
class MOZ_RAII AutoKeepShapeTables {
class MOZ_RAII AutoKeepShapeCaches {
JSContext* cx_;
bool prev_;
AutoKeepShapeTables(const AutoKeepShapeTables&) = delete;
void operator=(const AutoKeepShapeTables&) = delete;
public:
explicit inline AutoKeepShapeTables(JSContext* cx);
inline ~AutoKeepShapeTables();
void operator=(const AutoKeepShapeCaches&) = delete;
AutoKeepShapeCaches(const AutoKeepShapeCaches&) = delete;
explicit inline AutoKeepShapeCaches(JSContext* cx);
inline ~AutoKeepShapeCaches();
};
/*
@@ -481,7 +682,7 @@ class BaseShape : public gc::TenuredCell {
GCPtrUnownedBaseShape unowned_;
/* For owned BaseShapes, the shape's shape table. */
ShapeTable* table_;
ShapeCachePtr cache_;
BaseShape(const BaseShape& base) = delete;
BaseShape& operator=(const BaseShape& other) = delete;
@@ -509,29 +710,57 @@ class BaseShape : public gc::TenuredCell {
uint32_t getObjectFlags() const { return flags & OBJECT_FLAG_MASK; }
bool hasTable() const {
MOZ_ASSERT_IF(table_, isOwned());
return table_ != nullptr;
}
void setTable(ShapeTable* table) {
MOZ_ASSERT(isOwned());
table_ = table;
MOZ_ASSERT_IF(cache_.isInitialized(), isOwned());
return cache_.isTable();
}
ShapeTable* maybeTable(const AutoKeepShapeTables&) const {
MOZ_ASSERT_IF(table_, isOwned());
return table_;
bool hasIC() const {
MOZ_ASSERT_IF(cache_.isInitialized(), isOwned());
return cache_.isIC();
}
void setTable(ShapeTable* table) {
MOZ_ASSERT(isOwned());
cache_.initializeTable(table);
}
void setIC(ShapeIC* ic) {
MOZ_ASSERT(isOwned());
cache_.initializeIC(ic);
}
ShapeCachePtr getCache(const AutoKeepShapeCaches&) const {
MOZ_ASSERT_IF(cache_.isInitialized(), isOwned());
return cache_;
}
ShapeCachePtr getCache(const JS::AutoCheckCannotGC&) const {
MOZ_ASSERT_IF(cache_.isInitialized(), isOwned());
return cache_;
}
ShapeTable* maybeTable(const AutoKeepShapeCaches&) const {
MOZ_ASSERT_IF(cache_.isInitialized(), isOwned());
return (cache_.isTable()) ? cache_.getTablePointer() : nullptr;
}
ShapeTable* maybeTable(const JS::AutoCheckCannotGC&) const {
MOZ_ASSERT_IF(table_, isOwned());
return table_;
MOZ_ASSERT_IF(cache_.isInitialized(), isOwned());
return (cache_.isTable()) ? cache_.getTablePointer() : nullptr;
}
void maybePurgeTable() {
if (table_ && table_->freeList() == SHAPE_INVALID_SLOT) {
js_delete(table_);
table_ = nullptr;
}
ShapeIC* maybeIC(const AutoKeepShapeCaches&) const {
MOZ_ASSERT_IF(cache_.isInitialized(), isOwned());
return (cache_.isIC()) ? cache_.getICPointer() : nullptr;
}
ShapeIC* maybeIC(const JS::AutoCheckCannotGC&) const {
MOZ_ASSERT_IF(cache_.isInitialized(), isOwned());
return (cache_.isIC()) ? cache_.getICPointer() : nullptr;
}
void maybePurgeCache() { cache_.maybePurgeCache(); }
uint32_t slotSpan() const {
MOZ_ASSERT(isOwned());
return slotSpan_;
@@ -565,10 +794,10 @@ class BaseShape : public gc::TenuredCell {
static const JS::TraceKind TraceKind = JS::TraceKind::BaseShape;
void traceChildren(JSTracer* trc);
void traceChildrenSkipShapeTable(JSTracer* trc);
void traceChildrenSkipShapeCache(JSTracer* trc);
#ifdef DEBUG
bool canSkipMarkingShapeTable(Shape* lastShape);
bool canSkipMarkingShapeCache(Shape* lastShape);
#endif
private:
@@ -580,7 +809,7 @@ class BaseShape : public gc::TenuredCell {
"a multiple of gc::CellAlignBytes");
}
void traceShapeTable(JSTracer* trc);
void traceShapeCache(JSTracer* trc);
};
class UnownedBaseShape : public BaseShape {};
@@ -713,10 +942,11 @@ class Shape : public gc::TenuredCell {
enum MutableFlags : uint8_t {
// numLinearSearches starts at zero and is incremented initially on
// search() calls. Once numLinearSearches reaches LINEAR_SEARCHES_MAX,
// the table is created on the next search() call. The table can also
// be created when hashifying for dictionary mode.
LINEAR_SEARCHES_MAX = 0x7,
LINEAR_SEARCHES_MASK = LINEAR_SEARCHES_MAX,
// the inline cache is created on the next search() call. Once the
// cache is full, it self transforms into a hash table. The hash table
// can also be created directly when hashifying for dictionary mode.
LINEAR_SEARCHES_MAX = 0x5,
LINEAR_SEARCHES_MASK = 0x7,
// Slotful property was stored to more than once. This is used as a
// hint for type inference.
@@ -747,7 +977,7 @@ class Shape : public gc::TenuredCell {
template <MaybeAdding Adding = MaybeAdding::NotAdding>
static inline MOZ_MUST_USE bool search(JSContext* cx, Shape* start, jsid id,
const AutoKeepShapeTables&,
const AutoKeepShapeCaches&,
Shape** pshape, ShapeTable** ptable,
ShapeTable::Entry** pentry);
@@ -770,6 +1000,7 @@ class Shape : public gc::TenuredCell {
* lineage in preparation for converting an object to dictionary mode.
*/
static bool hashify(JSContext* cx, Shape* shape);
static bool cachify(JSContext* cx, Shape* shape);
void handoffTableTo(Shape* newShape);
void setParent(Shape* p) {
@@ -789,21 +1020,41 @@ class Shape : public gc::TenuredCell {
bool makeOwnBaseShape(JSContext* cx);
MOZ_ALWAYS_INLINE MOZ_MUST_USE bool maybeCreateTableForLookup(JSContext* cx);
MOZ_ALWAYS_INLINE MOZ_MUST_USE bool maybeCreateCacheForLookup(JSContext* cx);
MOZ_ALWAYS_INLINE void updateDictionaryTable(ShapeTable* table,
ShapeTable::Entry* entry,
const AutoKeepShapeTables& keep);
const AutoKeepShapeCaches& keep);
public:
bool hasTable() const { return base()->hasTable(); }
bool hasIC() const { return base()->hasIC(); }
ShapeTable* maybeTable(const AutoKeepShapeTables& keep) const {
ShapeIC* maybeIC(const AutoKeepShapeCaches& keep) const {
return base()->maybeIC(keep);
}
ShapeIC* maybeIC(const JS::AutoCheckCannotGC& check) const {
return base()->maybeIC(check);
}
ShapeTable* maybeTable(const AutoKeepShapeCaches& keep) const {
return base()->maybeTable(keep);
}
ShapeTable* maybeTable(const JS::AutoCheckCannotGC& check) const {
return base()->maybeTable(check);
}
ShapeCachePtr getCache(const AutoKeepShapeCaches& keep) const {
return base()->getCache(keep);
}
ShapeCachePtr getCache(const JS::AutoCheckCannotGC& check) const {
return base()->getCache(check);
}
bool appendShapeToIC(jsid id, Shape* shape,
const JS::AutoCheckCannotGC& check) {
MOZ_ASSERT(hasIC());
ShapeCachePtr cache = getCache(check);
return cache.getICPointer()->appendEntry(id, shape);
}
template <typename T>
MOZ_MUST_USE ShapeTable* ensureTableForDictionary(JSContext* cx,
@@ -823,14 +1074,12 @@ class Shape : public gc::TenuredCell {
void addSizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf,
JS::ShapeInfo* info) const {
JS::AutoCheckCannotGC nogc;
if (ShapeTable* table = maybeTable(nogc)) {
if (inDictionary()) {
info->shapesMallocHeapDictTables +=
table->sizeOfIncludingThis(mallocSizeOf);
} else {
info->shapesMallocHeapTreeTables +=
table->sizeOfIncludingThis(mallocSizeOf);
}
if (inDictionary()) {
info->shapesMallocHeapDictTables +=
getCache(nogc).sizeOfExcludingThis(mallocSizeOf);
} else {
info->shapesMallocHeapTreeTables +=
getCache(nogc).sizeOfExcludingThis(mallocSizeOf);
}
if (!inDictionary() && kids.isHash()) {
@@ -1083,7 +1332,7 @@ class Shape : public gc::TenuredCell {
uint32_t count = 0;
for (Shape::Range<NoGC> r(this); !r.empty(); r.popFront()) {
++count;
if (count >= ShapeTable::MIN_ENTRIES) {
if (count >= ShapeCachePtr::MIN_ENTRIES) {
return true;
}
}
@@ -1517,6 +1766,37 @@ inline bool Shape::matches(const StackShape& other) const {
other.rawGetter, other.rawSetter);
}
template <MaybeAdding Adding>
MOZ_ALWAYS_INLINE bool ShapeCachePtr::search(jsid id, Shape* start,
Shape** foundShape) {
bool found = false;
if (isIC()) {
ShapeIC* ic = getICPointer();
found = ic->search(id, foundShape);
} else if (isTable()) {
ShapeTable* table = getTablePointer();
ShapeTable::Entry& entry = table->searchUnchecked<Adding>(id);
*foundShape = entry.shape();
found = true;
}
return found;
}
MOZ_ALWAYS_INLINE bool ShapeIC::search(jsid id, Shape** foundShape) {
// This loop needs to be as fast as possible, so use a direct pointer
// to the array instead of going through the UniquePtr methods.
Entry* entriesArray = entries_.get();
for (uint8_t i = 0; i < nextFreeIndex_; i++) {
Entry& entry = entriesArray[i];
if (entry.id_ == id) {
*foundShape = entry.shape_;
return true;
}
}
return false;
}
Shape* ReshapeForAllocKind(JSContext* cx, Shape* shape, TaggedProto proto,
gc::AllocKind allocKind);


@@ -4758,7 +4758,7 @@ void TypeScript::destroy(Zone* zone) {
void Zone::addSizeOfIncludingThis(
mozilla::MallocSizeOf mallocSizeOf, size_t* typePool, size_t* regexpZone,
size_t* jitZone, size_t* baselineStubsOptimized, size_t* cachedCFG,
size_t* uniqueIdMap, size_t* shapeTables, size_t* atomsMarkBitmaps,
size_t* uniqueIdMap, size_t* shapeCaches, size_t* atomsMarkBitmaps,
size_t* compartmentObjects, size_t* crossCompartmentWrappersTables,
size_t* compartmentsPrivateData) {
*typePool += types.typeLifoAlloc().sizeOfExcludingThis(mallocSizeOf);
@@ -4768,7 +4768,7 @@ void Zone::addSizeOfIncludingThis(
baselineStubsOptimized, cachedCFG);
}
*uniqueIdMap += uniqueIds().shallowSizeOfExcludingThis(mallocSizeOf);
*shapeTables += baseShapes().sizeOfExcludingThis(mallocSizeOf) +
*shapeCaches += baseShapes().sizeOfExcludingThis(mallocSizeOf) +
initialShapes().sizeOfExcludingThis(mallocSizeOf);
*atomsMarkBitmaps += markedAtoms().sizeOfExcludingThis(mallocSizeOf);