Bug 1633176 - re-land incremental weakmap marking

Differential Revision: https://phabricator.services.mozilla.com/D74079
Steve Fink 2020-05-06 16:38:33 +00:00
Parent d6e59a92f6
Commit 3b17c8c75a
20 changed files with 472 additions and 87 deletions

View file

@ -2597,6 +2597,11 @@ void nsJSContext::EnsureStatics() {
"javascript.options.mem.gc_compacting",
(void*)JSGC_COMPACTING_ENABLED);
Preferences::RegisterCallbackAndCall(
SetMemoryPrefChangedCallbackBool,
"javascript.options.mem.incremental_weakmap",
(void*)JSGC_INCREMENTAL_WEAKMAP_ENABLED);
Preferences::RegisterCallbackAndCall(
SetMemoryPrefChangedCallbackInt,
"javascript.options.mem.gc_high_frequency_time_limit_ms",

View file

@ -340,6 +340,14 @@ typedef enum JSGCParamKey {
* Default: MallocGrowthFactor
*/
JSGC_MALLOC_GROWTH_FACTOR = 36,
/**
* Whether incremental weakmap marking is enabled.
*
* Pref: javascript.options.mem.incremental_weakmap
* Default: IncrementalWeakMapMarkingEnabled
*/
JSGC_INCREMENTAL_WEAKMAP_ENABLED = 37,
} JSGCParamKey;
/*

View file

@ -92,7 +92,10 @@ bool WeakMapObject::get(JSContext* cx, unsigned argc, Value* vp) {
if (ObjectValueWeakMap* map =
args.thisv().toObject().as<WeakMapObject>().getMap()) {
JSObject* key = &args[0].toObject();
if (ObjectValueWeakMap::Ptr ptr = map->lookup(key)) {
// The lookup here is only used for the removal, so we can skip the read
// barrier. This is not very important for performance, but makes it easier
// to test unbarriered removal from internal weakmaps (e.g. Debugger maps).
if (ObjectValueWeakMap::Ptr ptr = map->lookupUnbarriered(key)) {
map->remove(ptr);
args.rval().setBoolean(true);
return true;

View file

@ -34,6 +34,7 @@
#include "gc/FreeOp-inl.h" // for JSFreeOp::free_
#include "gc/GC-inl.h" // for ZoneCellIter
#include "gc/Marking-inl.h" // for CheckGCThingAfterMovingGC
#include "gc/WeakMap-inl.h" // for WeakMap::remove
#include "vm/JSContext-inl.h" // for JSContext::check
#include "vm/JSScript-inl.h" // for JSScript::hasBaselineScript
#include "vm/Realm-inl.h" // for AutoRealm::AutoRealm

View file

@ -73,6 +73,7 @@
#include "wasm/WasmTypes.h" // for DebugFrame
#include "debugger/Debugger-inl.h" // for Debugger::fromJSObject
#include "gc/WeakMap-inl.h" // for WeakMap::remove
#include "vm/Compartment-inl.h" // for Compartment::wrap
#include "vm/JSContext-inl.h" // for JSContext::check
#include "vm/JSObject-inl.h" // for NewObjectWithGivenProto

View file

@ -1370,6 +1370,9 @@ bool GCRuntime::setParameter(JSGCParamKey key, uint32_t value,
case JSGC_COMPACTING_ENABLED:
compactingEnabled = value != 0;
break;
case JSGC_INCREMENTAL_WEAKMAP_ENABLED:
marker.incrementalWeakMapMarkingEnabled = value != 0;
break;
default:
if (!tunables.setParameter(key, value, lock)) {
return false;
@ -1403,6 +1406,10 @@ void GCRuntime::resetParameter(JSGCParamKey key, AutoLockGC& lock) {
case JSGC_COMPACTING_ENABLED:
compactingEnabled = TuningDefaults::CompactingEnabled;
break;
case JSGC_INCREMENTAL_WEAKMAP_ENABLED:
marker.incrementalWeakMapMarkingEnabled =
TuningDefaults::IncrementalWeakMapMarkingEnabled;
break;
default:
tunables.resetParameter(key, lock);
for (ZonesIter zone(this, WithAtoms); !zone.done(); zone.next()) {
@ -1474,6 +1481,8 @@ uint32_t GCRuntime::getParameter(JSGCParamKey key, const AutoLockGC& lock) {
return tunables.maxEmptyChunkCount();
case JSGC_COMPACTING_ENABLED:
return compactingEnabled;
case JSGC_INCREMENTAL_WEAKMAP_ENABLED:
return marker.incrementalWeakMapMarkingEnabled;
case JSGC_NURSERY_FREE_THRESHOLD_FOR_IDLE_COLLECTION:
return tunables.nurseryFreeThresholdForIdleCollection();
case JSGC_NURSERY_FREE_THRESHOLD_FOR_IDLE_COLLECTION_PERCENT:
@ -4280,47 +4289,63 @@ void GCRuntime::updateMemoryCountersOnGCStart() {
}
template <class ZoneIterT>
void GCRuntime::markWeakReferences(gcstats::PhaseKind phase) {
MOZ_ASSERT(marker.isDrained());
IncrementalProgress GCRuntime::markWeakReferences(
gcstats::PhaseKind phase, SliceBudget& incrementalBudget) {
gcstats::AutoPhase ap1(stats(), phase);
if (marker.enterWeakMarkingMode()) {
auto unlimited = SliceBudget::unlimited();
SliceBudget& budget =
marker.incrementalWeakMapMarkingEnabled ? incrementalBudget : unlimited;
// We may have already entered weak marking mode.
if (!marker.isWeakMarking() && marker.enterWeakMarkingMode()) {
for (ZoneIterT zone(this); !zone.done(); zone.next()) {
zone->enterWeakMarkingMode(&marker);
if (zone->enterWeakMarkingMode(&marker, budget) == NotFinished) {
MOZ_ASSERT(marker.incrementalWeakMapMarkingEnabled);
marker.leaveWeakMarkingMode();
return NotFinished;
}
}
#ifdef DEBUG
for (ZoneIterT zone(this); !zone.done(); zone.next()) {
zone->checkWeakMarkingMode();
}
#endif
}
// TODO bug 1167452: Make weak marking incremental
drainMarkStack();
#ifdef DEBUG
for (ZoneIterT zone(this); !zone.done(); zone.next()) {
zone->checkWeakMarkingMode();
}
#endif
// This is not strictly necessary; if we yielded here without leaving weak
// marking mode, the mutator would run in weak marking mode and unmarking gray
// would end up doing the key lookups. But it seems better not to slow down
// barriers. Re-entering weak marking mode will be fast, since
// already-processed markables have been removed.
auto leaveOnExit =
mozilla::MakeScopeExit([&] { marker.leaveWeakMarkingMode(); });
bool markedAny = true;
while (markedAny) {
if (!marker.markUntilBudgetExhausted(budget)) {
MOZ_ASSERT(marker.incrementalWeakMapMarkingEnabled);
return NotFinished;
}
markedAny = false;
for (;;) {
bool markedAny = false;
if (!marker.isWeakMarking()) {
for (ZoneIterT zone(this); !zone.done(); zone.next()) {
markedAny |= WeakMapBase::markZoneIteratively(zone, &marker);
}
}
markedAny |= jit::JitRuntime::MarkJitcodeGlobalTableIteratively(&marker);
if (!markedAny) {
break;
}
drainMarkStack();
}
MOZ_ASSERT(marker.isDrained());
marker.leaveWeakMarkingMode();
return Finished;
}
void GCRuntime::markWeakReferencesInCurrentGroup(gcstats::PhaseKind phase) {
markWeakReferences<SweepGroupZonesIter>(phase);
IncrementalProgress GCRuntime::markWeakReferencesInCurrentGroup(
gcstats::PhaseKind phase, SliceBudget& budget) {
return markWeakReferences<SweepGroupZonesIter>(phase, budget);
}
template <class ZoneIterT>
@ -4340,8 +4365,9 @@ void GCRuntime::markGrayRoots(gcstats::PhaseKind phase) {
}
}
void GCRuntime::markAllWeakReferences(gcstats::PhaseKind phase) {
markWeakReferences<GCZonesIter>(phase);
IncrementalProgress GCRuntime::markAllWeakReferences(gcstats::PhaseKind phase,
SliceBudget& budget) {
return markWeakReferences<GCZonesIter>(phase, budget);
}
void GCRuntime::markAllGrayReferences(gcstats::PhaseKind phase) {
@ -4895,13 +4921,19 @@ IncrementalProgress GCRuntime::endMarkingSweepGroup(JSFreeOp* fop,
gcstats::AutoPhase ap(stats(), gcstats::PhaseKind::SWEEP_MARK);
markWeakReferencesInCurrentGroup(gcstats::PhaseKind::SWEEP_MARK_WEAK);
if (markWeakReferencesInCurrentGroup(gcstats::PhaseKind::SWEEP_MARK_WEAK,
budget) == NotFinished) {
return NotFinished;
}
AutoSetMarkColor setColorGray(marker, MarkColor::Gray);
marker.setMainStackColor(MarkColor::Gray);
// Mark transitively inside the current compartment group.
markWeakReferencesInCurrentGroup(gcstats::PhaseKind::SWEEP_MARK_GRAY_WEAK);
if (markWeakReferencesInCurrentGroup(gcstats::PhaseKind::SWEEP_MARK_GRAY_WEAK,
budget) == NotFinished) {
return NotFinished;
}
MOZ_ASSERT(marker.isDrained());
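
For readers unfamiliar with the Finished/NotFinished protocol used above, here is a minimal standalone sketch of the same budgeted fixed-point shape. Everything in it (ToyBudget, ToyMarker, markZoneWeakmapsIteratively, the numbers in main) is hypothetical scaffolding, not the GCRuntime or GCMarker API; it only illustrates how a slice can run out of budget, report NotFinished, and let a later slice resume the remaining work.

#include <cstdio>
#include <deque>

enum IncrementalProgress { NotFinished = 0, Finished };

struct ToyBudget {
  long remaining;
  bool step() { return --remaining >= 0; }
};

struct ToyMarker {
  std::deque<int> stack;  // pending "cells" to mark
  // Returns false when the budget runs out, leaving the rest for a later slice.
  bool markUntilBudgetExhausted(ToyBudget& budget) {
    while (!stack.empty()) {
      if (!budget.step()) {
        return false;
      }
      stack.pop_front();  // a real marker would trace children here
    }
    return true;
  }
};

// Each call pretends one more round of weakmap scanning found new work,
// until no rounds remain (the "markedAny" condition in the loop above).
static bool markZoneWeakmapsIteratively(ToyMarker& marker, int& pendingRounds) {
  if (pendingRounds == 0) {
    return false;
  }
  pendingRounds--;
  marker.stack.push_back(0);  // newly discovered value to mark
  return true;
}

// Toy analogue of the budgeted fixed point in markWeakReferences.
IncrementalProgress markWeakReferencesSketch(ToyMarker& marker,
                                             ToyBudget& budget,
                                             int& pendingRounds) {
  for (;;) {
    if (!marker.markUntilBudgetExhausted(budget)) {
      return NotFinished;  // yield; a later slice resumes from here
    }
    if (!markZoneWeakmapsIteratively(marker, pendingRounds)) {
      break;  // fixed point reached: nothing new was marked
    }
  }
  return Finished;
}

int main() {
  ToyMarker marker;
  marker.stack.assign(10, 0);
  int rounds = 3;
  ToyBudget small{5};
  std::printf("slice 1: %d\n", markWeakReferencesSketch(marker, small, rounds));
  ToyBudget unlimited{1000000};
  std::printf("slice 2: %d\n",
              markWeakReferencesSketch(marker, unlimited, rounds));
}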

View file

@ -28,6 +28,8 @@ static const size_t SMALL_MARK_STACK_BASE_CAPACITY = 256;
namespace gc {
enum IncrementalProgress { NotFinished = 0, Finished };
struct Cell;
struct WeakKeyTableHashPolicy {
@ -47,6 +49,10 @@ struct WeakMarkable {
WeakMarkable(WeakMapBase* weakmapArg, Cell* keyArg)
: weakmap(weakmapArg), key(keyArg) {}
bool operator==(const WeakMarkable& other) const {
return weakmap == other.weakmap && key == other.key;
}
};
using WeakEntryVector = Vector<WeakMarkable, 2, js::SystemAllocPolicy>;
@ -321,6 +327,16 @@ class GCMarker : public JSTracer {
void delayMarkingChildren(gc::Cell* cell);
// Remove <map,toRemove> from the weak keys table indexed by 'key'.
void forgetWeakKey(js::gc::WeakKeyTable& weakKeys, WeakMapBase* map,
gc::Cell* keyOrDelegate, gc::Cell* keyToRemove);
// Purge all mention of 'map' from the weak keys table.
void forgetWeakMap(WeakMapBase* map, Zone* zone);
// 'delegate' is no longer the delegate of 'key'.
void severWeakDelegate(JSObject* key, JSObject* delegate);
bool isDrained() { return isMarkStackEmpty() && !delayedMarkingList; }
// The mark queue is a testing-only feature for controlling mark ordering and
@ -471,7 +487,17 @@ class GCMarker : public JSTracer {
/* Track the state of marking. */
MainThreadOrGCTaskData<MarkingState> state;
public:
/*
* Whether weakmaps can be marked incrementally.
*
* JSGC_INCREMENTAL_WEAKMAP_ENABLED
* pref: javascript.options.mem.incremental_weakmap
*/
MainThreadOrGCTaskData<bool> incrementalWeakMapMarkingEnabled;
#ifdef DEBUG
private:
/* Count of arenas that are currently in the stack. */
MainThreadOrGCTaskData<size_t> markLaterArenas;

View file

@ -48,8 +48,6 @@ struct MovingTracer;
enum class ShouldCheckThresholds;
class SweepGroupsIter;
enum IncrementalProgress { NotFinished = 0, Finished };
// Interface to a sweep action.
struct SweepAction {
// The arguments passed to each action.
@ -741,12 +739,15 @@ class GCRuntime {
IncrementalProgress markUntilBudgetExhausted(SliceBudget& sliceBudget);
void drainMarkStack();
template <class ZoneIterT>
void markWeakReferences(gcstats::PhaseKind phase);
void markWeakReferencesInCurrentGroup(gcstats::PhaseKind phase);
IncrementalProgress markWeakReferences(gcstats::PhaseKind phase,
SliceBudget& budget);
IncrementalProgress markWeakReferencesInCurrentGroup(gcstats::PhaseKind phase,
SliceBudget& budget);
template <class ZoneIterT>
void markGrayRoots(gcstats::PhaseKind phase);
void markBufferedGrayRoots(JS::Zone* zone);
void markAllWeakReferences(gcstats::PhaseKind phase);
IncrementalProgress markAllWeakReferences(gcstats::PhaseKind phase,
SliceBudget& budget);
void markAllGrayReferences(gcstats::PhaseKind phase);
void beginSweepPhase(JS::GCReason reason, AutoGCSession& session);

View file

@ -15,6 +15,7 @@
#include "mozilla/Unused.h"
#include <algorithm>
#include <initializer_list>
#include <type_traits>
#include "jsfriendapi.h"
@ -763,6 +764,56 @@ void GCMarker::markEphemeronValues(gc::Cell* markedCell,
MOZ_ASSERT(values.length() == initialLen);
}
void GCMarker::forgetWeakKey(js::gc::WeakKeyTable& weakKeys, WeakMapBase* map,
gc::Cell* keyOrDelegate, gc::Cell* keyToRemove) {
// Find and remove the exact pair <map,keyToRemove> from the values of the
// weak keys table.
//
// This function is called when 'keyToRemove' is removed from a weakmap
// 'map'. If 'keyToRemove' has a delegate, then the delegate will be used as
// the lookup key in gcWeakKeys; otherwise, 'keyToRemove' itself will be. In
// either case, 'keyToRemove' is what we will be filtering out of the
// Markable values in the weakKey table.
auto p = weakKeys.get(keyOrDelegate);
// Note that this is not guaranteed to find anything. The key will have
// only been inserted into the weakKeys table if it was unmarked when the
// map was traced.
if (p) {
// Entries should only have been added to weakKeys if the map was marked.
for (auto r = p->value.all(); !r.empty(); r.popFront()) {
MOZ_ASSERT(r.front().weakmap->mapColor);
}
p->value.eraseIfEqual(WeakMarkable(map, keyToRemove));
}
}
void GCMarker::forgetWeakMap(WeakMapBase* map, Zone* zone) {
for (auto table : {&zone->gcNurseryWeakKeys(), &zone->gcWeakKeys()}) {
for (auto p = table->all(); !p.empty(); p.popFront()) {
p.front().value.eraseIf([map](const WeakMarkable& markable) -> bool {
return markable.weakmap == map;
});
}
}
}
// 'delegate' is no longer the delegate of 'key'.
void GCMarker::severWeakDelegate(JSObject* key, JSObject* delegate) {
JS::Zone* zone = delegate->zone();
auto p = zone->gcWeakKeys(delegate).get(delegate);
if (p) {
p->value.eraseIf([this, key](const WeakMarkable& markable) -> bool {
if (markable.key != key) {
return false;
}
markable.weakmap->postSeverDelegate(this, key, key->compartment());
return true;
});
}
}
template <typename T>
void GCMarker::markImplicitEdgesHelper(T markedThing) {
if (!isWeakMarking()) {
@ -860,6 +911,8 @@ template <typename T>
void DoMarking(GCMarker* gcmarker, T* thing) {
// Do per-type marking precondition checks.
if (!ShouldMark(gcmarker, thing)) {
MOZ_ASSERT(gc::detail::GetEffectiveColor(gcmarker->runtime(), thing) ==
js::gc::CellColor::Black);
return;
}
@ -2526,7 +2579,9 @@ GCMarker::GCMarker(JSRuntime* rt)
mainStackColor(MarkColor::Black),
delayedMarkingList(nullptr),
delayedMarkingWorkAdded(false),
state(MarkingState::NotActive)
state(MarkingState::NotActive),
incrementalWeakMapMarkingEnabled(
TuningDefaults::IncrementalWeakMapMarkingEnabled)
#ifdef DEBUG
,
markLaterArenas(0),
@ -2698,13 +2753,78 @@ bool GCMarker::enterWeakMarkingMode() {
return true;
}
void JS::Zone::enterWeakMarkingMode(GCMarker* marker) {
IncrementalProgress JS::Zone::enterWeakMarkingMode(GCMarker* marker,
SliceBudget& budget) {
MOZ_ASSERT(marker->isWeakMarking());
for (WeakMapBase* m : gcWeakMapList()) {
if (m->mapColor) {
mozilla::Unused << m->markEntries(marker);
if (!marker->incrementalWeakMapMarkingEnabled) {
// Do not rely on the information about not-yet-marked weak keys that have
// been collected by barriers. Rebuild the full table here.
mozilla::Unused << gcWeakKeys().clear();
for (WeakMapBase* m : gcWeakMapList()) {
if (m->mapColor) {
mozilla::Unused << m->markEntries(marker);
}
}
return IncrementalProgress::Finished;
}
// gcWeakKeys contains the keys from all weakmaps marked so far, or at least
// the keys that might still need to be marked through. Scan through
// gcWeakKeys and mark all values whose keys are marked. This marking may
// recursively mark through other weakmap entries (immediately since we are
// now in WeakMarking mode). The end result is a consistent state where all
// values are marked if both their map and key are marked -- though note that
// we may later leave weak marking mode, do some more marking, and then
// re-enter it.
if (!isGCMarking()) {
return IncrementalProgress::Finished;
}
MOZ_ASSERT(gcNurseryWeakKeys().count() == 0);
// An OrderedHashMap::Range stays valid even when the underlying table
// (zone->gcWeakKeys) is mutated, which is useful here since we may add
// additional entries while iterating over the Range.
gc::WeakKeyTable::Range r = gcWeakKeys().all();
while (!r.empty()) {
gc::Cell* key = r.front().key;
gc::CellColor keyColor =
gc::detail::GetEffectiveColor(marker->runtime(), key);
if (keyColor) {
MOZ_ASSERT(key == r.front().key);
auto& markables = r.front().value;
r.popFront(); // Pop before any mutations happen.
size_t end = markables.length();
for (size_t i = 0; i < end; i++) {
WeakMarkable& v = markables[i];
// Note: if the key is marked gray but not black, then the markables
// vector may be appended to within this loop body. So iterate just
// over the ones from before weak marking mode was switched on.
v.weakmap->markKey(marker, key, v.key);
budget.step();
if (budget.isOverBudget()) {
return NotFinished;
}
}
if (keyColor == gc::CellColor::Black) {
// We can't mark the key any more than it already is, so it no longer
// needs to be in the weak keys table.
if (end == markables.length()) {
bool found;
gcWeakKeys().remove(key, &found);
} else {
markables.erase(markables.begin(), &markables[end]);
}
}
} else {
r.popFront();
}
}
return IncrementalProgress::Finished;
}
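
The table-maintenance pattern in Zone::enterWeakMarkingMode above (snapshot the markables length, process under a budget, then drop either the whole entry or just the processed prefix) can be sketched with plain std containers. This is a hypothetical model, not js::gc::WeakKeyTable or OrderedHashMap; the actual marking of values is elided, and only the iteration, erasure, and budget logic is shown.

#include <cassert>
#include <map>
#include <vector>

enum class Color { White, Gray, Black };

struct Markable {
  int mapId;
  int keyId;
};

using ToyWeakKeyTable = std::map<int, std::vector<Markable>>;

// Returns false if the budget ran out; the caller would come back later.
bool processWeakKeys(ToyWeakKeyTable& table, const std::map<int, Color>& colors,
                     long& budget) {
  for (auto it = table.begin(); it != table.end();) {
    Color keyColor = colors.at(it->first);
    if (keyColor == Color::White) {
      ++it;  // key not marked yet; leave its markables alone
      continue;
    }
    std::vector<Markable>& markables = it->second;
    size_t end = markables.size();  // snapshot: marking may append for gray keys
    for (size_t i = 0; i < end; i++) {
      (void)markables[i];  // a real marker would mark the weakmap value here
      if (--budget < 0) {
        return false;
      }
    }
    if (keyColor == Color::Black) {
      if (end == markables.size()) {
        it = table.erase(it);  // nothing was appended: drop the whole entry
      } else {
        // Keep only the markables appended during processing.
        markables.erase(markables.begin(), markables.begin() + end);
        ++it;
      }
    } else {
      ++it;  // gray key: it may still be marked black later
    }
  }
  return true;
}

int main() {
  ToyWeakKeyTable table{{1, {{10, 1}, {11, 1}}}, {2, {{10, 2}}}};
  std::map<int, Color> colors{{1, Color::Black}, {2, Color::White}};
  long budget = 100;
  assert(processWeakKeys(table, colors, budget));
  assert(table.count(1) == 0);  // fully processed black key was removed
  assert(table.count(2) == 1);  // white key's markables are retained
}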
#ifdef DEBUG
@ -2726,14 +2846,8 @@ void GCMarker::leaveWeakMarkingMode() {
state = MarkingState::RegularMarking;
}
// Table is expensive to maintain when not in weak marking mode, so we'll
// rebuild it upon entry rather than allow it to contain stale data.
AutoEnterOOMUnsafeRegion oomUnsafe;
for (GCZonesIter zone(runtime()); !zone.done(); zone.next()) {
if (!zone->gcWeakKeys().clear()) {
oomUnsafe.crash("clearing weak keys in GCMarker::leaveWeakMarkingMode()");
}
}
// The gcWeakKeys table is still populated and may be used during a future
// weak marking mode within this GC.
}
void GCMarker::delayMarkingChildren(Cell* cell) {

View file

@ -397,6 +397,9 @@ static const JSGCMode Mode = JSGC_MODE_ZONE_INCREMENTAL;
/* JSGC_COMPACTING_ENABLED */
static const bool CompactingEnabled = true;
/* JSGC_INCREMENTAL_WEAKMAP_ENABLED */
static const bool IncrementalWeakMapMarkingEnabled = true;
/* JSGC_NURSERY_FREE_THRESHOLD_FOR_IDLE_COLLECTION */
static const uint32_t NurseryFreeThresholdForIdleCollection = ChunkSize / 4;

View file

@ -606,7 +606,8 @@ void js::gc::MarkingValidator::nonIncrementalMark(AutoGCSession& session) {
gcstats::AutoPhase ap1(gc->stats(), gcstats::PhaseKind::SWEEP);
gcstats::AutoPhase ap2(gc->stats(), gcstats::PhaseKind::SWEEP_MARK);
gc->markAllWeakReferences(gcstats::PhaseKind::SWEEP_MARK_WEAK);
auto unlimited = SliceBudget::unlimited();
gc->markAllWeakReferences(gcstats::PhaseKind::SWEEP_MARK_WEAK, unlimited);
/* Update zone state for gray marking. */
for (GCZonesIter zone(gc); !zone.done(); zone.next()) {
@ -617,7 +618,8 @@ void js::gc::MarkingValidator::nonIncrementalMark(AutoGCSession& session) {
gcmarker->setMainStackColor(MarkColor::Gray);
gc->markAllGrayReferences(gcstats::PhaseKind::SWEEP_MARK_GRAY);
gc->markAllWeakReferences(gcstats::PhaseKind::SWEEP_MARK_GRAY_WEAK);
gc->markAllWeakReferences(gcstats::PhaseKind::SWEEP_MARK_GRAY_WEAK,
unlimited);
gc->marker.setMainStackColor(MarkColor::Black);
/* Restore zone state. */

View file

@ -130,6 +130,11 @@ void WeakMap<K, V>::markKey(GCMarker* marker, gc::Cell* markedCell,
MOZ_ASSERT(mapColor);
Ptr p = Base::lookup(static_cast<Lookup>(origKey));
// We should only be processing <weakmap,key> pairs where the key exists in
// the weakmap. Such pairs are inserted when a weakmap is marked, and are
// removed by barriers if the key is removed from the weakmap. Failure here
// probably means gcWeakKeys is not being properly traced during a minor GC,
// or the weakmap keys are not being updated when tenured.
MOZ_ASSERT(p.found());
mozilla::DebugOnly<gc::Cell*> oldKey = gc::ToMarkable(p->key());
@ -219,6 +224,30 @@ void WeakMap<K, V>::trace(JSTracer* trc) {
}
}
template <class K, class V>
/* static */ void WeakMap<K, V>::forgetKey(UnbarrieredKey key) {
// Remove the key or its delegate from weakKeys.
if (zone()->needsIncrementalBarrier()) {
JSRuntime* rt = zone()->runtimeFromMainThread();
if (JSObject* delegate = js::gc::detail::GetDelegate(key)) {
js::gc::WeakKeyTable& weakKeys = delegate->zone()->gcWeakKeys(delegate);
rt->gc.marker.forgetWeakKey(weakKeys, this, delegate, key);
} else {
js::gc::WeakKeyTable& weakKeys = key->zone()->gcWeakKeys(key);
rt->gc.marker.forgetWeakKey(weakKeys, this, key, key);
}
}
}
template <class K, class V>
/* static */ void WeakMap<K, V>::clear() {
Base::clear();
JSRuntime* rt = zone()->runtimeFromMainThread();
if (zone()->needsIncrementalBarrier()) {
rt->gc.marker.forgetWeakMap(this, zone());
}
}
template <class K, class V>
/* static */ void WeakMap<K, V>::addWeakEntry(
GCMarker* marker, gc::Cell* key, const gc::WeakMarkable& markable) {
@ -249,9 +278,8 @@ bool WeakMap<K, V>::markEntries(GCMarker* marker) {
if (markEntry(marker, e.front().mutableKey(), e.front().value())) {
markedAny = true;
}
if (!marker->isWeakMarking()) {
// No need to populate the weak key table yet; it will be built from
// scratch during enterWeakMarkingMode.
if (!marker->incrementalWeakMapMarkingEnabled && !marker->isWeakMarking()) {
// The weak keys table will be populated when we enter weak marking mode.
continue;
}
@ -271,9 +299,10 @@ bool WeakMap<K, V>::markEntries(GCMarker* marker) {
// up marking the delegate and thereby mark the entry.)
gc::Cell* weakKey = gc::detail::ExtractUnbarriered(e.front().key());
gc::WeakMarkable markable(this, weakKey);
addWeakEntry(marker, weakKey, markable);
if (JSObject* delegate = gc::detail::GetDelegate(e.front().key())) {
addWeakEntry(marker, delegate, markable);
} else {
addWeakEntry(marker, weakKey, markable);
}
}
}
@ -281,6 +310,17 @@ bool WeakMap<K, V>::markEntries(GCMarker* marker) {
return markedAny;
}
template <class K, class V>
void WeakMap<K, V>::postSeverDelegate(GCMarker* marker, JSObject* key,
Compartment* comp) {
if (mapColor) {
// We only stored the delegate, not the key, and we're severing the
// delegate from the key. So store the key.
gc::WeakMarkable markable(this, key);
addWeakEntry(marker, key, markable);
}
}
template <class K, class V>
void WeakMap<K, V>::sweep() {
/* Remove all entries whose keys remain unmarked. */

View file

@ -33,6 +33,12 @@ WeakMapBase::~WeakMapBase() {
}
void WeakMapBase::unmarkZone(JS::Zone* zone) {
AutoEnterOOMUnsafeRegion oomUnsafe;
if (!zone->gcWeakKeys().clear()) {
oomUnsafe.crash("clearing weak keys table");
}
MOZ_ASSERT(zone->gcNurseryWeakKeys().count() == 0);
for (WeakMapBase* m : zone->gcWeakMapList()) {
m->mapColor = CellColor::White;
}
@ -137,27 +143,23 @@ size_t ObjectValueWeakMap::sizeOfIncludingThis(
}
bool ObjectValueWeakMap::findSweepGroupEdges() {
/*
* For unmarked weakmap keys with delegates in a different zone, add a zone
* edge to ensure that the delegate zone finishes marking before the key
* zone.
*/
// For weakmap keys with delegates in a different zone, add a zone edge to
// ensure that the delegate zone finishes marking before the key zone.
JS::AutoSuppressGCAnalysis nogc;
for (Range r = all(); !r.empty(); r.popFront()) {
JSObject* key = r.front().key();
if (key->asTenured().isMarkedBlack()) {
continue;
}
JSObject* delegate = gc::detail::GetDelegate(key);
if (!delegate) {
continue;
}
// Marking a WeakMap key's delegate will mark the key, so process the
// delegate zone no later than the key zone.
Zone* delegateZone = delegate->zone();
if (delegateZone == zone() || !delegateZone->isGCMarking()) {
continue;
}
if (!delegateZone->addSweepGroupEdgeTo(key->zone())) {
return false;
if (delegateZone != zone() && delegateZone->isGCMarking()) {
if (!delegateZone->addSweepGroupEdgeTo(key->zone())) {
return false;
}
}
}
return true;

View file

@ -14,6 +14,7 @@
#include "gc/Tracer.h"
#include "gc/ZoneAllocator.h"
#include "js/HashTable.h"
#include "js/HeapAPI.h"
namespace js {
@ -21,6 +22,8 @@ class GCMarker;
class WeakMapBase;
struct WeakMapTracer;
extern void DumpWeakMapLog(JSRuntime* rt);
namespace gc {
struct WeakMarkable;
@ -47,6 +50,42 @@ bool CheckWeakMapEntryMarking(const WeakMapBase* map, Cell* key, Cell* value);
// the implicit edges stored in the map) and of removing (sweeping) table
// entries when collection is complete.
// WeakMaps are marked with an incremental linear-time algorithm that handles
// all orderings of map and key marking. The basic algorithm is:
//
// At first, while marking, do nothing special when marking WeakMap keys (there
// is no straightforward way to know whether a particular object is being used
// as a key in some weakmap). When a WeakMap is marked, scan through it to mark
// all entries with live keys, and collect all unmarked keys into a "weak keys"
// table.
//
// At some point, everything reachable has been marked. At this point, enter
// "weak marking mode". In this mode, whenever any object is marked, look it up
// in the weak keys table to see if it is the key for any WeakMap entry and if
// so, mark the value. When entering weak marking mode, scan the weak key table
// to find all keys that have been marked since we added them to the table, and
// mark those entries.
//
// In addition, we want weakmap marking to work incrementally. So WeakMap
// mutations are barriered to keep the weak keys table up to date: entries are
// removed if their key is removed from the table, etc.
//
// You can break down various ways that WeakMap values get marked based on the
// order that the map and key are marked. All of these assume the map and key
// get marked at some point:
//
// key marked, then map marked:
// - value was marked with map in `markEntries()`
// map marked, key already in map, key marked before weak marking mode:
// - key added to weakKeys when map marked in `markEntries()`
// - value marked during `enterWeakMarkingMode`
// map marked, key already in map, key marked after weak marking mode:
// - when key is marked, weakKeys[key] triggers marking of value in
// `markImplicitEdges()`
// map marked, key inserted into map, key marked:
// - value marked by insert barrier in `barrierForInsert`
//
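
The ordering cases listed above can be exercised in a small self-contained model. The following is a hypothetical sketch using toy Cell/ToyWeakMap types and std containers rather than SpiderMonkey's GCMarker, WeakKeyTable, or markEntries; it has no gray color and no delegates, and exists only to show that the weak-keys table plus the insert barrier leave the value marked regardless of the order in which the map and key get marked.

#include <cassert>
#include <map>
#include <vector>

struct Cell {
  bool marked = false;
};

struct ToyWeakMap {
  bool mapColor = false;           // "the map itself has been marked"
  std::map<Cell*, Cell*> entries;  // key -> value
};

// weakKeys[key] lists the maps for which 'key' was still unmarked when the
// map was traced (or when the entry was inserted into a marked map).
std::map<Cell*, std::vector<ToyWeakMap*>> weakKeys;
bool weakMarkingMode = false;

void markCell(Cell* c);

// Marking a map: live keys mark their values now; dead keys are recorded.
void markMap(ToyWeakMap* map) {
  map->mapColor = true;
  for (auto& entry : map->entries) {
    if (entry.first->marked) {
      markCell(entry.second);
    } else {
      weakKeys[entry.first].push_back(map);
    }
  }
}

// In weak marking mode, marking a cell also marks any weakmap values keyed
// on it (the markImplicitEdges step).
void markCell(Cell* c) {
  if (c->marked) {
    return;
  }
  c->marked = true;
  if (weakMarkingMode) {
    auto it = weakKeys.find(c);
    if (it == weakKeys.end()) {
      return;
    }
    for (ToyWeakMap* map : it->second) {
      markCell(map->entries.at(c));
    }
    weakKeys.erase(it);  // no gray color in this model, so the entry is done
  }
}

// Entering weak marking mode catches keys marked after they were recorded.
void enterWeakMarkingMode() {
  weakMarkingMode = true;
  for (auto it = weakKeys.begin(); it != weakKeys.end();) {
    if (it->first->marked) {
      for (ToyWeakMap* map : it->second) {
        markCell(map->entries.at(it->first));
      }
      it = weakKeys.erase(it);
    } else {
      ++it;
    }
  }
}

// Insert barrier for a marked map: mark the value now, or record the key.
void barrieredInsert(ToyWeakMap* map, Cell* key, Cell* value) {
  map->entries[key] = value;
  if (map->mapColor) {
    if (key->marked) {
      markCell(value);
    } else {
      weakKeys[key].push_back(map);
    }
  }
}

int main() {
  // Case: map marked first, key only marked after weak marking mode begins.
  Cell key, value;
  ToyWeakMap map;
  map.entries[&key] = &value;
  markMap(&map);
  enterWeakMarkingMode();
  markCell(&key);
  assert(value.marked);  // marked via the weakKeys lookup

  // Case: entry inserted into an already-marked map with an already-marked key.
  Cell key2, value2;
  markCell(&key2);
  barrieredInsert(&map, &key2, &value2);
  assert(value2.marked);  // marked by the insert barrier
}

The same fixed point is what the real marker reaches incrementally: enterWeakMarkingMode catches keys marked before the mode switch, markImplicitEdges catches keys marked after it, and barrierForInsert covers entries added while the map is already marked.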
using WeakMapColors = HashMap<WeakMapBase*, js::gc::CellColor,
DefaultHasher<WeakMapBase*>, SystemAllocPolicy>;
@ -85,6 +124,9 @@ class WeakMapBase : public mozilla::LinkedListElement<WeakMapBase> {
// entries of live weak maps whose keys are dead.
static void sweepZone(JS::Zone* zone);
// Sweep the marked weak maps in a zone, updating moved keys.
static void sweepZoneAfterMinorGC(JS::Zone* zone);
// Trace all weak map bindings. Used by the cycle collector.
static void traceAllMappings(WeakMapTracer* tracer);
@ -112,6 +154,12 @@ class WeakMapBase : public mozilla::LinkedListElement<WeakMapBase> {
// ephemeron marking must override this method.
virtual void markKey(GCMarker* marker, gc::Cell* markedCell, gc::Cell* l) = 0;
// An unmarked CCW with a delegate will add a weakKeys entry for the
// delegate. If the delegate is removed with NukeCrossCompartmentWrapper,
// then the (former) CCW needs to be added to weakKeys instead.
virtual void postSeverDelegate(GCMarker* marker, JSObject* key,
Compartment* comp) = 0;
virtual bool markEntries(GCMarker* marker) = 0;
#ifdef JS_GC_ZEAL
@ -195,26 +243,66 @@ class WeakMap
return p;
}
void remove(Ptr p) {
MOZ_ASSERT(p.found());
if (mapColor) {
forgetKey(p->key());
}
Base::remove(p);
}
void remove(const Lookup& l) {
if (Ptr p = lookup(l)) {
remove(p);
}
}
void clear();
template <typename KeyInput, typename ValueInput>
MOZ_MUST_USE bool put(KeyInput&& key, ValueInput&& value) {
MOZ_ASSERT(key);
return Base::put(std::forward<KeyInput>(key),
std::forward<ValueInput>(value));
MOZ_MUST_USE bool add(AddPtr& p, KeyInput&& k, ValueInput&& v) {
MOZ_ASSERT(k);
if (!Base::add(p, std::forward<KeyInput>(k), std::forward<ValueInput>(v))) {
return false;
}
barrierForInsert(p->key(), p->value());
return true;
}
template <typename KeyInput, typename ValueInput>
MOZ_MUST_USE bool putNew(KeyInput&& key, ValueInput&& value) {
MOZ_ASSERT(key);
return Base::putNew(std::forward<KeyInput>(key),
std::forward<ValueInput>(value));
MOZ_MUST_USE bool relookupOrAdd(AddPtr& p, KeyInput&& k, ValueInput&& v) {
MOZ_ASSERT(k);
if (!Base::relookupOrAdd(p, std::forward<KeyInput>(k),
std::forward<ValueInput>(v))) {
return false;
}
barrierForInsert(p->key(), p->value());
return true;
}
template <typename KeyInput, typename ValueInput>
MOZ_MUST_USE bool relookupOrAdd(AddPtr& ptr, KeyInput&& key,
ValueInput&& value) {
MOZ_ASSERT(key);
return Base::relookupOrAdd(ptr, std::forward<KeyInput>(key),
std::forward<ValueInput>(value));
MOZ_MUST_USE bool put(KeyInput&& k, ValueInput&& v) {
MOZ_ASSERT(k);
AddPtr p = lookupForAdd(k);
if (p) {
p->value() = std::forward<ValueInput>(v);
return true;
}
return add(p, std::forward<KeyInput>(k), std::forward<ValueInput>(v));
}
template <typename KeyInput, typename ValueInput>
MOZ_MUST_USE bool putNew(KeyInput&& k, ValueInput&& v) {
MOZ_ASSERT(k);
barrierForInsert(k, v);
return Base::putNew(std::forward<KeyInput>(k), std::forward<ValueInput>(v));
}
template <typename KeyInput, typename ValueInput>
void putNewInfallible(KeyInput&& k, ValueInput&& v) {
MOZ_ASSERT(k);
barrierForInsert(k, v);
Base::putNewInfallible(std::forward<KeyInput>(k), std::forward<ValueInput>(v));
}
#ifdef DEBUG
@ -230,9 +318,30 @@ class WeakMap
bool markEntry(GCMarker* marker, Key& key, Value& value);
// 'key' has lost its delegate, update our weak key state.
void postSeverDelegate(GCMarker* marker, JSObject* key,
Compartment* comp) override;
void trace(JSTracer* trc) override;
protected:
inline void forgetKey(UnbarrieredKey key);
void barrierForInsert(Key k, const Value& v) {
if (!mapColor) {
return;
}
auto mapZone = JS::shadow::Zone::from(zone());
if (!mapZone->needsIncrementalBarrier()) {
return;
}
JSTracer* trc = mapZone->barrierTracer();
Value tmp = v;
TraceEdge(trc, &tmp, "weakmap inserted value");
MOZ_ASSERT(tmp == v);
}
// We have a key that, if it or its delegate is marked, may lead to a WeakMap
// value getting marked. Insert it or its delegate (if any) into the
// appropriate zone's gcWeakKeys or gcNurseryWeakKeys.

View file

@ -484,6 +484,13 @@ void Zone::discardJitCode(JSFreeOp* fop,
}
}
void JS::Zone::delegatePreWriteBarrierInternal(JSObject* obj,
JSObject* delegate) {
MOZ_ASSERT(js::gc::detail::GetDelegate(obj) == delegate);
MOZ_ASSERT(needsIncrementalBarrier());
GCMarker::fromTracer(barrierTracer())->severWeakDelegate(obj, delegate);
}
#ifdef JSGC_HASH_TABLE_CHECKS
void JS::Zone::checkUniqueIdTableAfterMovingGC() {
for (auto r = uniqueIds().all(); !r.empty(); r.popFront()) {

View file

@ -550,12 +550,24 @@ class Zone : public js::ZoneAllocator, public js::gc::GraphNodeBase<JS::Zone> {
weakCaches().insertBack(cachep);
}
void delegatePreWriteBarrier(JSObject* obj, JSObject* delegate) {
if (needsIncrementalBarrier()) {
delegatePreWriteBarrierInternal(obj, delegate);
}
}
void delegatePreWriteBarrierInternal(JSObject* obj, JSObject* delegate);
js::gc::WeakKeyTable& gcWeakKeys() { return gcWeakKeys_.ref(); }
js::gc::WeakKeyTable& gcNurseryWeakKeys() { return gcNurseryWeakKeys_.ref(); }
js::gc::WeakKeyTable& gcWeakKeys(const js::gc::Cell* cell) {
return cell->isTenured() ? gcWeakKeys() : gcNurseryWeakKeys();
}
// Perform all pending weakmap entry marking for this zone after
// transitioning to weak marking mode.
void enterWeakMarkingMode(js::GCMarker* marker);
js::gc::IncrementalProgress enterWeakMarkingMode(js::GCMarker* marker,
js::SliceBudget& budget);
void checkWeakMarkingMode();
// A set of edges from this zone to other zones used during GC to calculate

View file

@ -17,7 +17,7 @@ function basicSweeping() {
finishgc();
startgc(100000, 'shrinking');
gcslice();
finishgc();
assertEq(wm1.get(hold).name, 'val2');
assertEq(nondeterministicGetWeakMapKeys(wm1).length, 1);
@ -41,7 +41,7 @@ function weakGraph() {
finishgc();
startgc(100000, 'shrinking');
gcslice();
finishgc();
assertEq(obj2.name, "obj2");
assertEq(wm1.get(obj2).name, "obj3");
@ -70,7 +70,7 @@ function deadWeakMap() {
finishgc();
startgc(100000, 'shrinking');
gcslice();
finishgc();
assertEq(obj2.name, "obj2");
assertEq(finalizeCount(), initialCount + 1);
@ -98,7 +98,7 @@ function deadKeys() {
finishgc();
startgc(100000, 'shrinking');
gcslice();
finishgc();
assertEq(finalizeCount(), initialCount + 2);
assertEq(nondeterministicGetWeakMapKeys(wm1).length, 0);
@ -132,7 +132,7 @@ function weakKeysRealloc() {
var initialCount = finalizeCount();
finishgc();
startgc(100000, 'shrinking');
gcslice();
finishgc();
assertEq(finalizeCount(), initialCount + 1);
}

View file

@ -28,6 +28,7 @@
#include "gc/GC-inl.h"
#include "gc/Marking-inl.h"
#include "gc/WeakMap-inl.h"
#include "vm/JSAtom-inl.h"
#include "vm/JSFunction-inl.h"
#include "vm/JSObject-inl.h"
@ -85,6 +86,16 @@ bool Compartment::putWrapper(JSContext* cx, JSString* wrapped,
return true;
}
void Compartment::removeWrapper(js::ObjectWrapperMap::Ptr p) {
JSObject* key = p->key();
JSObject* value = p->value().unbarrieredGet();
if (js::gc::detail::GetDelegate(value) == key) {
key->zone()->delegatePreWriteBarrier(value, key);
}
crossCompartmentObjectWrappers.remove(p);
}
static JSString* CopyStringPure(JSContext* cx, JSString* str) {
/*
* Directly allocate the copy in the destination compartment, rather than

View file

@ -380,9 +380,7 @@ class JS::Compartment {
inline js::StringWrapperMap::Ptr lookupWrapper(JSString* str) const;
void removeWrapper(js::ObjectWrapperMap::Ptr p) {
crossCompartmentObjectWrappers.remove(p);
}
void removeWrapper(js::ObjectWrapperMap::Ptr p);
bool hasNurseryAllocatedObjectWrapperEntries(const js::CompartmentFilter& f) {
return crossCompartmentObjectWrappers.hasNurseryAllocatedWrapperEntries(f);

View file

@ -12,6 +12,7 @@
#include "vm/Realm.h"
#include "gc/ObjectKind-inl.h"
#include "gc/WeakMap-inl.h"
#include "vm/JSObject-inl.h"
#include "vm/TypeInference-inl.h"
@ -267,6 +268,15 @@ inline void ProxyObject::setPrivate(const Value& priv) {
}
void ProxyObject::nuke() {
// Notify the zone that a delegate is no longer a delegate. Be careful not to
// expose this pointer, because it has already been removed from the wrapper
// map, yet we have assertions during tracing that verify that it is still
// present.
JSObject* delegate = UncheckedUnwrapWithoutExpose(this);
if (delegate != this) {
delegate->zone()->delegatePreWriteBarrier(this, delegate);
}
// Clear the target reference and replace it with a value that encodes
// various information about the original target.
setSameCompartmentPrivate(DeadProxyTargetValue(this));
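
The delegate bookkeeping touched by ProxyObject::nuke and Compartment::removeWrapper can also be modeled in isolation. The sketch below uses hypothetical toy types and std containers instead of GCMarker::severWeakDelegate, postSeverDelegate, and gcWeakKeys; it only shows why a markable filed under a delegate has to be re-filed under the key itself once the delegate relationship is severed.

#include <cassert>
#include <map>
#include <vector>

struct Cell {};
struct ToyMap {};

struct Markable {
  ToyMap* map;
  Cell* key;
};

using ToyWeakKeyTable = std::map<Cell*, std::vector<Markable>>;

// While 'key' has a delegate, its markable is filed under the delegate,
// because marking the delegate is what ends up marking the key.
void addWeakEntry(ToyWeakKeyTable& table, Cell* key, Cell* delegate,
                  ToyMap* map) {
  table[delegate ? delegate : key].push_back(Markable{map, key});
}

// 'delegate' is no longer the delegate of 'key': markables for 'key' filed
// under the delegate must be re-filed under the key itself, or marking the
// key would no longer mark the corresponding weakmap values.
void severDelegate(ToyWeakKeyTable& table, Cell* key, Cell* delegate) {
  auto it = table.find(delegate);
  if (it == table.end()) {
    return;
  }
  auto& markables = it->second;
  for (auto m = markables.begin(); m != markables.end();) {
    if (m->key == key) {
      table[key].push_back(*m);  // re-file under the key
      m = markables.erase(m);
    } else {
      ++m;
    }
  }
}

int main() {
  Cell key, delegate;
  ToyMap map;
  ToyWeakKeyTable table;

  addWeakEntry(table, &key, &delegate, &map);
  assert(table.count(&delegate) == 1 && table.count(&key) == 0);

  severDelegate(table, &key, &delegate);
  assert(table.count(&key) == 1 && table[&key].size() == 1);
  assert(table[&delegate].empty());
}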