Bug 944040 - Do not use the store buffer during moving GC; r=jonco

--HG--
extra : rebase_source : cca04b1eb34c733a615a8cab12fb1625e940a1c9
Terrence Cole 2013-11-25 14:06:56 -08:00
Parent 98d0d39ffd
Commit bcf90db7e6
6 changed files with 114 additions and 40 deletions

View file

@@ -1103,29 +1103,50 @@ MapObject::mark(JSTracer *trc, JSObject *obj)
}
#ifdef JSGC_GENERATIONAL
struct UnbarrieredHashPolicy {
typedef Value Lookup;
static HashNumber hash(const Lookup &v) { return v.asRawBits(); }
static bool match(const Value &k, const Lookup &l) { return k == l; }
static bool isEmpty(const Value &v) { return v.isMagic(JS_HASH_KEY_EMPTY); }
static void makeEmpty(Value *vp) { vp->setMagic(JS_HASH_KEY_EMPTY); }
};
template <typename TableType>
class OrderedHashTableRef : public gc::BufferableRef
{
TableType *table;
HashableValue key;
Value key;
public:
explicit OrderedHashTableRef(TableType *t, const HashableValue &k) : table(t), key(k) {}
explicit OrderedHashTableRef(TableType *t, const Value &k) : table(t), key(k) {}
void mark(JSTracer *trc) {
HashableValue prior = key;
key = key.mark(trc);
JS_ASSERT(UnbarrieredHashPolicy::hash(key) ==
HashableValue::Hasher::hash(*reinterpret_cast<HashableValue*>(&key)));
Value prior = key;
gc::MarkValueUnbarriered(trc, &key, "ordered hash table key");
table->rekeyOneEntry(prior, key);
}
};
#endif
template <typename TableType>
static void
WriteBarrierPost(JSRuntime *rt, TableType *table, const HashableValue &key)
WriteBarrierPost(JSRuntime *rt, ValueMap *map, const HashableValue &key)
{
#ifdef JSGC_GENERATIONAL
rt->gcStoreBuffer.putGeneric(OrderedHashTableRef<TableType>(table, key));
typedef OrderedHashMap<Value, Value, UnbarrieredHashPolicy, RuntimeAllocPolicy> UnbarrieredMap;
rt->gcStoreBuffer.putGeneric(OrderedHashTableRef<UnbarrieredMap>(
reinterpret_cast<UnbarrieredMap *>(map), key.get()));
#endif
}
static void
WriteBarrierPost(JSRuntime *rt, ValueSet *set, const HashableValue &key)
{
#ifdef JSGC_GENERATIONAL
typedef OrderedHashSet<Value, UnbarrieredHashPolicy, RuntimeAllocPolicy> UnbarrieredSet;
rt->gcStoreBuffer.putGeneric(OrderedHashTableRef<UnbarrieredSet>(
reinterpret_cast<UnbarrieredSet *>(set), key.get()));
#endif
}
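The two overloads above depend on HashableValue being layout-compatible with a plain Value and on UnbarrieredHashPolicy producing the same hash codes as HashableValue::Hasher; otherwise the reinterpret_cast would hand rekeyOneEntry a key that probes the wrong hash bucket. A minimal sketch of that invariant, with hypothetical checks that are not part of the patch:

    // Hypothetical sanity checks for the reinterpret_casts used above.
    JS_STATIC_ASSERT(sizeof(HashableValue) == sizeof(Value));

    static bool
    UnbarrieredHashAgrees(const HashableValue &k)
    {
        // The store-buffer ref hashes the key as a raw Value, so both
        // policies must agree for every key the table can contain.
        return HashableValue::Hasher::hash(k) ==
               UnbarrieredHashPolicy::hash(*reinterpret_cast<const Value *>(&k));
    }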

View file

@@ -253,27 +253,10 @@ ZoneOfValueFromAnyThread(const JS::Value &value)
return static_cast<js::gc::Cell *>(value.toGCThing())->tenuredZoneFromAnyThread();
}
/*
* This is a post barrier for HashTables whose key is a GC pointer. Any
* insertion into a HashTable not marked as part of the runtime, with a GC
* pointer as a key, must call this immediately after each insertion.
*/
template <class Map, class Key>
inline void
HashTableWriteBarrierPost(JSRuntime *rt, Map *map, const Key &key)
{
#ifdef JSGC_GENERATIONAL
if (key && IsInsideNursery(rt, key)) {
JS::shadow::Runtime *shadowRuntime = JS::shadow::Runtime::asShadowRuntime(rt);
shadowRuntime->gcStoreBufferPtr()->putGeneric(gc::HashKeyRef<Map, Key>(map, key));
}
#endif
}
/*
* Base class for barriered pointer types.
*/
template<class T, typename Unioned = uintptr_t>
template <class T, typename Unioned = uintptr_t>
class BarrieredPtr
{
protected:
@@ -312,7 +295,7 @@ class BarrieredPtr
void pre() { T::writeBarrierPre(value); }
};
template<class T, typename Unioned = uintptr_t>
template <class T, typename Unioned = uintptr_t>
class EncapsulatedPtr : public BarrieredPtr<T, Unioned>
{
public:
@@ -381,7 +364,7 @@ class HeapPtr : public BarrieredPtr<T, Unioned>
void post() { T::writeBarrierPost(this->value, (void *)&this->value); }
/* Make this friend so it can access pre() and post(). */
template<class T1, class T2>
template <class T1, class T2>
friend inline void
BarrieredSetPair(Zone *zone,
HeapPtr<T1> &v1, T1 *val1,
@@ -483,7 +466,7 @@ class RelocatablePtr : public BarrieredPtr<T>
* This is a hack for RegExpStatics::updateFromMatch. It allows us to do two
* barriers with only one branch to check if we're in an incremental GC.
*/
template<class T1, class T2>
template <class T1, class T2>
static inline void
BarrieredSetPair(Zone *zone,
HeapPtr<T1> &v1, T1 *val1,
@@ -522,7 +505,8 @@ typedef HeapPtr<BaseShape> HeapPtrBaseShape;
typedef HeapPtr<types::TypeObject> HeapPtrTypeObject;
/* Useful for hashtables with a HeapPtr as key. */
template<class T>
template <class T>
struct HeapPtrHasher
{
typedef HeapPtr<T> Key;
@@ -537,7 +521,7 @@ struct HeapPtrHasher
template <class T>
struct DefaultHasher< HeapPtr<T> > : HeapPtrHasher<T> { };
template<class T>
template <class T>
struct EncapsulatedPtrHasher
{
typedef EncapsulatedPtr<T> Key;
@@ -1113,7 +1097,7 @@ class HeapId : public BarrieredId
* may collect the empty shape even though a live object points to it. To fix
* this, we mark these empty shapes black whenever they get read out.
*/
template<class T>
template <class T>
class ReadBarriered
{
T *value;
@@ -1158,6 +1142,20 @@ class ReadBarrieredValue
inline JSObject &toObject() const;
};
/*
* Operations on a Heap thing inside the GC need to strip the barriers from
* pointer operations. This template helps do that in contexts where the type
* is templatized.
*/
template <typename T> struct Unbarriered {};
template <typename S> struct Unbarriered< EncapsulatedPtr<S> > { typedef S *type; };
template <typename S> struct Unbarriered< RelocatablePtr<S> > { typedef S *type; };
template <> struct Unbarriered<EncapsulatedValue> { typedef Value type; };
template <> struct Unbarriered<RelocatableValue> { typedef Value type; };
template <typename S> struct Unbarriered< DefaultHasher< EncapsulatedPtr<S> > > {
typedef DefaultHasher<S *> type;
};
} /* namespace js */
#endif /* gc_Barrier_h */
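The Unbarriered<> specializations map each barriered wrapper back to the raw type it stores, which lets GC-internal template code name the plain-pointer twin of a barriered container. A short usage sketch; the typedef names are illustrative only:

    // Illustrative only: resolving barriered types to their raw equivalents.
    typedef Unbarriered< EncapsulatedPtr<JSObject> >::type RawObject;   // JSObject *
    typedef Unbarriered< RelocatableValue >::type RawValue;             // Value
    typedef Unbarriered< DefaultHasher< EncapsulatedPtr<JSObject> > >::type RawHasher; // DefaultHasher<JSObject *>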

View file

@@ -268,6 +268,21 @@ TryPreserveReflector(JSContext *cx, HandleObject obj)
return true;
}
static inline void
WeakMapPostWriteBarrier(JSRuntime *rt, ObjectValueMap *map, JSObject *key)
{
#ifdef JSGC_GENERATIONAL
/*
* Strip the barriers from the type before inserting into the store buffer.
* This will automatically ensure that barriers do not fire during GC.
*/
typedef WeakMap<JSObject *, Value> UnbarrieredObjectValueMap;
typedef HashKeyRef<UnbarrieredObjectValueMap, JSObject *> Ref;
if (key && IsInsideNursery(rt, key))
rt->gcStoreBuffer.putGeneric(Ref(reinterpret_cast<UnbarrieredObjectValueMap *>(map), key));
#endif
}
JS_ALWAYS_INLINE bool
WeakMap_set_impl(JSContext *cx, CallArgs args)
{
@@ -312,7 +327,7 @@ WeakMap_set_impl(JSContext *cx, CallArgs args)
JS_ReportOutOfMemory(cx);
return false;
}
HashTableWriteBarrierPost(cx->runtime(), map, key.get());
WeakMapPostWriteBarrier(cx->runtime(), map, key.get());
args.rval().setUndefined();
return true;
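WeakMapPostWriteBarrier is one instance of the pattern this patch applies throughout: buffer the nursery key against an alias of the map whose key and value types carry no barriers, so the rekey performed during the next minor GC is a plain pointer update. A hedged sketch of the general shape; the helper name and template parameters are illustrative, not part of the patch:

    // Sketch: post-barrier a nursery-allocated hash key by buffering a ref
    // against an unbarriered, layout-compatible view of the same table.
    template <typename BarrieredMap, typename UnbarrieredMap, typename Key>
    static void
    PostBarrierHashKey(JSRuntime *rt, BarrieredMap *map, Key key)
    {
    #ifdef JSGC_GENERATIONAL
        if (key && IsInsideNursery(rt, key)) {
            rt->gcStoreBuffer.putGeneric(
                gc::HashKeyRef<UnbarrieredMap, Key>(
                    reinterpret_cast<UnbarrieredMap *>(map), key));
        }
    #endif
    }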

View file

@@ -131,6 +131,7 @@ class WeakMap : public HashMap<Key, Value, HashPolicy, RuntimeAllocPolicy>, publ
public:
typedef HashMap<Key, Value, HashPolicy, RuntimeAllocPolicy> Base;
typedef typename Base::Enum Enum;
typedef typename Base::Lookup Lookup;
typedef typename Base::Range Range;
explicit WeakMap(JSContext *cx, JSObject *memOf = nullptr)
@@ -150,7 +151,7 @@ class WeakMap : public HashMap<Key, Value, HashPolicy, RuntimeAllocPolicy>, publ
Key key(e.front().key);
gc::Mark(trc, &key, "WeakMap entry key");
if (key != e.front().key)
e.rekeyFront(key, key);
entryMoved(e, key);
}
}
@@ -185,11 +186,11 @@ class WeakMap : public HashMap<Key, Value, HashPolicy, RuntimeAllocPolicy>, publ
if (markValue(trc, &e.front().value))
markedAny = true;
if (e.front().key != key)
e.rekeyFront(key);
entryMoved(e, key);
} else if (keyNeedsMark(key)) {
gc::Mark(trc, const_cast<Key *>(&key), "proxy-preserved WeakMap entry key");
if (e.front().key != key)
e.rekeyFront(key);
entryMoved(e, key);
gc::Mark(trc, &e.front().value, "WeakMap entry value");
markedAny = true;
}
@@ -205,7 +206,7 @@ class WeakMap : public HashMap<Key, Value, HashPolicy, RuntimeAllocPolicy>, publ
if (gc::IsAboutToBeFinalized(&k))
e.removeFront();
else if (k != e.front().key)
e.rekeyFront(k, k);
entryMoved(e, k);
}
/*
* Once we've swept, all remaining edges should stay within the
@@ -227,6 +228,16 @@ class WeakMap : public HashMap<Key, Value, HashPolicy, RuntimeAllocPolicy>, publ
}
}
/* Rekey an entry when moved, ensuring we do not trigger barriers. */
void entryMoved(Enum &eArg, const Key &k) {
typedef typename HashMap<typename Unbarriered<Key>::type,
typename Unbarriered<Value>::type,
typename Unbarriered<HashPolicy>::type,
RuntimeAllocPolicy>::Enum UnbarrieredEnum;
UnbarrieredEnum &e = reinterpret_cast<UnbarrieredEnum &>(eArg);
e.rekeyFront(reinterpret_cast<const typename Unbarriered<Key>::type &>(k));
}
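/*
 * Hedged sketch of what the cast above buys (the key type named here is
 * illustrative): Enum::rekeyFront() copy-assigns the stored key, and for a
 * barriered Key such as EncapsulatedPtr<JSObject> that assignment runs
 * barrier code, which must not happen while the GC is moving cells.
 * Reinterpreted as the layout-compatible
 *
 *   HashMap<JSObject *, Value, DefaultHasher<JSObject *>, RuntimeAllocPolicy>::Enum
 *
 * the same rekeyFront() is a plain pointer store, so no barriers fire.
 */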
protected:
void assertEntriesNotAboutToBeFinalized() {
#if DEBUG

View file

@@ -1528,6 +1528,31 @@ js_IsDebugScopeSlow(ProxyObject *proxy)
/*****************************************************************************/
/* static */ JS_ALWAYS_INLINE void
DebugScopes::proxiedScopesPostWriteBarrier(JSRuntime *rt, ObjectWeakMap *map,
const EncapsulatedPtr<JSObject> &key)
{
#ifdef JSGC_GENERATIONAL
/*
* Strip the barriers from the type before inserting into the store buffer.
* This will automatically ensure that barriers do not fire during GC.
*/
typedef WeakMap<JSObject *, JSObject *> UnbarrieredMap;
typedef gc::HashKeyRef<UnbarrieredMap, JSObject *> Ref;
if (key && IsInsideNursery(rt, key))
rt->gcStoreBuffer.putGeneric(Ref(reinterpret_cast<UnbarrieredMap *>(map), key.get()));
#endif
}
/* static */ JS_ALWAYS_INLINE void
DebugScopes::liveScopesPostWriteBarrier(JSRuntime *rt, LiveScopeMap *map, ScopeObject *key)
{
#ifdef JSGC_GENERATIONAL
if (key && IsInsideNursery(rt, key))
rt->gcStoreBuffer.putGeneric(gc::HashKeyRef<LiveScopeMap, ScopeObject *>(map, key));
#endif
}
DebugScopes::DebugScopes(JSContext *cx)
: proxiedScopes(cx),
missingScopes(cx->runtime()),
@@ -1661,7 +1686,7 @@ DebugScopes::addDebugScope(JSContext *cx, ScopeObject &scope, DebugScopeObject &
return false;
}
HashTableWriteBarrierPost(cx->runtime(), &scopes->proxiedScopes, &scope);
proxiedScopesPostWriteBarrier(cx->runtime(), &scopes->proxiedScopes, &scope);
return true;
}
@@ -1705,7 +1730,7 @@ DebugScopes::addDebugScope(JSContext *cx, const ScopeIter &si, DebugScopeObject
js_ReportOutOfMemory(cx);
return false;
}
HashTableWriteBarrierPost(cx->runtime(), &scopes->liveScopes, &debugScope.scope());
liveScopesPostWriteBarrier(cx->runtime(), &scopes->liveScopes, &debugScope.scope());
return true;
}
@@ -1859,7 +1884,7 @@ DebugScopes::onGeneratorFrameChange(AbstractFramePtr from, AbstractFramePtr to,
livePtr->value = to;
} else {
scopes->liveScopes.add(livePtr, &toIter.scope(), to); // OOM here?
HashTableWriteBarrierPost(cx->runtime(), &scopes->liveScopes, &toIter.scope());
liveScopesPostWriteBarrier(cx->runtime(), &scopes->liveScopes, &toIter.scope());
}
} else {
ScopeIter si(toIter, from, cx);
@@ -1921,7 +1946,7 @@ DebugScopes::updateLiveScopes(JSContext *cx)
return false;
if (!scopes->liveScopes.put(&si.scope(), frame))
return false;
HashTableWriteBarrierPost(cx->runtime(), &scopes->liveScopes, &si.scope());
liveScopesPostWriteBarrier(cx->runtime(), &scopes->liveScopes, &si.scope());
}
}
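The two helpers differ only in their key type, which is why only one of them needs a cast: proxiedScopes keys on EncapsulatedPtr<JSObject>, so its map is viewed as a barrier-free WeakMap<JSObject *, JSObject *> before buffering, while liveScopes already keys on raw ScopeObject pointers and is buffered as-is. Illustrative call sites, simplified from the patch:

    // proxiedScopes: barriered key type, helper strips the barriers internally.
    proxiedScopesPostWriteBarrier(cx->runtime(), &scopes->proxiedScopes, &scope);
    // liveScopes: raw ScopeObject * key, no reinterpret_cast required.
    liveScopesPostWriteBarrier(cx->runtime(), &scopes->liveScopes, &si.scope());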

View file

@@ -669,6 +669,8 @@ class DebugScopes
/* The map from (non-debug) scopes to debug scopes. */
typedef WeakMap<EncapsulatedPtrObject, RelocatablePtrObject> ObjectWeakMap;
ObjectWeakMap proxiedScopes;
static JS_ALWAYS_INLINE void proxiedScopesPostWriteBarrier(JSRuntime *rt, ObjectWeakMap *map,
const EncapsulatedPtrObject &key);
/*
* The map from live frames which have optimized-away scopes to the
@@ -692,6 +694,8 @@ class DebugScopes
DefaultHasher<ScopeObject *>,
RuntimeAllocPolicy> LiveScopeMap;
LiveScopeMap liveScopes;
static JS_ALWAYS_INLINE void liveScopesPostWriteBarrier(JSRuntime *rt, LiveScopeMap *map,
ScopeObject *key);
public:
DebugScopes(JSContext *c);