Bug 1581574 - Remove Heap<T> write barrier and rely on existing read barrier to make incremental marking work r=sfink

Differential Revision: https://phabricator.services.mozilla.com/D46133

--HG--
extra : moz-landing-system : lando
Jon Coppeard 2019-09-20 10:09:19 +00:00
Parent: bc1b3fab70
Commit: ad4df9ae33
8 changed files with 75 additions and 58 deletions
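
The gist of the change: Heap<T> already applies a read barrier to every value
handed out through get()/operator T(), and during an incremental GC that
barrier marks whatever is read, so the pre-write barrier that used to run on
every store is redundant and is removed; stores keep only the generational
post-write barrier. A minimal sketch of the two paths (illustrative only, not
code from this patch; ReadSlot and OverwriteSlot are made-up names):

  #include "js/RootingAPI.h"  // JS::Heap

  JSObject* ReadSlot(const JS::Heap<JSObject*>& slot) {
    // Heap<T>::get() runs the read barrier (BarrierMethods<JSObject*>::exposeToJS),
    // marking the object if an incremental GC is in progress.
    return slot.get();
  }

  void OverwriteSlot(JS::Heap<JSObject*>& slot, JSObject* replacement) {
    // operator= -> Heap<T>::set() now runs only the post-write barrier
    // (JS::HeapObjectPostWriteBarrier); the old value needs no pre-write
    // barrier because any escaping use of it went through the read barrier.
    slot = replacement;
  }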


@@ -79,17 +79,17 @@ template <class UncompiledT>
 struct BarrierMethods<nsXBLMaybeCompiled<UncompiledT>> {
   typedef struct BarrierMethods<JSObject*> Base;
-  static void writeBarriers(nsXBLMaybeCompiled<UncompiledT>* functionp,
-                            nsXBLMaybeCompiled<UncompiledT> prev,
-                            nsXBLMaybeCompiled<UncompiledT> next) {
+  static void postWriteBarrier(nsXBLMaybeCompiled<UncompiledT>* functionp,
+                               nsXBLMaybeCompiled<UncompiledT> prev,
+                               nsXBLMaybeCompiled<UncompiledT> next) {
     if (next.IsCompiled()) {
-      Base::writeBarriers(
+      Base::postWriteBarrier(
           &functionp->UnsafeGetJSFunction(),
           prev.IsCompiled() ? prev.UnsafeGetJSFunction() : nullptr,
           next.UnsafeGetJSFunction());
     } else if (prev.IsCompiled()) {
-      Base::writeBarriers(&prev.UnsafeGetJSFunction(),
-                          prev.UnsafeGetJSFunction(), nullptr);
+      Base::postWriteBarrier(&prev.UnsafeGetJSFunction(),
+                             prev.UnsafeGetJSFunction(), nullptr);
     }
   }
 
   static void exposeToJS(nsXBLMaybeCompiled<UncompiledT> fun) {


@@ -201,13 +201,9 @@ struct BarrierMethods<jsid> {
     }
     return nullptr;
   }
-  static void writeBarriers(jsid* idp, jsid prev, jsid next) {
-    if (JSID_IS_STRING(prev)) {
-      JS::IncrementalPreWriteBarrier(JS::GCCellPtr(JSID_TO_STRING(prev)));
-    }
-    if (JSID_IS_SYMBOL(prev)) {
-      JS::IncrementalPreWriteBarrier(JS::GCCellPtr(JSID_TO_SYMBOL(prev)));
-    }
+  static void postWriteBarrier(jsid* idp, jsid prev, jsid next) {
+    MOZ_ASSERT_IF(JSID_IS_STRING(next),
+                  !gc::IsInsideNursery(JSID_TO_STRING(next)));
   }
   static void exposeToJS(jsid id) {
     if (JSID_IS_GCTHING(id)) {


@@ -209,6 +209,10 @@ class Rooted;
 template <typename T>
 class PersistentRooted;
 
+JS_FRIEND_API void HeapObjectPostWriteBarrier(JSObject** objp, JSObject* prev,
+                                              JSObject* next);
+JS_FRIEND_API void HeapStringPostWriteBarrier(JSString** objp, JSString* prev,
+                                              JSString* next);
 JS_FRIEND_API void HeapObjectWriteBarriers(JSObject** objp, JSObject* prev,
                                            JSObject* next);
 JS_FRIEND_API void HeapStringWriteBarriers(JSString** objp, JSString* prev,
@@ -274,9 +278,13 @@ inline void AssertGCThingIsNotNurseryAllocable(js::gc::Cell* cell) {}
  *
  * Heap<T> implements the following barriers:
  *
- * - Pre-write barrier (necessary for incremental GC).
  * - Post-write barrier (necessary for generational GC).
- * - Read barrier (necessary for cycle collector integration).
+ * - Read barrier (necessary for incremental GC and cycle collector
+ *   integration).
+ *
+ * Note Heap<T> does not have a pre-write barrier as used internally in the
+ * engine. The read barrier is used to mark anything read from a Heap<T> during
+ * an incremental GC.
  *
  * Heap<T> may be moved or destroyed outside of GC finalization and hence may be
  * used in dynamic storage such as a Vector.
@@ -314,7 +322,7 @@ class MOZ_NON_MEMMOVABLE Heap : public js::HeapBase<T, Heap<T>> {
    */
   explicit Heap(const Heap<T>& p) { init(p.ptr); }
 
-  ~Heap() { writeBarriers(ptr, SafelyInitialized<T>()); }
+  ~Heap() { postWriteBarrier(ptr, SafelyInitialized<T>()); }
 
   DECLARE_POINTER_CONSTREF_OPS(T);
   DECLARE_POINTER_ASSIGN_OPS(Heap, T);
@@ -342,17 +350,17 @@ class MOZ_NON_MEMMOVABLE Heap : public js::HeapBase<T, Heap<T>> {
  private:
   void init(const T& newPtr) {
     ptr = newPtr;
-    writeBarriers(SafelyInitialized<T>(), ptr);
+    postWriteBarrier(SafelyInitialized<T>(), ptr);
   }
 
   void set(const T& newPtr) {
     T tmp = ptr;
     ptr = newPtr;
-    writeBarriers(tmp, ptr);
+    postWriteBarrier(tmp, ptr);
   }
 
-  void writeBarriers(const T& prev, const T& next) {
-    js::BarrierMethods<T>::writeBarriers(&ptr, prev, next);
+  void postWriteBarrier(const T& prev, const T& next) {
+    js::BarrierMethods<T>::postWriteBarrier(&ptr, prev, next);
   }
 
   T ptr;
@@ -446,7 +454,6 @@ class TenuredHeap : public js::HeapBase<T, TenuredHeap<T>> {
   explicit TenuredHeap(const TenuredHeap<T>& p) : bits(0) {
     setPtr(p.getPtr());
   }
-  ~TenuredHeap() { pre(); }
 
   void setPtr(T newPtr) {
     MOZ_ASSERT((reinterpret_cast<uintptr_t>(newPtr) & flagsMask) == 0);
@@ -454,11 +461,6 @@ class TenuredHeap : public js::HeapBase<T, TenuredHeap<T>> {
     if (newPtr) {
       AssertGCThingMustBeTenured(newPtr);
     }
-    pre();
-    unbarrieredSetPtr(newPtr);
-  }
-
-  void unbarrieredSetPtr(T newPtr) {
     bits = (bits & flagsMask) | reinterpret_cast<uintptr_t>(newPtr);
   }
@@ -514,12 +516,6 @@ class TenuredHeap : public js::HeapBase<T, TenuredHeap<T>> {
     flagsMask = (1 << maskBits) - 1,
   };
 
-  void pre() {
-    if (T prev = unbarrieredGetPtr()) {
-      JS::IncrementalPreWriteBarrier(JS::GCCellPtr(prev));
-    }
-  }
-
   uintptr_t bits;
 };
@@ -703,10 +699,7 @@ struct PtrBarrierMethodsBase {
 template <typename T>
 struct BarrierMethods<T*> : public detail::PtrBarrierMethodsBase<T> {
-  static void writeBarriers(T** vp, T* prev, T* next) {
-    if (prev) {
-      JS::IncrementalPreWriteBarrier(JS::GCCellPtr(prev));
-    }
+  static void postWriteBarrier(T** vp, T* prev, T* next) {
     if (next) {
       JS::AssertGCThingIsNotNurseryAllocable(
           reinterpret_cast<js::gc::Cell*>(next));
@@ -717,8 +710,8 @@ struct BarrierMethods<T*> : public detail::PtrBarrierMethodsBase<T> {
 template <>
 struct BarrierMethods<JSObject*>
     : public detail::PtrBarrierMethodsBase<JSObject> {
-  static void writeBarriers(JSObject** vp, JSObject* prev, JSObject* next) {
-    JS::HeapObjectWriteBarriers(vp, prev, next);
+  static void postWriteBarrier(JSObject** vp, JSObject* prev, JSObject* next) {
+    JS::HeapObjectPostWriteBarrier(vp, prev, next);
   }
   static void exposeToJS(JSObject* obj) {
     if (obj) {
@@ -730,11 +723,11 @@ struct BarrierMethods<JSObject*>
 template <>
 struct BarrierMethods<JSFunction*>
     : public detail::PtrBarrierMethodsBase<JSFunction> {
-  static void writeBarriers(JSFunction** vp, JSFunction* prev,
-                            JSFunction* next) {
-    JS::HeapObjectWriteBarriers(reinterpret_cast<JSObject**>(vp),
-                                reinterpret_cast<JSObject*>(prev),
-                                reinterpret_cast<JSObject*>(next));
+  static void postWriteBarrier(JSFunction** vp, JSFunction* prev,
+                               JSFunction* next) {
+    JS::HeapObjectPostWriteBarrier(reinterpret_cast<JSObject**>(vp),
+                                   reinterpret_cast<JSObject*>(prev),
+                                   reinterpret_cast<JSObject*>(next));
   }
   static void exposeToJS(JSFunction* fun) {
     if (fun) {
@@ -746,16 +739,8 @@ struct BarrierMethods<JSFunction*>
 template <>
 struct BarrierMethods<JSString*>
     : public detail::PtrBarrierMethodsBase<JSString> {
-  static void writeBarriers(JSString** vp, JSString* prev, JSString* next) {
-    JS::HeapStringWriteBarriers(vp, prev, next);
-  }
-};
-
-template <>
-struct BarrierMethods<JSScript*>
-    : public detail::PtrBarrierMethodsBase<JSScript> {
-  static void writeBarriers(JSScript** vp, JSScript* prev, JSScript* next) {
-    JS::HeapScriptWriteBarriers(vp, prev, next);
+  static void postWriteBarrier(JSString** vp, JSString* prev, JSString* next) {
+    JS::HeapStringPostWriteBarrier(vp, prev, next);
   }
 };
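
For code embedding SpiderMonkey, the resulting barrier wiring looks like the
following (a hedged sketch; ObjectHolder is a made-up example class, not part
of this commit):

  #include "js/RootingAPI.h"
  #include "js/TracingAPI.h"

  class ObjectHolder {
    JS::Heap<JSObject*> mObj;

   public:
    void setObject(JSObject* obj) {
      // operator= -> Heap<T>::set() -> BarrierMethods<JSObject*>::postWriteBarrier()
      // -> JS::HeapObjectPostWriteBarrier(), which records the edge in the
      // store buffer when |obj| is nursery-allocated.
      mObj = obj;
    }

    void trace(JSTracer* trc) {
      // Tracing keeps the edge alive and updates it if the object moves;
      // reads of mObj elsewhere go through the Heap<T> read barrier.
      JS::TraceEdge(trc, &mObj, "ObjectHolder::mObj");
    }
  };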


@@ -419,7 +419,7 @@ inline void TraceEdge(JSTracer* trc, JS::TenuredHeap<T>* thingp,
   MOZ_ASSERT(thingp);
   if (T ptr = thingp->unbarrieredGetPtr()) {
     js::gc::TraceExternalEdge(trc, &ptr, name);
-    thingp->unbarrieredSetPtr(ptr);
+    thingp->setPtr(ptr);
   }
 }


@@ -1121,6 +1121,8 @@ inline bool SameType(const Value& lhs, const Value& rhs) {
 /************************************************************************/
 
 namespace JS {
+JS_PUBLIC_API void HeapValuePostWriteBarrier(Value* valuep, const Value& prev,
+                                             const Value& next);
 JS_PUBLIC_API void HeapValueWriteBarriers(Value* valuep, const Value& prev,
                                           const Value& next);
@@ -1146,9 +1148,9 @@ struct BarrierMethods<JS::Value> {
   static gc::Cell* asGCThingOrNull(const JS::Value& v) {
     return v.isGCThing() ? v.toGCThing() : nullptr;
   }
-  static void writeBarriers(JS::Value* v, const JS::Value& prev,
-                            const JS::Value& next) {
-    JS::HeapValueWriteBarriers(v, prev, next);
+  static void postWriteBarrier(JS::Value* v, const JS::Value& prev,
+                               const JS::Value& next) {
+    JS::HeapValuePostWriteBarrier(v, prev, next);
   }
   static void exposeToJS(const JS::Value& v) { JS::ExposeValueToActiveJS(v); }
 };


@@ -34,6 +34,14 @@ pub unsafe trait Trace {
  * C/C++ stack must use Rooted/Handle/MutableHandle instead.
  *
  * Type T must be a public GC pointer type.
+ *
+ * Note that the rust version of Heap<T> implements different barriers to the
+ * C++ version, which also provides features to help integration with
+ * cycle-collected C++ objects. That version has a read barrier which performs
+ * gray unmarking and also marks the contents during an incremental GC. This
+ * version has a pre-write barrier instead, and this enforces the
+ * snapshot-at-the-beginning invariant which is necessary for incremental GC in
+ * the absence of the read barrier.
  */
 #[repr(C)]
 #[derive(Debug)]
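
The contrast between the two schemes, expressed with the entry points this
patch adds and keeps (a conceptual sketch; StoreLikeRustHeap and
StoreLikeCppHeap are made-up helpers, not code from this commit):

  #include "js/RootingAPI.h"

  // Rust Heap<T>: combined pre- and post-write barrier on every store. The
  // pre-write half preserves the snapshot-at-the-beginning invariant by
  // marking the old value, since nothing read-barriers values on the way out.
  void StoreLikeRustHeap(JSObject** slot, JSObject* next) {
    JS::HeapObjectWriteBarriers(slot, /* prev */ *slot, /* next */ next);
    *slot = next;
  }

  // C++ Heap<T> after this patch: post-write barrier only; incremental-GC
  // correctness comes from the read barrier applied whenever the slot is read.
  void StoreLikeCppHeap(JSObject** slot, JSObject* next) {
    JS::HeapObjectPostWriteBarrier(slot, /* prev */ *slot, /* next */ next);
    *slot = next;
  }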


@@ -191,6 +191,32 @@ template struct JS_PUBLIC_API MovableCellHasher<WasmInstanceObject*>;
 }  // namespace js
 
+// Post-write barrier, used by the C++ Heap<T> implementation.
+
+JS_PUBLIC_API void JS::HeapObjectPostWriteBarrier(JSObject** objp,
+                                                  JSObject* prev,
+                                                  JSObject* next) {
+  MOZ_ASSERT(objp);
+  js::InternalBarrierMethods<JSObject*>::postBarrier(objp, prev, next);
+}
+
+JS_PUBLIC_API void JS::HeapStringPostWriteBarrier(JSString** strp,
+                                                  JSString* prev,
+                                                  JSString* next) {
+  MOZ_ASSERT(strp);
+  js::InternalBarrierMethods<JSString*>::postBarrier(strp, prev, next);
+}
+
+JS_PUBLIC_API void JS::HeapValuePostWriteBarrier(JS::Value* valuep,
+                                                 const Value& prev,
+                                                 const Value& next) {
+  MOZ_ASSERT(valuep);
+  js::InternalBarrierMethods<JS::Value>::postBarrier(valuep, prev, next);
+}
+
+// Combined pre- and post-write barriers, used by the rust Heap<T>
+// implementation.
+
 JS_PUBLIC_API void JS::HeapObjectWriteBarriers(JSObject** objp, JSObject* prev,
                                                JSObject* next) {
   MOZ_ASSERT(objp);


@@ -284,7 +284,7 @@
  *
  * Barriers for use outside of the JS engine call into the same barrier
  * implementations at InternalBarrierMethods<T>::post via an indirect call to
- * Heap(.+)WriteBarriers.
+ * Heap(.+)PostWriteBarrier.
  *
  * These clases are designed to be used to wrap GC thing pointers or values that
  * act like them (i.e. JS::Value and jsid). It is possible to use them for