From fb11d9f49678cc0e408e6c2a57cdaf5b1805b1cd Mon Sep 17 00:00:00 2001
From: Jon Coppeard <jcoppeard@mozilla.com>
Date: Wed, 2 Sep 2020 19:32:25 +0000
Subject: [PATCH] Bug 1662502 - Rename some methods on GC wrapped pointers to
 'unbarriered' rather than 'unsafe' r=sfink

This should clarify why they are unsafe. Also rename the overlong
'unsafeUnbarrieredForTracing' to 'unbarrieredAddress' to be more like
Rooted::address and since it is used outside of tracing in a couple of
places.

Differential Revision: https://phabricator.services.mozilla.com/D89012
---
 js/src/builtin/MapObject.cpp |  2 +-
 js/src/builtin/MapObject.h   |  2 +-
 js/src/builtin/TypedObject.h |  4 +---
 js/src/gc/Barrier.h          | 18 ++++++++++--------
 js/src/gc/Cell.h             |  4 ++--
 js/src/gc/Marking.cpp        | 11 +++++------
 js/src/gc/Marking.h          |  7 +++----
 js/src/gc/Tracer.h           | 25 ++++++++++++-------------
 js/src/vm/GlobalObject.h     |  2 +-
 js/src/vm/JSFunction-inl.h   |  5 ++---
 js/src/vm/ObjectGroup.cpp    |  5 +++--
 js/src/vm/Realm-inl.h        |  4 ++--
 js/src/vm/Realm.cpp          |  9 ++++-----
 js/src/vm/Realm.h            |  9 ++++-----
 js/src/vm/Shape-inl.h        |  2 +-
 15 files changed, 52 insertions(+), 57 deletions(-)

diff --git a/js/src/builtin/MapObject.cpp b/js/src/builtin/MapObject.cpp
index e546a4a143a1..0a2f31486807 100644
--- a/js/src/builtin/MapObject.cpp
+++ b/js/src/builtin/MapObject.cpp
@@ -470,7 +470,7 @@ static void TraceKey(Range& r, const HashableValue& key, JSTracer* trc) {
   }

   // Clear newKey to avoid the barrier in ~PreBarriered.
-  newKey.unsafeClear();
+  newKey.unbarrieredClear();
 }

 void MapObject::trace(JSTracer* trc, JSObject* obj) {
diff --git a/js/src/builtin/MapObject.h b/js/src/builtin/MapObject.h
index 7c474dc69a12..246dd84123a6 100644
--- a/js/src/builtin/MapObject.h
+++ b/js/src/builtin/MapObject.h
@@ -58,7 +58,7 @@ class HashableValue {
   void trace(JSTracer* trc) { TraceEdge(trc, &value, "HashableValue"); }

   // Clear the value without invoking the pre-barrier.
-  void unsafeClear() { value.unsafeSet(UndefinedValue()); }
+  void unbarrieredClear() { value.unbarrieredSet(UndefinedValue()); }
 };

 template <typename Wrapper>
diff --git a/js/src/builtin/TypedObject.h b/js/src/builtin/TypedObject.h
index 94adb56323d4..b9e1cea91c09 100644
--- a/js/src/builtin/TypedObject.h
+++ b/js/src/builtin/TypedObject.h
@@ -635,9 +635,7 @@ class TypedObject : public JSObject {
   // callee here is the type descriptor.
   static MOZ_MUST_USE bool construct(JSContext* cx, unsigned argc, Value* vp);

-  Shape** addressOfShapeFromGC() {
-    return shape_.unsafeUnbarrieredForTracing();
-  }
+  Shape** addressOfShapeFromGC() { return shape_.unbarrieredAddress(); }
 };

 using HandleTypedObject = Handle<TypedObject*>;
diff --git a/js/src/gc/Barrier.h b/js/src/gc/Barrier.h
index 17c331090cbe..fd8345c60a10 100644
--- a/js/src/gc/Barrier.h
+++ b/js/src/gc/Barrier.h
@@ -324,7 +324,7 @@ struct InternalBarrierMethods {};

 template <typename T>
 struct InternalBarrierMethods<T*> {
-  static bool isMarkable(T* v) { return v != nullptr; }
+  static bool isMarkable(const T* v) { return v != nullptr; }

   static void preBarrier(T* v) { T::writeBarrierPre(v); }

@@ -426,7 +426,7 @@ class MOZ_NON_MEMMOVABLE BarrieredBase {
   // instantiation. Friending to the generic template leads to a number of
   // unintended consequences, including template resolution ambiguity and a
   // circular dependency with Tracing.h.
-  T* unsafeUnbarrieredForTracing() const { return const_cast<T*>(&value); }
+  T* unbarrieredAddress() const { return const_cast<T*>(&value); }
 };

 // Base class for barriered pointer types that intercept only writes.
@@ -447,7 +447,7 @@ class WriteBarriered : public BarrieredBase<T>,

   // Use this if you want to change the value without invoking barriers.
   // Obviously this is dangerous unless you know the barrier is not needed.
-  void unsafeSet(const T& v) { this->value = v; }
+  void unbarrieredSet(const T& v) { this->value = v; }

   // For users who need to manually barrier the raw types.
   static void writeBarrierPre(const T& v) {
@@ -798,14 +798,16 @@ class WeakHeapPtr : public ReadBarriered<T>,
   const T& operator->() const { return get(); }

-  T* unsafeGet() { return &this->value; }
-  T const* unsafeGet() const { return &this->value; }
-
   void set(const T& v) {
     AssertTargetIsNotGray(v);
     setUnchecked(v);
   }

+  void unbarrieredSet(const T& v) {
+    AssertTargetIsNotGray(v);
+    this->value = v;
+  }
+
  private:
   void setUnchecked(const T& v) {
     T tmp = this->value;
@@ -1078,7 +1080,7 @@ struct HeapPtrHasher {
   static HashNumber hash(Lookup obj) { return DefaultHasher<T>::hash(obj); }
   static bool match(const Key& k, Lookup l) { return k.get() == l; }
-  static void rekey(Key& k, const Key& newKey) { k.unsafeSet(newKey); }
+  static void rekey(Key& k, const Key& newKey) { k.unbarrieredSet(newKey); }
 };

 template <class T>
@@ -1088,7 +1090,7 @@ struct PreBarrieredHasher {
   static HashNumber hash(Lookup obj) { return DefaultHasher<T>::hash(obj); }
   static bool match(const Key& k, Lookup l) { return k.get() == l; }
-  static void rekey(Key& k, const Key& newKey) { k.unsafeSet(newKey); }
+  static void rekey(Key& k, const Key& newKey) { k.unbarrieredSet(newKey); }
 };

 /* Useful for hashtables with a WeakHeapPtr as key. */
diff --git a/js/src/gc/Cell.h b/js/src/gc/Cell.h
index 65f86c021fe7..f3d6ba0d5e3f 100644
--- a/js/src/gc/Cell.h
+++ b/js/src/gc/Cell.h
@@ -730,7 +730,7 @@ class alignas(gc::CellAlignBytes) CellWithTenuredGCPointer : public BaseCell {
     // As above, no flags are expected to be set here.
     MOZ_ASSERT(!IsInsideNursery(newValue));
     PtrT::writeBarrierPre(headerPtr());
-    unsafeSetHeaderPtr(newValue);
+    unbarrieredSetHeaderPtr(newValue);
   }

  public:
@@ -744,7 +744,7 @@ class alignas(gc::CellAlignBytes) CellWithTenuredGCPointer : public BaseCell {
     return reinterpret_cast<PtrT*>(this->header_);
   }

-  void unsafeSetHeaderPtr(PtrT* newValue) {
+  void unbarrieredSetHeaderPtr(PtrT* newValue) {
     uintptr_t data = uintptr_t(newValue);
     MOZ_ASSERT(this->flags() == 0);
     MOZ_ASSERT((data & Cell::RESERVED_MASK) == 0);
diff --git a/js/src/gc/Marking.cpp b/js/src/gc/Marking.cpp
index 97cf72c01dcf..b7d80fe1ff65 100644
--- a/js/src/gc/Marking.cpp
+++ b/js/src/gc/Marking.cpp
@@ -2999,7 +2999,7 @@ void js::gc::StoreBuffer::SlotsEdge::trace(TenuringTracer& mover) const {
     MOZ_ASSERT(clampedStart <= clampedEnd);
     mover.traceSlots(
         static_cast<HeapSlot*>(obj->getDenseElements() + clampedStart)
-            ->unsafeUnbarrieredForTracing(),
+            ->unbarrieredAddress(),
         clampedEnd - clampedStart);
   } else {
     uint32_t start = std::min(start_, obj->slotSpan());
@@ -3211,7 +3211,7 @@ void js::TenuringTracer::traceObject(JSObject* obj) {
   if (!nobj->hasEmptyElements() && !nobj->denseElementsAreCopyOnWrite() &&
       ObjectDenseElementsMayBeMarkable(nobj)) {
     HeapSlotArray elements = nobj->getDenseElements();
-    Value* elems = elements.begin()->unsafeUnbarrieredForTracing();
+    Value* elems = elements.begin()->unbarrieredAddress();
     traceSlots(elems, elems + nobj->getDenseInitializedLength());
   }

@@ -3226,12 +3226,11 @@ void js::TenuringTracer::traceObjectSlots(NativeObject* nobj, uint32_t start,
   HeapSlot* dynEnd;
   nobj->getSlotRange(start, length, &fixedStart, &fixedEnd, &dynStart, &dynEnd);
   if (fixedStart) {
-    traceSlots(fixedStart->unsafeUnbarrieredForTracing(),
-               fixedEnd->unsafeUnbarrieredForTracing());
+    traceSlots(fixedStart->unbarrieredAddress(),
+               fixedEnd->unbarrieredAddress());
   }
   if (dynStart) {
-    traceSlots(dynStart->unsafeUnbarrieredForTracing(),
-               dynEnd->unsafeUnbarrieredForTracing());
+    traceSlots(dynStart->unbarrieredAddress(), dynEnd->unbarrieredAddress());
   }
 }

diff --git a/js/src/gc/Marking.h b/js/src/gc/Marking.h
index 8e509a37879a..ec9b04d1608f 100644
--- a/js/src/gc/Marking.h
+++ b/js/src/gc/Marking.h
@@ -74,8 +74,7 @@ inline bool IsMarkedUnbarriered(JSRuntime* rt, T* thingp) {
 // are always reported as being marked.
 template <typename T>
 inline bool IsMarked(JSRuntime* rt, BarrieredBase<T>* thingp) {
-  return IsMarkedInternal(rt,
-                          ConvertToBase(thingp->unsafeUnbarrieredForTracing()));
+  return IsMarkedInternal(rt, ConvertToBase(thingp->unbarrieredAddress()));
 }

 template <typename T>
@@ -86,13 +85,13 @@ inline bool IsAboutToBeFinalizedUnbarriered(T* thingp) {
 template <typename T>
 inline bool IsAboutToBeFinalized(const WriteBarriered<T>* thingp) {
   return IsAboutToBeFinalizedInternal(
-      ConvertToBase(thingp->unsafeUnbarrieredForTracing()));
+      ConvertToBase(thingp->unbarrieredAddress()));
 }

 template <typename T>
 inline bool IsAboutToBeFinalized(ReadBarriered<T>* thingp) {
   return IsAboutToBeFinalizedInternal(
-      ConvertToBase(thingp->unsafeUnbarrieredForTracing()));
+      ConvertToBase(thingp->unbarrieredAddress()));
 }

 inline bool IsAboutToBeFinalizedDuringMinorSweep(Cell* cell);
diff --git a/js/src/gc/Tracer.h b/js/src/gc/Tracer.h
index 86d4c8fd9555..cc2e7ade0a05 100644
--- a/js/src/gc/Tracer.h
+++ b/js/src/gc/Tracer.h
@@ -122,13 +122,13 @@ inline void AssertRootMarkingPhase(JSTracer* trc) {}
 template <typename T>
 inline void TraceEdge(JSTracer* trc, const WriteBarriered<T>* thingp,
                       const char* name) {
-  gc::TraceEdgeInternal(
-      trc, gc::ConvertToBase(thingp->unsafeUnbarrieredForTracing()), name);
+  gc::TraceEdgeInternal(trc, gc::ConvertToBase(thingp->unbarrieredAddress()),
+                        name);
 }

 template <typename T>
 inline void TraceEdge(JSTracer* trc, WeakHeapPtr<T>* thingp, const char* name) {
-  gc::TraceEdgeInternal(trc, gc::ConvertToBase(thingp->unsafeGet()), name);
+  gc::TraceEdgeInternal(trc, gc::ConvertToBase(thingp->unbarrieredAddress()), name);
 }

 template <typename T>
@@ -138,7 +138,7 @@ inline void TraceCellHeaderEdge(JSTracer* trc,
   T* thing = thingp->headerPtr();
   gc::TraceEdgeInternal(trc, gc::ConvertToBase(&thing), name);
   if (thing != thingp->headerPtr()) {
-    thingp->unsafeSetHeaderPtr(thing);
+    thingp->unbarrieredSetHeaderPtr(thing);
   }
 }

@@ -169,7 +169,7 @@ inline void TraceNullableCellHeaderEdge(
   if (thing) {
     gc::TraceEdgeInternal(trc, gc::ConvertToBase(&thing), name);
     if (thing != thingp->headerPtr()) {
-      thingp->unsafeSetHeaderPtr(thing);
+      thingp->unbarrieredSetHeaderPtr(thing);
     }
   }
 }
@@ -186,7 +186,7 @@ inline void TraceRoot(JSTracer* trc, T* thingp, const char* name) {
 template <typename T>
 inline void TraceRoot(JSTracer* trc, WeakHeapPtr<T>* thingp,
                       const char* name) {
-  TraceRoot(trc, thingp->unsafeGet(), name);
+  TraceRoot(trc, thingp->unbarrieredAddress(), name);
 }

 // Idential to TraceRoot, except that this variant will not crash if |*thingp|
@@ -203,7 +203,7 @@ inline void TraceNullableRoot(JSTracer* trc, T* thingp, const char* name) {
 template <typename T>
 inline void TraceNullableRoot(JSTracer* trc, WeakHeapPtr<T>* thingp,
                               const char* name) {
-  TraceNullableRoot(trc, thingp->unsafeGet(), name);
+  TraceNullableRoot(trc, thingp->unbarrieredAddress(), name);
 }

 // Like TraceEdge, but for edges that do not use one of the automatic barrier
@@ -229,7 +229,7 @@ inline bool TraceWeakEdge(JSTracer* trc, BarrieredBase<T>* thingp,
                           const char* name) {
   return gc::TraceEdgeInternal(
-      trc, gc::ConvertToBase(thingp->unsafeUnbarrieredForTracing()), name);
+      trc, gc::ConvertToBase(thingp->unbarrieredAddress()), name);
 }

 // Trace all edges contained in the given array.
@@ -237,8 +237,8 @@ inline bool TraceWeakEdge(JSTracer* trc, BarrieredBase<T>* thingp,
 template <typename T>
 void TraceRange(JSTracer* trc, size_t len, BarrieredBase<T>* vec,
                 const char* name) {
-  gc::TraceRangeInternal(
-      trc, len, gc::ConvertToBase(vec[0].unsafeUnbarrieredForTracing()), name);
+  gc::TraceRangeInternal(trc, len,
+                         gc::ConvertToBase(vec[0].unbarrieredAddress()), name);
 }

 // Trace all root edges in the given array.
@@ -260,7 +260,7 @@ template <typename T>
 void TraceCrossCompartmentEdge(JSTracer* trc, JSObject* src,
                                const WriteBarriered<T>* dst,
                                const char* name) {
   TraceManuallyBarrieredCrossCompartmentEdge(
-      trc, src, gc::ConvertToBase(dst->unsafeUnbarrieredForTracing()), name);
+      trc, src, gc::ConvertToBase(dst->unbarrieredAddress()), name);
 }

 // Trace a weak map key. For debugger weak maps these may be cross compartment,
@@ -274,8 +274,7 @@ inline void TraceWeakMapKeyEdge(JSTracer* trc, Zone* weakMapZone,
                                 const WriteBarriered<T>* thingp,
                                 const char* name) {
   TraceWeakMapKeyEdgeInternal(
-      trc, weakMapZone,
-      gc::ConvertToBase(thingp->unsafeUnbarrieredForTracing()), name);
+      trc, weakMapZone, gc::ConvertToBase(thingp->unbarrieredAddress()), name);
 }

 // Permanent atoms and well-known symbols are shared between runtimes and must
diff --git a/js/src/vm/GlobalObject.h b/js/src/vm/GlobalObject.h
index c81a9d29b36a..9c96071911c3 100644
--- a/js/src/vm/GlobalObject.h
+++ b/js/src/vm/GlobalObject.h
@@ -926,7 +926,7 @@ class GlobalObject : public NativeObject {
   }
   void clearSourceURLSHolder() {
     // This is called at the start of shrinking GCs, so avoids barriers.
-    getSlotRef(SOURCE_URLS).unsafeSet(UndefinedValue());
+    getSlotRef(SOURCE_URLS).unbarrieredSet(UndefinedValue());
   }

   // A class used in place of a prototype during off-thread parsing.
diff --git a/js/src/vm/JSFunction-inl.h b/js/src/vm/JSFunction-inl.h
index 9ebf296e8a5e..29ead97f6fa2 100644
--- a/js/src/vm/JSFunction-inl.h
+++ b/js/src/vm/JSFunction-inl.h
@@ -123,13 +123,12 @@ inline JSFunction* CloneFunctionObjectIfNotSingleton(
   // value to which we could sensibly initialize this.
   MOZ_MAKE_MEM_UNDEFINED(&fun->u, sizeof(u));

-  // Safe: we're initializing for the very first time.
-  fun->atom_.unsafeSet(nullptr);
+  fun->atom_.init(nullptr);

   if (kind == js::gc::AllocKind::FUNCTION_EXTENDED) {
     fun->setFlags(FunctionFlags::EXTENDED);
     for (js::GCPtrValue& extendedSlot : fun->toExtended()->extendedSlots) {
-      extendedSlot.unsafeSet(JS::UndefinedValue());
+      extendedSlot.init(JS::UndefinedValue());
     }
   } else {
     fun->setFlags(0);
diff --git a/js/src/vm/ObjectGroup.cpp b/js/src/vm/ObjectGroup.cpp
index 1ff58e9a9aa4..d350726ebbe6 100644
--- a/js/src/vm/ObjectGroup.cpp
+++ b/js/src/vm/ObjectGroup.cpp
@@ -1348,8 +1348,9 @@ struct ObjectGroupRealm::AllocationSiteKey {
   }

   bool needsSweep() {
-    return IsAboutToBeFinalizedUnbarriered(script.unsafeGet()) ||
-           (proto && IsAboutToBeFinalizedUnbarriered(proto.unsafeGet()));
+    return IsAboutToBeFinalizedUnbarriered(script.unbarrieredAddress()) ||
+           (proto &&
+            IsAboutToBeFinalizedUnbarriered(proto.unbarrieredAddress()));
   }

   bool operator==(const AllocationSiteKey& other) const {
diff --git a/js/src/vm/Realm-inl.h b/js/src/vm/Realm-inl.h
index 4672d976ad68..c9a4aec4e332 100644
--- a/js/src/vm/Realm-inl.h
+++ b/js/src/vm/Realm-inl.h
@@ -31,13 +31,13 @@ js::GlobalObject* JS::Realm::maybeGlobal() const {
 }

 js::LexicalEnvironmentObject* JS::Realm::unbarrieredLexicalEnvironment() const {
-  return *lexicalEnv_.unsafeGet();
+  return lexicalEnv_.unbarrieredGet();
 }

 inline bool JS::Realm::globalIsAboutToBeFinalized() {
   MOZ_ASSERT(zone_->isGCSweeping());
   return global_ &&
-         js::gc::IsAboutToBeFinalizedUnbarriered(global_.unsafeGet());
+         js::gc::IsAboutToBeFinalizedUnbarriered(global_.unbarrieredAddress());
 }

 inline bool JS::Realm::hasLiveGlobal() const {
diff --git a/js/src/vm/Realm.cpp b/js/src/vm/Realm.cpp
index 91b220070d3a..550268aee580 100644
--- a/js/src/vm/Realm.cpp
+++ b/js/src/vm/Realm.cpp
@@ -312,9 +312,8 @@ void Realm::traceRoots(JSTracer* trc,
     //
     // If a realm is on-stack, we mark its global so that
     // JSContext::global() remains valid.
-    if (shouldTraceGlobal() && global_.unbarrieredGet()) {
-      TraceRoot(trc, global_.unsafeUnbarrieredForTracing(),
-                "on-stack realm global");
+    if (shouldTraceGlobal() && global_) {
+      TraceRoot(trc, global_.unbarrieredAddress(), "on-stack realm global");
     }
   }

@@ -453,9 +452,9 @@ void Realm::fixupAfterMovingGC(JSTracer* trc) {
 }

 void Realm::fixupGlobal() {
-  GlobalObject* global = *global_.unsafeGet();
+  GlobalObject* global = global_.unbarrieredGet();
   if (global) {
-    global_.set(MaybeForwarded(global));
+    global_.unbarrieredSet(MaybeForwarded(global));
   }
 }

diff --git a/js/src/vm/Realm.h b/js/src/vm/Realm.h
index b8f0d1a1d649..963fd29eaee6 100644
--- a/js/src/vm/Realm.h
+++ b/js/src/vm/Realm.h
@@ -836,14 +836,13 @@ class JS::Realm : public JS::shadow::Realm {

 inline js::Handle<js::GlobalObject*> JSContext::global() const {
   /*
-   * It's safe to use |unsafeGet()| here because any realm that is
-   * on-stack will be marked automatically, so there's no need for a read
-   * barrier on it. Once the realm is popped, the handle is no longer
-   * safe to use.
+   * It's safe to use |unbarrieredGet()| here because any realm that is on-stack
+   * will be marked automatically, so there's no need for a read barrier on
+   * it. Once the realm is popped, the handle is no longer safe to use.
    */
   MOZ_ASSERT(realm_, "Caller needs to enter a realm first");
   return js::Handle<js::GlobalObject*>::fromMarkedLocation(
-      realm_->global_.unsafeGet());
+      realm_->global_.unbarrieredAddress());
 }

 namespace js {
diff --git a/js/src/vm/Shape-inl.h b/js/src/vm/Shape-inl.h
index ee5dcc6b9567..dca89b730910 100644
--- a/js/src/vm/Shape-inl.h
+++ b/js/src/vm/Shape-inl.h
@@ -127,7 +127,7 @@ inline Shape* Shape::new_(JSContext* cx, Handle<StackShape> other,
 inline void Shape::updateBaseShapeAfterMovingGC() {
   BaseShape* base = this->base();
   if (IsForwarded(base)) {
-    unsafeSetHeaderPtr(Forwarded(base));
+    unbarrieredSetHeaderPtr(Forwarded(base));
   }
 }
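
The naming convention the patch settles on can be illustrated outside of SpiderMonkey. Below is a minimal, self-contained sketch, not SpiderMonkey code: BarrieredPtr, firePreBarrier, and traceEdge are hypothetical stand-ins that only mirror the scheme, with set() running the pre-barrier, unbarrieredSet() skipping it, and unbarrieredAddress() handing out the slot so tracing code can update it in place.

#include <cstdio>

// Stand-in for the GC's pre-write barrier hook (hypothetical).
static void firePreBarrier(const void* oldValue) {
  // A real barrier would record |oldValue| for the collector; here we just log.
  std::printf("pre-barrier fired (old value %s null)\n",
              oldValue ? "is not" : "is");
}

// Hypothetical barriered pointer wrapper mirroring the naming convention.
template <typename T>
class BarrieredPtr {
  T* value_ = nullptr;

 public:
  // Normal write: tell the GC about the edge being overwritten.
  void set(T* v) {
    firePreBarrier(value_);
    value_ = v;
  }

  // Skip the barrier; only valid when it is known to be unnecessary,
  // e.g. during initialization or when rekeying a hash table.
  void unbarrieredSet(T* v) { value_ = v; }

  // Hand out the slot itself so tracing code can update it in place,
  // the analogue of the renamed unbarrieredAddress().
  T** unbarrieredAddress() { return &value_; }

  T* get() const { return value_; }
};

// Toy "tracer": rewrites an edge through the raw slot address.
template <typename T>
void traceEdge(T** slot, T* relocated) {
  *slot = relocated;
}

int main() {
  int a = 1, b = 2;
  BarrieredPtr<int> p;
  p.unbarrieredSet(&a);                   // initialization: no barrier needed
  p.set(&b);                              // ordinary write: pre-barrier runs
  traceEdge(p.unbarrieredAddress(), &a);  // tracer updates the slot directly
  std::printf("final value: %d\n", *p.get());
  return 0;
}

Keeping 'unbarriered' in the method names makes each call site read like the hazard it carries, which is what the rename from 'unsafe' is after.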