Bug 1662502 - Rename some methods on GC wrapped pointers to 'unbarriered' rather than 'unsafe' r=sfink

This should clarify why they are unsafe.

Also rename the overlong 'unsafeUnbarrieredForTracing' to 'unbarrieredAddress',
to match Rooted::address and because it is used outside of tracing in a couple
of places.

Differential Revision: https://phabricator.services.mozilla.com/D89012
Jon Coppeard 2020-09-02 19:32:25 +00:00
Parent 49959262e5
Commit fb11d9f496
15 changed files with 52 additions and 57 deletions
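For readers outside the GC module, a minimal sketch of the barrier these methods skip may help; the class and hook below are illustrative assumptions, not the real SpiderMonkey types. A pre-barriered write first reports the old value so that an incremental, snapshot-at-the-beginning collector cannot miss an edge it saw when marking started; the renamed methods perform the same store with no barrier, which is exactly what makes them unsafe.

// Illustrative sketch only -- PreBarrieredSketch and preBarrier() are
// invented names, not SpiderMonkey's PreBarriered/GCPtr classes.
template <typename T>
class PreBarrieredSketch {
 public:
  // Barriered write: report the old value so marking that started before
  // this store still sees the edge it snapshotted.
  void set(T* v) {
    preBarrier(value_);
    value_ = v;
  }

  // The renamed operation: the same store with no barrier. Only valid
  // when the caller knows the barrier is unnecessary, e.g. during
  // initialization or from code running inside the GC itself.
  void unbarrieredSet(T* v) { value_ = v; }

 private:
  static void preBarrier(T* /* old */) {
    // Hypothetical hook: record the overwritten pointer for the marker.
  }
  T* value_ = nullptr;
};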

View file

@@ -470,7 +470,7 @@ static void TraceKey(Range& r, const HashableValue& key, JSTracer* trc) {
   }
   // Clear newKey to avoid the barrier in ~PreBarriered.
-  newKey.unsafeClear();
+  newKey.unbarrieredClear();
 }

 void MapObject::trace(JSTracer* trc, JSObject* obj) {

View file

@@ -58,7 +58,7 @@ class HashableValue {
   void trace(JSTracer* trc) { TraceEdge(trc, &value, "HashableValue"); }

   // Clear the value without invoking the pre-barrier.
-  void unsafeClear() { value.unsafeSet(UndefinedValue()); }
+  void unbarrieredClear() { value.unbarrieredSet(UndefinedValue()); }
 };

 template <typename Wrapper>

View file

@@ -635,9 +635,7 @@ class TypedObject : public JSObject {
   // callee here is the type descriptor.
   static MOZ_MUST_USE bool construct(JSContext* cx, unsigned argc, Value* vp);

-  Shape** addressOfShapeFromGC() {
-    return shape_.unsafeUnbarrieredForTracing();
-  }
+  Shape** addressOfShapeFromGC() { return shape_.unbarrieredAddress(); }
 };

 using HandleTypedObject = Handle<TypedObject*>;

View file

@@ -324,7 +324,7 @@ struct InternalBarrierMethods {};

 template <typename T>
 struct InternalBarrierMethods<T*> {
-  static bool isMarkable(T* v) { return v != nullptr; }
+  static bool isMarkable(const T* v) { return v != nullptr; }

   static void preBarrier(T* v) { T::writeBarrierPre(v); }
@@ -426,7 +426,7 @@ class MOZ_NON_MEMMOVABLE BarrieredBase {
   // instantiation. Friending to the generic template leads to a number of
   // unintended consequences, including template resolution ambiguity and a
   // circular dependency with Tracing.h.
-  T* unsafeUnbarrieredForTracing() const { return const_cast<T*>(&value); }
+  T* unbarrieredAddress() const { return const_cast<T*>(&value); }
 };

 // Base class for barriered pointer types that intercept only writes.
@@ -447,7 +447,7 @@ class WriteBarriered : public BarrieredBase<T>,
   // Use this if you want to change the value without invoking barriers.
   // Obviously this is dangerous unless you know the barrier is not needed.
-  void unsafeSet(const T& v) { this->value = v; }
+  void unbarrieredSet(const T& v) { this->value = v; }

   // For users who need to manually barrier the raw types.
   static void writeBarrierPre(const T& v) {
@@ -798,14 +798,16 @@ class WeakHeapPtr : public ReadBarriered<T>,
   const T& operator->() const { return get(); }

-  T* unsafeGet() { return &this->value; }
-  T const* unsafeGet() const { return &this->value; }
-
   void set(const T& v) {
     AssertTargetIsNotGray(v);
     setUnchecked(v);
   }

+  void unbarrieredSet(const T& v) {
+    AssertTargetIsNotGray(v);
+    this->value = v;
+  }
+
  private:
   void setUnchecked(const T& v) {
     T tmp = this->value;
@@ -1078,7 +1080,7 @@ struct HeapPtrHasher {
   static HashNumber hash(Lookup obj) { return DefaultHasher<T>::hash(obj); }
   static bool match(const Key& k, Lookup l) { return k.get() == l; }
-  static void rekey(Key& k, const Key& newKey) { k.unsafeSet(newKey); }
+  static void rekey(Key& k, const Key& newKey) { k.unbarrieredSet(newKey); }
 };

 template <class T>
@@ -1088,7 +1090,7 @@ struct PreBarrieredHasher {
   static HashNumber hash(Lookup obj) { return DefaultHasher<T>::hash(obj); }
   static bool match(const Key& k, Lookup l) { return k.get() == l; }
-  static void rekey(Key& k, const Key& newKey) { k.unsafeSet(newKey); }
+  static void rekey(Key& k, const Key& newKey) { k.unbarrieredSet(newKey); }
 };

 /* Useful for hashtables with a WeakHeapPtr as key. */

View file

@@ -730,7 +730,7 @@ class alignas(gc::CellAlignBytes) CellWithTenuredGCPointer : public BaseCell {
     // As above, no flags are expected to be set here.
     MOZ_ASSERT(!IsInsideNursery(newValue));
     PtrT::writeBarrierPre(headerPtr());
-    unsafeSetHeaderPtr(newValue);
+    unbarrieredSetHeaderPtr(newValue);
   }

 public:
@@ -744,7 +744,7 @@ class alignas(gc::CellAlignBytes) CellWithTenuredGCPointer : public BaseCell {
     return reinterpret_cast<PtrT*>(this->header_);
   }

-  void unsafeSetHeaderPtr(PtrT* newValue) {
+  void unbarrieredSetHeaderPtr(PtrT* newValue) {
     uintptr_t data = uintptr_t(newValue);
     MOZ_ASSERT(this->flags() == 0);
     MOZ_ASSERT((data & Cell::RESERVED_MASK) == 0);
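
The first hunk above shows the intended division of labour: fire the pre-barrier by hand, then store through the unbarriered setter. Reduced to a skeleton (SketchCell and its hook are invented for illustration, not the real CellWithTenuredGCPointer):

// Sketch of the call-site pattern from the hunk above; not real code.
struct SketchCell {
  void* header_ = nullptr;

  void* headerPtr() const { return header_; }

  // Raw store, no barrier: callers must handle barriers themselves.
  void unbarrieredSetHeaderPtr(void* newValue) { header_ = newValue; }

  // A checked setter is then "manual pre-barrier + unbarriered store".
  void setHeaderPtr(void* newValue) {
    writeBarrierPre(headerPtr());
    unbarrieredSetHeaderPtr(newValue);
  }

  static void writeBarrierPre(void* /* old */) {
    // Hypothetical hook standing in for PtrT::writeBarrierPre().
  }
};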

View file

@@ -2999,7 +2999,7 @@ void js::gc::StoreBuffer::SlotsEdge::trace(TenuringTracer& mover) const {
     MOZ_ASSERT(clampedStart <= clampedEnd);
     mover.traceSlots(
         static_cast<HeapSlot*>(obj->getDenseElements() + clampedStart)
-            ->unsafeUnbarrieredForTracing(),
+            ->unbarrieredAddress(),
         clampedEnd - clampedStart);
   } else {
     uint32_t start = std::min(start_, obj->slotSpan());
@@ -3211,7 +3211,7 @@ void js::TenuringTracer::traceObject(JSObject* obj) {
   if (!nobj->hasEmptyElements() && !nobj->denseElementsAreCopyOnWrite() &&
       ObjectDenseElementsMayBeMarkable(nobj)) {
     HeapSlotArray elements = nobj->getDenseElements();
-    Value* elems = elements.begin()->unsafeUnbarrieredForTracing();
+    Value* elems = elements.begin()->unbarrieredAddress();
     traceSlots(elems, elems + nobj->getDenseInitializedLength());
   }
@@ -3226,12 +3226,11 @@ void js::TenuringTracer::traceObjectSlots(NativeObject* nobj, uint32_t start,
   HeapSlot* dynEnd;
   nobj->getSlotRange(start, length, &fixedStart, &fixedEnd, &dynStart, &dynEnd);
   if (fixedStart) {
-    traceSlots(fixedStart->unsafeUnbarrieredForTracing(),
-               fixedEnd->unsafeUnbarrieredForTracing());
+    traceSlots(fixedStart->unbarrieredAddress(),
+               fixedEnd->unbarrieredAddress());
   }
   if (dynStart) {
-    traceSlots(dynStart->unsafeUnbarrieredForTracing(),
-               dynEnd->unsafeUnbarrieredForTracing());
+    traceSlots(dynStart->unbarrieredAddress(), dynEnd->unbarrieredAddress());
   }
 }

View file

@@ -74,8 +74,7 @@ inline bool IsMarkedUnbarriered(JSRuntime* rt, T* thingp) {
 // are always reported as being marked.
 template <typename T>
 inline bool IsMarked(JSRuntime* rt, BarrieredBase<T>* thingp) {
-  return IsMarkedInternal(rt,
-                          ConvertToBase(thingp->unsafeUnbarrieredForTracing()));
+  return IsMarkedInternal(rt, ConvertToBase(thingp->unbarrieredAddress()));
 }

 template <typename T>
@@ -86,13 +85,13 @@ inline bool IsAboutToBeFinalizedUnbarriered(T* thingp) {
 template <typename T>
 inline bool IsAboutToBeFinalized(const WriteBarriered<T>* thingp) {
   return IsAboutToBeFinalizedInternal(
-      ConvertToBase(thingp->unsafeUnbarrieredForTracing()));
+      ConvertToBase(thingp->unbarrieredAddress()));
 }

 template <typename T>
 inline bool IsAboutToBeFinalized(ReadBarriered<T>* thingp) {
   return IsAboutToBeFinalizedInternal(
-      ConvertToBase(thingp->unsafeUnbarrieredForTracing()));
+      ConvertToBase(thingp->unbarrieredAddress()));
 }

 inline bool IsAboutToBeFinalizedDuringMinorSweep(Cell* cell);

View file

@@ -122,13 +122,13 @@ inline void AssertRootMarkingPhase(JSTracer* trc) {}
 template <typename T>
 inline void TraceEdge(JSTracer* trc, const WriteBarriered<T>* thingp,
                       const char* name) {
-  gc::TraceEdgeInternal(
-      trc, gc::ConvertToBase(thingp->unsafeUnbarrieredForTracing()), name);
+  gc::TraceEdgeInternal(trc, gc::ConvertToBase(thingp->unbarrieredAddress()),
+                        name);
 }

 template <typename T>
 inline void TraceEdge(JSTracer* trc, WeakHeapPtr<T>* thingp, const char* name) {
-  gc::TraceEdgeInternal(trc, gc::ConvertToBase(thingp->unsafeGet()), name);
+  gc::TraceEdgeInternal(trc, gc::ConvertToBase(thingp->unbarrieredAddress()), name);
 }

 template <class BC, class T>
@@ -138,7 +138,7 @@ inline void TraceCellHeaderEdge(JSTracer* trc,
   T* thing = thingp->headerPtr();
   gc::TraceEdgeInternal(trc, gc::ConvertToBase(&thing), name);
   if (thing != thingp->headerPtr()) {
-    thingp->unsafeSetHeaderPtr(thing);
+    thingp->unbarrieredSetHeaderPtr(thing);
   }
 }
@@ -169,7 +169,7 @@ inline void TraceNullableCellHeaderEdge(
   if (thing) {
     gc::TraceEdgeInternal(trc, gc::ConvertToBase(&thing), name);
     if (thing != thingp->headerPtr()) {
-      thingp->unsafeSetHeaderPtr(thing);
+      thingp->unbarrieredSetHeaderPtr(thing);
     }
   }
 }
@@ -186,7 +186,7 @@ inline void TraceRoot(JSTracer* trc, T* thingp, const char* name) {
 template <typename T>
 inline void TraceRoot(JSTracer* trc, WeakHeapPtr<T>* thingp, const char* name) {
-  TraceRoot(trc, thingp->unsafeGet(), name);
+  TraceRoot(trc, thingp->unbarrieredAddress(), name);
 }

 // Idential to TraceRoot, except that this variant will not crash if |*thingp|
@@ -203,7 +203,7 @@ inline void TraceNullableRoot(JSTracer* trc, T* thingp, const char* name) {
 template <typename T>
 inline void TraceNullableRoot(JSTracer* trc, WeakHeapPtr<T>* thingp,
                               const char* name) {
-  TraceNullableRoot(trc, thingp->unsafeGet(), name);
+  TraceNullableRoot(trc, thingp->unbarrieredAddress(), name);
 }

 // Like TraceEdge, but for edges that do not use one of the automatic barrier
@@ -229,7 +229,7 @@ template <typename T>
 inline bool TraceWeakEdge(JSTracer* trc, BarrieredBase<T>* thingp,
                           const char* name) {
   return gc::TraceEdgeInternal(
-      trc, gc::ConvertToBase(thingp->unsafeUnbarrieredForTracing()), name);
+      trc, gc::ConvertToBase(thingp->unbarrieredAddress()), name);
 }

 // Trace all edges contained in the given array.
@@ -237,8 +237,8 @@ inline bool TraceWeakEdge(JSTracer* trc, BarrieredBase<T>* thingp,
 template <typename T>
 void TraceRange(JSTracer* trc, size_t len, BarrieredBase<T>* vec,
                 const char* name) {
-  gc::TraceRangeInternal(
-      trc, len, gc::ConvertToBase(vec[0].unsafeUnbarrieredForTracing()), name);
+  gc::TraceRangeInternal(trc, len,
+                         gc::ConvertToBase(vec[0].unbarrieredAddress()), name);
 }

 // Trace all root edges in the given array.
@@ -260,7 +260,7 @@ template <typename T>
 void TraceCrossCompartmentEdge(JSTracer* trc, JSObject* src,
                                const WriteBarriered<T>* dst, const char* name) {
   TraceManuallyBarrieredCrossCompartmentEdge(
-      trc, src, gc::ConvertToBase(dst->unsafeUnbarrieredForTracing()), name);
+      trc, src, gc::ConvertToBase(dst->unbarrieredAddress()), name);
 }

 // Trace a weak map key. For debugger weak maps these may be cross compartment,
@@ -274,8 +274,7 @@ inline void TraceWeakMapKeyEdge(JSTracer* trc, Zone* weakMapZone,
                                 const WriteBarriered<T>* thingp,
                                 const char* name) {
   TraceWeakMapKeyEdgeInternal(
-      trc, weakMapZone,
-      gc::ConvertToBase(thingp->unsafeUnbarrieredForTracing()), name);
+      trc, weakMapZone, gc::ConvertToBase(thingp->unbarrieredAddress()), name);
 }

 // Permanent atoms and well-known symbols are shared between runtimes and must
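
Tracing is the main consumer of unbarrieredAddress(): the tracer gets a raw pointer into the barriered slot and may rewrite it in place (for example when a moving GC relocates the target), so running barriers there would be wrong as well as slow. A hedged sketch of a participating class, using the TraceEdge overload shown above; SketchThing and the include paths are assumptions:

#include "gc/Barriers.h"  // assumed internal header for js::GCPtrObject
#include "gc/Tracer.h"    // assumed internal header for js::TraceEdge

// Hypothetical GC thing with one barriered field. TraceEdge takes the
// WriteBarriered<T>* and internally calls unbarrieredAddress(), letting
// the tracer update the pointer without re-triggering barriers.
class SketchThing {
  js::GCPtrObject obj_;

 public:
  void trace(JSTracer* trc) {
    js::TraceEdge(trc, &obj_, "SketchThing::obj_");
  }
};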

View file

@@ -926,7 +926,7 @@ class GlobalObject : public NativeObject {
   }

   void clearSourceURLSHolder() {
     // This is called at the start of shrinking GCs, so avoids barriers.
-    getSlotRef(SOURCE_URLS).unsafeSet(UndefinedValue());
+    getSlotRef(SOURCE_URLS).unbarrieredSet(UndefinedValue());
   }

   // A class used in place of a prototype during off-thread parsing.

View file

@@ -123,13 +123,12 @@ inline JSFunction* CloneFunctionObjectIfNotSingleton(
   // value to which we could sensibly initialize this.
   MOZ_MAKE_MEM_UNDEFINED(&fun->u, sizeof(u));

-  // Safe: we're initializing for the very first time.
-  fun->atom_.unsafeSet(nullptr);
+  fun->atom_.init(nullptr);

   if (kind == js::gc::AllocKind::FUNCTION_EXTENDED) {
     fun->setFlags(FunctionFlags::EXTENDED);
     for (js::GCPtrValue& extendedSlot : fun->toExtended()->extendedSlots) {
-      extendedSlot.unsafeSet(JS::UndefinedValue());
+      extendedSlot.init(JS::UndefinedValue());
     }
   } else {
     fun->setFlags(0);

View file

@@ -1348,8 +1348,9 @@ struct ObjectGroupRealm::AllocationSiteKey {
   }

   bool needsSweep() {
-    return IsAboutToBeFinalizedUnbarriered(script.unsafeGet()) ||
-           (proto && IsAboutToBeFinalizedUnbarriered(proto.unsafeGet()));
+    return IsAboutToBeFinalizedUnbarriered(script.unbarrieredAddress()) ||
+           (proto &&
+            IsAboutToBeFinalizedUnbarriered(proto.unbarrieredAddress()));
   }

   bool operator==(const AllocationSiteKey& other) const {

View file

@@ -31,13 +31,13 @@ js::GlobalObject* JS::Realm::maybeGlobal() const {
 }

 js::LexicalEnvironmentObject* JS::Realm::unbarrieredLexicalEnvironment() const {
-  return *lexicalEnv_.unsafeGet();
+  return lexicalEnv_.unbarrieredGet();
 }

 inline bool JS::Realm::globalIsAboutToBeFinalized() {
   MOZ_ASSERT(zone_->isGCSweeping());
   return global_ &&
-         js::gc::IsAboutToBeFinalizedUnbarriered(global_.unsafeGet());
+         js::gc::IsAboutToBeFinalizedUnbarriered(global_.unbarrieredAddress());
 }

 inline bool JS::Realm::hasLiveGlobal() const {

View file

@@ -312,9 +312,8 @@ void Realm::traceRoots(JSTracer* trc,
   //
   // If a realm is on-stack, we mark its global so that
   // JSContext::global() remains valid.
-  if (shouldTraceGlobal() && global_.unbarrieredGet()) {
-    TraceRoot(trc, global_.unsafeUnbarrieredForTracing(),
-              "on-stack realm global");
+  if (shouldTraceGlobal() && global_) {
+    TraceRoot(trc, global_.unbarrieredAddress(), "on-stack realm global");
   }
 }
@@ -453,9 +452,9 @@ void Realm::fixupAfterMovingGC(JSTracer* trc) {
 }

 void Realm::fixupGlobal() {
-  GlobalObject* global = *global_.unsafeGet();
+  GlobalObject* global = global_.unbarrieredGet();
   if (global) {
-    global_.set(MaybeForwarded(global));
+    global_.unbarrieredSet(MaybeForwarded(global));
   }
 }

View file

@@ -836,14 +836,13 @@ class JS::Realm : public JS::shadow::Realm {

 inline js::Handle<js::GlobalObject*> JSContext::global() const {
   /*
-   * It's safe to use |unsafeGet()| here because any realm that is
-   * on-stack will be marked automatically, so there's no need for a read
-   * barrier on it. Once the realm is popped, the handle is no longer
-   * safe to use.
+   * It's safe to use |unbarrieredGet()| here because any realm that is on-stack
+   * will be marked automatically, so there's no need for a read barrier on
+   * it. Once the realm is popped, the handle is no longer safe to use.
    */
   MOZ_ASSERT(realm_, "Caller needs to enter a realm first");
   return js::Handle<js::GlobalObject*>::fromMarkedLocation(
-      realm_->global_.unsafeGet());
+      realm_->global_.unbarrieredAddress());
 }

 namespace js {
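
The comment above justifies skipping the other barrier flavour: a read barrier marks the value as it is read, so that weakly-held targets observed by the mutator stay alive, and it can be skipped when something else already keeps the target alive (here, root-marking of on-stack realms). A minimal sketch of what unbarrieredGet()/unbarrieredAddress() bypass, with invented names rather than the real ReadBarriered/WeakHeapPtr:

// Illustrative sketch only; not the real SpiderMonkey classes.
template <typename T>
class ReadBarrieredSketch {
 public:
  T* get() const {
    readBarrier(value_);  // e.g. mark |value_| during incremental GC
    return value_;
  }

  T* unbarrieredGet() const { return value_; }  // skip the read barrier
  T** unbarrieredAddress() { return &value_; }  // raw address of the slot

 private:
  static void readBarrier(T* /* v */) {
    // Hypothetical hook: keep a weakly-observed value alive once read.
  }
  T* value_ = nullptr;
};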

View file

@@ -127,7 +127,7 @@ inline Shape* Shape::new_(JSContext* cx, Handle<StackShape> other,

 inline void Shape::updateBaseShapeAfterMovingGC() {
   BaseShape* base = this->base();
   if (IsForwarded(base)) {
-    unsafeSetHeaderPtr(Forwarded(base));
+    unbarrieredSetHeaderPtr(Forwarded(base));
   }
 }