Bug 1662502 - Rename some methods on GC wrapped pointers to 'unbarriered' rather than 'unsafe' r=sfink

This should clarify why they are unsafe.

Also rename the overlong 'unsafeUnbarrieredForTracing' to 'unbarrieredAddress',
matching Rooted::address, since it is used outside of tracing in a couple of
places.

Differential Revision: https://phabricator.services.mozilla.com/D89012
Author: Jon Coppeard
Date: 2020-09-02 19:32:25 +00:00
Parent: 49959262e5
Commit: fb11d9f496
15 changed files with 52 additions and 57 deletions
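
To make the motivation concrete, here is a minimal sketch (hypothetical, not the real Barrier.h) of the pattern being renamed: the normal setter fires a pre-write barrier on the outgoing value, while the escape hatch skips it, so 'unbarrieredSet' names the actual hazard where 'unsafeSet' only warned that one existed.

// Minimal sketch of a pre-barriered pointer wrapper; all names here are
// illustrative stand-ins, not SpiderMonkey's real classes.
#include <cstdio>

struct Thing {
  // Stand-in for the GC's pre-write barrier on the value being overwritten.
  static void writeBarrierPre(Thing* v) {
    if (v) {
      std::printf("pre-barrier fired on %p\n", static_cast<void*>(v));
    }
  }
};

template <typename T>
class PreBarrieredLike {
  T* value = nullptr;

 public:
  // Normal write: barrier the old value so incremental marking stays correct.
  void set(T* v) {
    T::writeBarrierPre(value);
    value = v;
  }
  // Skips the barrier. Only valid when the GC provably cannot need it,
  // e.g. first-time initialization or teardown; hence 'unbarriered'.
  void unbarrieredSet(T* v) { value = v; }
};

int main() {
  Thing a, b;
  PreBarrieredLike<Thing> p;
  p.unbarrieredSet(&a);  // init: no previous value worth barriering
  p.set(&b);             // normal write: pre-barrier fires on |&a|
}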

View file

@@ -470,7 +470,7 @@ static void TraceKey(Range& r, const HashableValue& key, JSTracer* trc) {
}
// Clear newKey to avoid the barrier in ~PreBarriered.
- newKey.unsafeClear();
+ newKey.unbarrieredClear();
}
void MapObject::trace(JSTracer* trc, JSObject* obj) {

View file

@@ -58,7 +58,7 @@ class HashableValue {
void trace(JSTracer* trc) { TraceEdge(trc, &value, "HashableValue"); }
// Clear the value without invoking the pre-barrier.
- void unsafeClear() { value.unsafeSet(UndefinedValue()); }
+ void unbarrieredClear() { value.unbarrieredSet(UndefinedValue()); }
};
template <typename Wrapper>

View file

@@ -635,9 +635,7 @@ class TypedObject : public JSObject {
// callee here is the type descriptor.
static MOZ_MUST_USE bool construct(JSContext* cx, unsigned argc, Value* vp);
- Shape** addressOfShapeFromGC() {
-   return shape_.unsafeUnbarrieredForTracing();
- }
+ Shape** addressOfShapeFromGC() { return shape_.unbarrieredAddress(); }
};
using HandleTypedObject = Handle<TypedObject*>;

View file

@@ -324,7 +324,7 @@ struct InternalBarrierMethods {};
template <typename T>
struct InternalBarrierMethods<T*> {
- static bool isMarkable(T* v) { return v != nullptr; }
+ static bool isMarkable(const T* v) { return v != nullptr; }
static void preBarrier(T* v) { T::writeBarrierPre(v); }
@@ -426,7 +426,7 @@ class MOZ_NON_MEMMOVABLE BarrieredBase {
// instantiation. Friending to the generic template leads to a number of
// unintended consequences, including template resolution ambiguity and a
// circular dependency with Tracing.h.
- T* unsafeUnbarrieredForTracing() const { return const_cast<T*>(&value); }
+ T* unbarrieredAddress() const { return const_cast<T*>(&value); }
};
// Base class for barriered pointer types that intercept only writes.
@@ -447,7 +447,7 @@ class WriteBarriered : public BarrieredBase<T>,
// Use this if you want to change the value without invoking barriers.
// Obviously this is dangerous unless you know the barrier is not needed.
- void unsafeSet(const T& v) { this->value = v; }
+ void unbarrieredSet(const T& v) { this->value = v; }
// For users who need to manually barrier the raw types.
static void writeBarrierPre(const T& v) {
@@ -798,14 +798,16 @@ class WeakHeapPtr : public ReadBarriered<T>,
const T& operator->() const { return get(); }
- T* unsafeGet() { return &this->value; }
- T const* unsafeGet() const { return &this->value; }
void set(const T& v) {
AssertTargetIsNotGray(v);
setUnchecked(v);
}
+ void unbarrieredSet(const T& v) {
+   AssertTargetIsNotGray(v);
+   this->value = v;
+ }
private:
void setUnchecked(const T& v) {
T tmp = this->value;
@@ -1078,7 +1080,7 @@ struct HeapPtrHasher {
static HashNumber hash(Lookup obj) { return DefaultHasher<T>::hash(obj); }
static bool match(const Key& k, Lookup l) { return k.get() == l; }
- static void rekey(Key& k, const Key& newKey) { k.unsafeSet(newKey); }
+ static void rekey(Key& k, const Key& newKey) { k.unbarrieredSet(newKey); }
};
template <class T>
@@ -1088,7 +1090,7 @@ struct PreBarrieredHasher {
static HashNumber hash(Lookup obj) { return DefaultHasher<T>::hash(obj); }
static bool match(const Key& k, Lookup l) { return k.get() == l; }
- static void rekey(Key& k, const Key& newKey) { k.unsafeSet(newKey); }
+ static void rekey(Key& k, const Key& newKey) { k.unbarrieredSet(newKey); }
};
/* Useful for hashtables with a WeakHeapPtr as key. */
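
For readers outside the GC: the point of unbarrieredAddress (like Rooted::address) is to hand tracing code a raw slot it can rewrite in place when a moving collector relocates the target; neither the read of the stale pointer nor the store of the new one should fire barriers. A toy illustration of that pattern, with invented names:

// Toy illustration of tracing through a raw address; names are hypothetical.
struct Cell {
  Cell* forwarded = nullptr;  // set when a moving GC relocates this cell
};

template <typename T>
class BarrieredBaseLike {
 protected:
  T* value = nullptr;

 public:
  // Expose the slot itself so a tracer can update it in place.
  T** unbarrieredAddress() { return &value; }
};

// What a tracer does with the address: chase the forwarding pointer and
// patch the edge, bypassing read and write barriers entirely.
inline void traceEdge(Cell** edge) {
  if (*edge && (*edge)->forwarded) {
    *edge = (*edge)->forwarded;
  }
}

int main() {
  Cell oldLoc, newLoc;
  oldLoc.forwarded = &newLoc;
  BarrieredBaseLike<Cell> ptr;
  *ptr.unbarrieredAddress() = &oldLoc;  // initialize without barriers
  traceEdge(ptr.unbarrieredAddress());  // GC fixes the edge in place
  return *ptr.unbarrieredAddress() == &newLoc ? 0 : 1;
}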

View file

@@ -730,7 +730,7 @@ class alignas(gc::CellAlignBytes) CellWithTenuredGCPointer : public BaseCell {
// As above, no flags are expected to be set here.
MOZ_ASSERT(!IsInsideNursery(newValue));
PtrT::writeBarrierPre(headerPtr());
- unsafeSetHeaderPtr(newValue);
+ unbarrieredSetHeaderPtr(newValue);
}
public:
@@ -744,7 +744,7 @@ class alignas(gc::CellAlignBytes) CellWithTenuredGCPointer : public BaseCell {
return reinterpret_cast<PtrT*>(this->header_);
}
- void unsafeSetHeaderPtr(PtrT* newValue) {
+ void unbarrieredSetHeaderPtr(PtrT* newValue) {
uintptr_t data = uintptr_t(newValue);
MOZ_ASSERT(this->flags() == 0);
MOZ_ASSERT((data & Cell::RESERVED_MASK) == 0);

View file

@@ -2999,7 +2999,7 @@ void js::gc::StoreBuffer::SlotsEdge::trace(TenuringTracer& mover) const {
MOZ_ASSERT(clampedStart <= clampedEnd);
mover.traceSlots(
static_cast<HeapSlot*>(obj->getDenseElements() + clampedStart)
-     ->unsafeUnbarrieredForTracing(),
+     ->unbarrieredAddress(),
clampedEnd - clampedStart);
} else {
uint32_t start = std::min(start_, obj->slotSpan());
@@ -3211,7 +3211,7 @@ void js::TenuringTracer::traceObject(JSObject* obj) {
if (!nobj->hasEmptyElements() && !nobj->denseElementsAreCopyOnWrite() &&
ObjectDenseElementsMayBeMarkable(nobj)) {
HeapSlotArray elements = nobj->getDenseElements();
- Value* elems = elements.begin()->unsafeUnbarrieredForTracing();
+ Value* elems = elements.begin()->unbarrieredAddress();
traceSlots(elems, elems + nobj->getDenseInitializedLength());
}
@@ -3226,12 +3226,11 @@ void js::TenuringTracer::traceObjectSlots(NativeObject* nobj, uint32_t start,
HeapSlot* dynEnd;
nobj->getSlotRange(start, length, &fixedStart, &fixedEnd, &dynStart, &dynEnd);
if (fixedStart) {
- traceSlots(fixedStart->unsafeUnbarrieredForTracing(),
-            fixedEnd->unsafeUnbarrieredForTracing());
+ traceSlots(fixedStart->unbarrieredAddress(),
+            fixedEnd->unbarrieredAddress());
}
if (dynStart) {
- traceSlots(dynStart->unsafeUnbarrieredForTracing(),
-            dynEnd->unsafeUnbarrieredForTracing());
+ traceSlots(dynStart->unbarrieredAddress(), dynEnd->unbarrieredAddress());
}
}
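
The range-tracing calls above take the unbarriered address of the first slot and then walk the whole span with plain pointer arithmetic; that only works if the barriered wrapper is layout-identical to the wrapped value. A hypothetical compile-time check of that assumption:

// Hypothetical check that a barriered slot wrapper can be traced as a raw
// array of the wrapped type; HeapSlotLike/ValueLike are illustrative names.
struct ValueLike {
  void* bits = nullptr;
};

template <typename T>
class HeapSlotLike {
  T value;  // the only data member: no vtable, no extra fields

 public:
  T* unbarrieredAddress() { return &value; }
};

static_assert(sizeof(HeapSlotLike<ValueLike>) == sizeof(ValueLike),
              "wrapper must be layout-compatible to trace slot ranges");

int main() { return 0; }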

View file

@@ -74,8 +74,7 @@ inline bool IsMarkedUnbarriered(JSRuntime* rt, T* thingp) {
// are always reported as being marked.
template <typename T>
inline bool IsMarked(JSRuntime* rt, BarrieredBase<T>* thingp) {
- return IsMarkedInternal(rt,
-                         ConvertToBase(thingp->unsafeUnbarrieredForTracing()));
+ return IsMarkedInternal(rt, ConvertToBase(thingp->unbarrieredAddress()));
}
template <typename T>
@@ -86,13 +85,13 @@ inline bool IsAboutToBeFinalizedUnbarriered(T* thingp) {
template <typename T>
inline bool IsAboutToBeFinalized(const WriteBarriered<T>* thingp) {
return IsAboutToBeFinalizedInternal(
-     ConvertToBase(thingp->unsafeUnbarrieredForTracing()));
+     ConvertToBase(thingp->unbarrieredAddress()));
}
template <typename T>
inline bool IsAboutToBeFinalized(ReadBarriered<T>* thingp) {
return IsAboutToBeFinalizedInternal(
-     ConvertToBase(thingp->unsafeUnbarrieredForTracing()));
+     ConvertToBase(thingp->unbarrieredAddress()));
}
inline bool IsAboutToBeFinalizedDuringMinorSweep(Cell* cell);

View file

@@ -122,13 +122,13 @@ inline void AssertRootMarkingPhase(JSTracer* trc) {}
template <typename T>
inline void TraceEdge(JSTracer* trc, const WriteBarriered<T>* thingp,
const char* name) {
- gc::TraceEdgeInternal(
-     trc, gc::ConvertToBase(thingp->unsafeUnbarrieredForTracing()), name);
+ gc::TraceEdgeInternal(trc, gc::ConvertToBase(thingp->unbarrieredAddress()),
+                       name);
}
template <typename T>
inline void TraceEdge(JSTracer* trc, WeakHeapPtr<T>* thingp, const char* name) {
- gc::TraceEdgeInternal(trc, gc::ConvertToBase(thingp->unsafeGet()), name);
+ gc::TraceEdgeInternal(trc, gc::ConvertToBase(thingp->unbarrieredAddress()), name);
}
template <class BC, class T>
@@ -138,7 +138,7 @@ inline void TraceCellHeaderEdge(JSTracer* trc,
T* thing = thingp->headerPtr();
gc::TraceEdgeInternal(trc, gc::ConvertToBase(&thing), name);
if (thing != thingp->headerPtr()) {
-   thingp->unsafeSetHeaderPtr(thing);
+   thingp->unbarrieredSetHeaderPtr(thing);
}
}
@@ -169,7 +169,7 @@ inline void TraceNullableCellHeaderEdge(
if (thing) {
gc::TraceEdgeInternal(trc, gc::ConvertToBase(&thing), name);
if (thing != thingp->headerPtr()) {
-   thingp->unsafeSetHeaderPtr(thing);
+   thingp->unbarrieredSetHeaderPtr(thing);
}
}
}
@@ -186,7 +186,7 @@ inline void TraceRoot(JSTracer* trc, T* thingp, const char* name) {
template <typename T>
inline void TraceRoot(JSTracer* trc, WeakHeapPtr<T>* thingp, const char* name) {
- TraceRoot(trc, thingp->unsafeGet(), name);
+ TraceRoot(trc, thingp->unbarrieredAddress(), name);
}
// Identical to TraceRoot, except that this variant will not crash if |*thingp|
@@ -203,7 +203,7 @@ inline void TraceNullableRoot(JSTracer* trc, T* thingp, const char* name) {
template <typename T>
inline void TraceNullableRoot(JSTracer* trc, WeakHeapPtr<T>* thingp,
const char* name) {
- TraceNullableRoot(trc, thingp->unsafeGet(), name);
+ TraceNullableRoot(trc, thingp->unbarrieredAddress(), name);
}
// Like TraceEdge, but for edges that do not use one of the automatic barrier
@@ -229,7 +229,7 @@ template <typename T>
inline bool TraceWeakEdge(JSTracer* trc, BarrieredBase<T>* thingp,
const char* name) {
return gc::TraceEdgeInternal(
-     trc, gc::ConvertToBase(thingp->unsafeUnbarrieredForTracing()), name);
+     trc, gc::ConvertToBase(thingp->unbarrieredAddress()), name);
}
// Trace all edges contained in the given array.
@@ -237,8 +237,8 @@ inline bool TraceWeakEdge(JSTracer* trc, BarrieredBase<T>* thingp,
template <typename T>
void TraceRange(JSTracer* trc, size_t len, BarrieredBase<T>* vec,
const char* name) {
- gc::TraceRangeInternal(
-     trc, len, gc::ConvertToBase(vec[0].unsafeUnbarrieredForTracing()), name);
+ gc::TraceRangeInternal(trc, len,
+                        gc::ConvertToBase(vec[0].unbarrieredAddress()), name);
}
// Trace all root edges in the given array.
@@ -260,7 +260,7 @@ template <typename T>
void TraceCrossCompartmentEdge(JSTracer* trc, JSObject* src,
const WriteBarriered<T>* dst, const char* name) {
TraceManuallyBarrieredCrossCompartmentEdge(
-     trc, src, gc::ConvertToBase(dst->unsafeUnbarrieredForTracing()), name);
+     trc, src, gc::ConvertToBase(dst->unbarrieredAddress()), name);
}
// Trace a weak map key. For debugger weak maps these may be cross compartment,
@@ -274,8 +274,7 @@ inline void TraceWeakMapKeyEdge(JSTracer* trc, Zone* weakMapZone,
const WriteBarriered<T>* thingp,
const char* name) {
TraceWeakMapKeyEdgeInternal(
-     trc, weakMapZone,
-     gc::ConvertToBase(thingp->unsafeUnbarrieredForTracing()), name);
+     trc, weakMapZone, gc::ConvertToBase(thingp->unbarrieredAddress()), name);
}
// Permanent atoms and well-known symbols are shared between runtimes and must

View file

@@ -926,7 +926,7 @@ class GlobalObject : public NativeObject {
}
void clearSourceURLSHolder() {
// This is called at the start of shrinking GCs, so avoids barriers.
-   getSlotRef(SOURCE_URLS).unsafeSet(UndefinedValue());
+   getSlotRef(SOURCE_URLS).unbarrieredSet(UndefinedValue());
}
// A class used in place of a prototype during off-thread parsing.

View file

@@ -123,13 +123,12 @@ inline JSFunction* CloneFunctionObjectIfNotSingleton(
// value to which we could sensibly initialize this.
MOZ_MAKE_MEM_UNDEFINED(&fun->u, sizeof(u));
- // Safe: we're initializing for the very first time.
- fun->atom_.unsafeSet(nullptr);
+ fun->atom_.init(nullptr);
if (kind == js::gc::AllocKind::FUNCTION_EXTENDED) {
fun->setFlags(FunctionFlags::EXTENDED);
for (js::GCPtrValue& extendedSlot : fun->toExtended()->extendedSlots) {
-   extendedSlot.unsafeSet(JS::UndefinedValue());
+   extendedSlot.init(JS::UndefinedValue());
}
} else {
fun->setFlags(0);
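
Note the switch here from unsafeSet to init rather than unbarrieredSet: a first write into freshly allocated memory has no previous value to pre-barrier, and an initializing store can say so explicitly. A rough sketch of that distinction, with invented names:

// Rough sketch of init vs. unbarrieredSet; GCPtrLike is a hypothetical name.
#include <cassert>

template <typename T>
class GCPtrLike {
  T* value = nullptr;

 public:
  // First write into a freshly allocated cell: there is no old value, so
  // skipping the pre-barrier is correct by construction, not by caller fiat.
  void init(T* v) {
    assert(value == nullptr);
    value = v;
  }
  // Skips the barrier unconditionally; the caller must justify it.
  void unbarrieredSet(T* v) { value = v; }
};

int main() {
  int x = 0;
  GCPtrLike<int> p;
  p.init(&x);  // initialization, not mutation
}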

View file

@@ -1348,8 +1348,9 @@ struct ObjectGroupRealm::AllocationSiteKey {
}
bool needsSweep() {
- return IsAboutToBeFinalizedUnbarriered(script.unsafeGet()) ||
-        (proto && IsAboutToBeFinalizedUnbarriered(proto.unsafeGet()));
+ return IsAboutToBeFinalizedUnbarriered(script.unbarrieredAddress()) ||
+        (proto &&
+         IsAboutToBeFinalizedUnbarriered(proto.unbarrieredAddress()));
}
bool operator==(const AllocationSiteKey& other) const {

View file

@@ -31,13 +31,13 @@ js::GlobalObject* JS::Realm::maybeGlobal() const {
}
js::LexicalEnvironmentObject* JS::Realm::unbarrieredLexicalEnvironment() const {
- return *lexicalEnv_.unsafeGet();
+ return lexicalEnv_.unbarrieredGet();
}
inline bool JS::Realm::globalIsAboutToBeFinalized() {
MOZ_ASSERT(zone_->isGCSweeping());
return global_ &&
-        js::gc::IsAboutToBeFinalizedUnbarriered(global_.unsafeGet());
+        js::gc::IsAboutToBeFinalizedUnbarriered(global_.unbarrieredAddress());
}
inline bool JS::Realm::hasLiveGlobal() const {

View file

@@ -312,9 +312,8 @@ void Realm::traceRoots(JSTracer* trc,
//
// If a realm is on-stack, we mark its global so that
// JSContext::global() remains valid.
- if (shouldTraceGlobal() && global_.unbarrieredGet()) {
-   TraceRoot(trc, global_.unsafeUnbarrieredForTracing(),
-             "on-stack realm global");
+ if (shouldTraceGlobal() && global_) {
+   TraceRoot(trc, global_.unbarrieredAddress(), "on-stack realm global");
}
}
@@ -453,9 +452,9 @@ void Realm::fixupAfterMovingGC(JSTracer* trc) {
}
void Realm::fixupGlobal() {
- GlobalObject* global = *global_.unsafeGet();
+ GlobalObject* global = global_.unbarrieredGet();
if (global) {
-   global_.set(MaybeForwarded(global));
+   global_.unbarrieredSet(MaybeForwarded(global));
}
}
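
fixupGlobal runs inside the collector after a moving GC, so it reads and writes the slot without barriers and chases the forwarding pointer itself. A miniature version of that pattern, with stand-in names (MaybeForwardedLike for the real MaybeForwarded):

// Miniature fixup-after-moving-GC; all names are illustrative stand-ins.
struct GlobalLike {
  GlobalLike* forwardingAddress = nullptr;  // set if the GC moved this cell
};

inline GlobalLike* MaybeForwardedLike(GlobalLike* g) {
  return g->forwardingAddress ? g->forwardingAddress : g;
}

template <typename T>
class WeakHeapPtrLike {
  T* value = nullptr;

 public:
  T* unbarrieredGet() const { return value; }  // no read barrier: GC context
  void unbarrieredSet(T* v) { value = v; }     // no write barrier either
};

int main() {
  GlobalLike oldLoc, newLoc;
  oldLoc.forwardingAddress = &newLoc;
  WeakHeapPtrLike<GlobalLike> global;
  global.unbarrieredSet(&oldLoc);
  // What Realm::fixupGlobal() does, in miniature:
  if (GlobalLike* g = global.unbarrieredGet()) {
    global.unbarrieredSet(MaybeForwardedLike(g));
  }
  return global.unbarrieredGet() == &newLoc ? 0 : 1;
}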

View file

@@ -836,14 +836,13 @@ class JS::Realm : public JS::shadow::Realm {
inline js::Handle<js::GlobalObject*> JSContext::global() const {
/*
- * It's safe to use |unsafeGet()| here because any realm that is
- * on-stack will be marked automatically, so there's no need for a read
- * barrier on it. Once the realm is popped, the handle is no longer
- * safe to use.
+ * It's safe to use |unbarrieredGet()| here because any realm that is on-stack
+ * will be marked automatically, so there's no need for a read barrier on
+ * it. Once the realm is popped, the handle is no longer safe to use.
*/
MOZ_ASSERT(realm_, "Caller needs to enter a realm first");
return js::Handle<js::GlobalObject*>::fromMarkedLocation(
-     realm_->global_.unsafeGet());
+     realm_->global_.unbarrieredAddress());
}
namespace js {

View file

@@ -127,7 +127,7 @@ inline Shape* Shape::new_(JSContext* cx, Handle<StackShape> other,
inline void Shape::updateBaseShapeAfterMovingGC() {
BaseShape* base = this->base();
if (IsForwarded(base)) {
-   unsafeSetHeaderPtr(Forwarded(base));
+   unbarrieredSetHeaderPtr(Forwarded(base));
}
}