Bug 1647803 - Move cell header field to the Cell class and remove CellHeader classes r=tcampbell

Differential Revision: https://phabricator.services.mozilla.com/D80839
Jon Coppeard 2020-06-30 08:55:59 +00:00
Parent 50f1f5f460
Commit 35ccce3ac8
27 changed files with 236 additions and 349 deletions

View File

@ -49,7 +49,6 @@ namespace gc {
class Arena;
enum class AllocKind : uint8_t;
class CellHeaderWithLengthAndFlags;
struct Chunk;
class StoreBuffer;
class TenuredCell;
@ -102,49 +101,41 @@ class CellColor {
Color color;
};
// The cell header contains flags used by the GC. All GC cells must start with a
// CellHeader, or one of its derived classes that allows use of spare bits to
// store data.
class CellHeader {
// [SMDOC] GC Cell
//
// A GC cell is the ultimate base class for all GC things. All types allocated
// on the GC heap extend either gc::Cell or gc::TenuredCell. If a type is always
// tenured, prefer the TenuredCell class as base.
//
// The first word of Cell is a uintptr_t that reserves the low three bits for GC
// purposes. The remaining bits are available to sub-classes and can be used to
// store a pointer to another gc::Cell. It can also be used for temporary
// storage (see setTemporaryGCUnsafeData). To make use of the remaining space,
// sub-classes derive from a helper class such as TenuredCellWithNonGCPointer.
//
// During a moving GC operation a Cell may be marked as forwarded. This indicates
// that a gc::RelocationOverlay is currently stored in the Cell's memory and
// should be used to find the new location of the Cell.
struct Cell {
protected:
// Cell header word. Stores GC flags and derived class data.
uintptr_t header_;
public:
static_assert(gc::CellFlagBitsReservedForGC >= 3,
"Not enough flag bits reserved for GC");
static constexpr uintptr_t RESERVED_MASK =
BitMask(gc::CellFlagBitsReservedForGC);
// Indicates if the cell has been forwarded (moved) by generational or
// Indicates whether the cell has been forwarded (moved) by a generational or
// compacting GC and is now a RelocationOverlay.
static constexpr uintptr_t FORWARD_BIT = Bit(0);
// Bits 1 and 2 are currently unused.
bool isForwarded() const { return header_ & FORWARD_BIT; }
uintptr_t flags() const { return header_ & RESERVED_MASK; }
protected:
// NOTE: This word can also be used for temporary storage, see
// setTemporaryGCUnsafeData.
uintptr_t header_;
friend class CellHeaderWithLengthAndFlags;
};
// [SMDOC] GC Cell
//
// A GC cell is the base class for all GC things. All types allocated on the GC
// heap extend either gc::Cell or gc::TenuredCell. If a type is always tenured,
// prefer the TenuredCell class as base.
//
// The first word (a pointer or uintptr_t) of each Cell must reserve the low
// three bits for GC purposes. The remaining bits are available to sub-classes
// and typically store a pointer to another gc::Cell.
//
// During a moving GC operation a Cell may be marked as forwarded. This indicates
// that a gc::RelocationOverlay is currently stored in the Cell's memory and
// should be used to find the new location of the Cell.
struct alignas(gc::CellAlignBytes) Cell {
public:
MOZ_ALWAYS_INLINE bool isTenured() const { return !IsInsideNursery(this); }
MOZ_ALWAYS_INLINE const TenuredCell& asTenured() const;
MOZ_ALWAYS_INLINE TenuredCell& asTenured();
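
For illustration only (not part of this patch): a minimal standalone sketch of the header-word scheme the new comment above describes, with the low CellFlagBitsReservedForGC bits reserved for GC flags and the remaining bits left for subclass data. All names below (namespace sketch, CellWithTag, etc.) are hypothetical stand-ins, not SpiderMonkey APIs.

#include <cassert>
#include <cstddef>
#include <cstdint>

namespace sketch {

constexpr uintptr_t Bit(size_t n) { return uintptr_t(1) << n; }
constexpr uintptr_t BitMask(size_t n) { return Bit(n) - 1; }
constexpr size_t CellFlagBitsReservedForGC = 3;

struct Cell {
 protected:
  uintptr_t header_ = 0;  // low three bits: GC flags; upper bits: subclass data

 public:
  static constexpr uintptr_t RESERVED_MASK = BitMask(CellFlagBitsReservedForGC);
  static constexpr uintptr_t FORWARD_BIT = Bit(0);

  bool isForwarded() const { return header_ & FORWARD_BIT; }
  uintptr_t flags() const { return header_ & RESERVED_MASK; }
};

// A subclass keeping a small payload in the non-reserved bits.
struct CellWithTag : public Cell {
  void setTag(uint16_t tag) {
    header_ = (header_ & RESERVED_MASK) |
              (uintptr_t(tag) << CellFlagBitsReservedForGC);
  }
  uint16_t tag() const {
    return uint16_t(header_ >> CellFlagBitsReservedForGC);
  }
};

}  // namespace sketch

int main() {
  sketch::CellWithTag cell;
  cell.setTag(42);
  assert(cell.tag() == 42);
  assert(!cell.isForwarded());
  assert(cell.flags() == 0);
}
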
@ -178,10 +169,6 @@ struct alignas(gc::CellAlignBytes) Cell {
static MOZ_ALWAYS_INLINE bool needWriteBarrierPre(JS::Zone* zone);
inline bool isForwarded() const {
return reinterpret_cast<const CellHeader*>(this)->isForwarded();
}
template <typename T, typename = std::enable_if_t<JS::IsBaseTraceType_v<T>>>
inline bool is() const {
return getTraceKind() == JS::MapTypeToTraceKind<T>::kind;
@ -558,11 +545,11 @@ bool TenuredCell::isAligned() const {
#endif
// Cell header for GC things that have 32-bit length and 32-bit flags (currently
// JSString and BigInt).
// Base class for nursery-allocatable GC things that have 32-bit length and
// 32-bit flags (currently JSString and BigInt).
//
// This tries to store both in CellHeader::header_, but if that isn't large
// enough the length is stored separately.
// This tries to store both in Cell::header_, but if that isn't large enough the
// length is stored separately.
//
// 32 0
// ------------------
@ -572,119 +559,104 @@ bool TenuredCell::isAligned() const {
// The low bits of the flags word (see CellFlagBitsReservedForGC) are reserved
// for GC. Derived classes must ensure they don't use these flags for non-GC
// purposes.
class CellHeaderWithLengthAndFlags {
// Use composition rather than inheritance so this ends up a standard layout
// type.
CellHeader header_;
class alignas(gc::CellAlignBytes) CellWithLengthAndFlags : public Cell {
#if JS_BITS_PER_WORD == 32
// Additional storage for length if |header_| is too small to fit both.
uint32_t length_;
#endif
uintptr_t& header() { return header_.header_; }
const uintptr_t& header() const { return header_.header_; }
public:
uint32_t lengthField() const {
protected:
uint32_t headerLengthField() const {
#if JS_BITS_PER_WORD == 32
return length_;
#else
return uint32_t(header() >> 32);
return uint32_t(header_ >> 32);
#endif
}
uint32_t flagsField() const { return uint32_t(header()); }
uint32_t headerFlagsField() const { return uint32_t(header_); }
void setFlagBit(uint32_t flag) { header() |= uintptr_t(flag); }
void clearFlagBit(uint32_t flag) { header() &= ~uintptr_t(flag); }
void toggleFlagBit(uint32_t flag) { header() ^= uintptr_t(flag); }
void setHeaderFlagBit(uint32_t flag) { header_ |= uintptr_t(flag); }
void clearHeaderFlagBit(uint32_t flag) { header_ &= ~uintptr_t(flag); }
void toggleHeaderFlagBit(uint32_t flag) { header_ ^= uintptr_t(flag); }
void setLengthAndFlags(uint32_t len, uint32_t flags) {
void setHeaderLengthAndFlags(uint32_t len, uint32_t flags) {
#if JS_BITS_PER_WORD == 32
header() = flags;
header_ = flags;
length_ = len;
#else
header() = (uint64_t(len) << 32) | uint64_t(flags);
#endif
}
bool operator==(const CellHeaderWithLengthAndFlags& other) const {
#if JS_BITS_PER_WORD == 32
return length_ == other.length_ && flagsField() == other.flagsField();
#else
return header() == other.header();
header_ = (uint64_t(len) << 32) | uint64_t(flags);
#endif
}
// Sub classes can store temporary data in the flags word. This is not GC safe
// and users must ensure flags/length are never checked (including by asserts)
// while this data is stored. Use of this method is strongly discouraged!
void setTemporaryGCUnsafeData(uintptr_t data) { header() = data; }
void setTemporaryGCUnsafeData(uintptr_t data) { header_ = data; }
// To get back the data, values to safely re-initialize clobbered flags
// must be provided.
uintptr_t unsetTemporaryGCUnsafeData(uint32_t len, uint32_t flags) {
uintptr_t data = header();
setLengthAndFlags(len, flags);
uintptr_t data = header_;
setHeaderLengthAndFlags(len, flags);
return data;
}
const js::gc::CellHeader& cellHeader() const { return header_; }
public:
// Returns the offset of header_. JIT code should use offsetOfFlags
// below.
static constexpr size_t offsetOfRawFlagsField() {
return offsetof(CellHeaderWithLengthAndFlags, header_);
static constexpr size_t offsetOfRawHeaderFlagsField() {
return offsetof(CellWithLengthAndFlags, header_);
}
// Offsets for direct field access from jit code. A number of places directly
// access 32-bit length and flags fields so do endian trickery here.
#if JS_BITS_PER_WORD == 32
static constexpr size_t offsetOfFlags() {
return offsetof(CellHeaderWithLengthAndFlags, header_);
static constexpr size_t offsetOfHeaderFlags() {
return offsetof(CellWithLengthAndFlags, header_);
}
static constexpr size_t offsetOfLength() {
return offsetof(CellHeaderWithLengthAndFlags, length_);
static constexpr size_t offsetOfHeaderLength() {
return offsetof(CellWithLengthAndFlags, length_);
}
#elif MOZ_LITTLE_ENDIAN()
static constexpr size_t offsetOfFlags() {
return offsetof(CellHeaderWithLengthAndFlags, header_);
static constexpr size_t offsetOfHeaderFlags() {
return offsetof(CellWithLengthAndFlags, header_);
}
static constexpr size_t offsetOfLength() {
return offsetof(CellHeaderWithLengthAndFlags, header_) + sizeof(uint32_t);
static constexpr size_t offsetOfHeaderLength() {
return offsetof(CellWithLengthAndFlags, header_) + sizeof(uint32_t);
}
#else
static constexpr size_t offsetOfFlags() {
return offsetof(CellHeaderWithLengthAndFlags, header_) + sizeof(uint32_t);
static constexpr size_t offsetOfHeaderFlags() {
return offsetof(CellWithLengthAndFlags, header_) + sizeof(uint32_t);
}
static constexpr size_t offsetOfLength() {
return offsetof(CellHeaderWithLengthAndFlags, header_);
static constexpr size_t offsetOfHeaderLength() {
return offsetof(CellWithLengthAndFlags, header_);
}
#endif
};
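
A standalone sketch (not part of the patch) of the 64-bit packing described above: the 32-bit flags live in the low half of the header word and the 32-bit length in the high half, which is why the direct-access offsets used by JIT code depend on byte order. On 32-bit builds the real class stores the length in a separate field instead. The names below are illustrative only.

#include <cassert>
#include <cstdint>
#include <cstring>

namespace sketch {

class LengthAndFlags {
  uint64_t header_ = 0;  // high 32 bits: length, low 32 bits: flags

 public:
  void setLengthAndFlags(uint32_t len, uint32_t flags) {
    header_ = (uint64_t(len) << 32) | uint64_t(flags);
  }
  uint32_t length() const { return uint32_t(header_ >> 32); }
  uint32_t flags() const { return uint32_t(header_); }
};

}  // namespace sketch

int main() {
  sketch::LengthAndFlags h;
  h.setLengthAndFlags(7, 0x10);
  assert(h.length() == 7);
  assert(h.flags() == 0x10);

  // Direct 32-bit reads, as JIT code does: on a little-endian machine the
  // first half holds the flags and the second half holds the length, so the
  // field offsets must be chosen per endianness.
  uint32_t halves[2];
  std::memcpy(halves, &h, sizeof(halves));
  (void)halves;
}
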
// Cell header for GC things that allows storing a non-GC thing pointer in the
// first word.
// Base class for non-nursery-allocatable GC things that allows storing a non-GC
// thing pointer in the first word.
//
// The low bits of the word (see CellFlagBitsReservedForGC) are reserved for GC.
template <class PtrT>
class CellHeaderWithNonGCPointer : public CellHeader {
class alignas(gc::CellAlignBytes) TenuredCellWithNonGCPointer
: public TenuredCell {
static_assert(!std::is_pointer_v<PtrT>,
"PtrT should be the type of the referent, not of the pointer");
static_assert(
!std::is_base_of_v<Cell, PtrT>,
"Don't use CellHeaderWithNonGCPointer for pointers to GC things");
"Don't use TenuredCellWithNonGCPointer for pointers to GC things");
public:
CellHeaderWithNonGCPointer() = default;
explicit CellHeaderWithNonGCPointer(PtrT* initial) : CellHeader() {
protected:
TenuredCellWithNonGCPointer() = default;
explicit TenuredCellWithNonGCPointer(PtrT* initial) {
uintptr_t data = uintptr_t(initial);
MOZ_ASSERT((data & RESERVED_MASK) == 0);
header_ = data;
}
PtrT* ptr() const {
PtrT* headerPtr() const {
// Currently we never observe any flags set here because this base class is
// only used for JSObject (for which the nursery kind flags are always
// clear) or GC things that are always tenured (for which the nursery kind
@ -694,7 +666,7 @@ class CellHeaderWithNonGCPointer : public CellHeader {
return reinterpret_cast<PtrT*>(header_);
}
void setPtr(PtrT* newValue) {
void setHeaderPtr(PtrT* newValue) {
// As above, no flags are expected to be set here.
uintptr_t data = uintptr_t(newValue);
MOZ_ASSERT(flags() == 0);
@ -702,8 +674,9 @@ class CellHeaderWithNonGCPointer : public CellHeader {
header_ = data;
}
static constexpr size_t offsetOfPtr() {
return offsetof(CellHeaderWithNonGCPointer, header_);
public:
static constexpr size_t offsetOfHeaderPtr() {
return offsetof(TenuredCellWithNonGCPointer, header_);
}
};
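
A rough standalone sketch (not part of the patch) of the pattern TenuredCellWithNonGCPointer implements: a plain pointer is stored directly in the header word, relying on the pointee's alignment to keep the reserved low bits clear. WithNonGCPointer and CodeThing are hypothetical names used only for this example.

#include <cassert>
#include <cstdint>

namespace sketch {

constexpr uintptr_t RESERVED_MASK = 0x7;  // low three bits reserved for GC

template <class PtrT>
class WithNonGCPointer {
 protected:
  uintptr_t header_ = 0;

  explicit WithNonGCPointer(PtrT* initial) {
    uintptr_t data = uintptr_t(initial);
    assert((data & RESERVED_MASK) == 0);  // alignment keeps the GC bits clear
    header_ = data;
  }

 public:
  PtrT* headerPtr() const { return reinterpret_cast<PtrT*>(header_); }
};

// Hypothetical user, roughly how JitCode keeps its raw code pointer in the
// header after this patch.
class CodeThing : public WithNonGCPointer<uint8_t> {
 public:
  explicit CodeThing(uint8_t* code) : WithNonGCPointer<uint8_t>(code) {}
  uint8_t* raw() const { return headerPtr(); }
};

}  // namespace sketch

int main() {
  alignas(8) static uint8_t buffer[16] = {};
  sketch::CodeThing thing(buffer);
  assert(thing.raw() == buffer);
}
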
@ -714,58 +687,60 @@ class CellHeaderWithNonGCPointer : public CellHeader {
//
// This includes a pre-write barrier when the pointer is updated. No post barrier
// is necessary as the pointer is always tenured.
template <class PtrT>
class CellHeaderWithTenuredGCPointer : public CellHeader {
template <class BaseCell, class PtrT>
class alignas(gc::CellAlignBytes) CellWithTenuredGCPointer : public BaseCell {
static void staticAsserts() {
// These static asserts are not in class scope because the PtrT may not be
// defined when this class template is instantiated.
static_assert(
std::is_same_v<BaseCell, Cell> || std::is_same_v<BaseCell, TenuredCell>,
"BaseCell must be either Cell or TenuredCell");
static_assert(
!std::is_pointer_v<PtrT>,
"PtrT should be the type of the referent, not of the pointer");
static_assert(
std::is_base_of_v<Cell, PtrT>,
"Only use CellHeaderWithTenuredGCPointer for pointers to GC things");
"Only use CellWithTenuredGCPointer for pointers to GC things");
}
public:
CellHeaderWithTenuredGCPointer() = default;
explicit CellHeaderWithTenuredGCPointer(PtrT* initial) : CellHeader() {
initPtr(initial);
}
protected:
CellWithTenuredGCPointer() = default;
explicit CellWithTenuredGCPointer(PtrT* initial) { initHeaderPtr(initial); }
void initPtr(PtrT* initial) {
void initHeaderPtr(PtrT* initial) {
MOZ_ASSERT(!IsInsideNursery(initial));
uintptr_t data = uintptr_t(initial);
MOZ_ASSERT((data & RESERVED_MASK) == 0);
MOZ_ASSERT((data & Cell::RESERVED_MASK) == 0);
this->header_ = data;
}
PtrT* ptr() const {
void setHeaderPtr(PtrT* newValue) {
// As above, no flags are expected to be set here.
MOZ_ASSERT(!IsInsideNursery(newValue));
PtrT::writeBarrierPre(headerPtr());
unsafeSetHeaderPtr(newValue);
}
public:
PtrT* headerPtr() const {
// Currently we never observe any flags set here because this base class is
// only used for GC things that are always tenured (for which the nursery
// kind flags are also always clear). This means we don't need to use
// masking to get and set the pointer.
staticAsserts();
MOZ_ASSERT(flags() == 0);
MOZ_ASSERT(this->flags() == 0);
return reinterpret_cast<PtrT*>(this->header_);
}
void setPtr(PtrT* newValue) {
// As above, no flags are expected to be set here.
MOZ_ASSERT(!IsInsideNursery(newValue));
PtrT::writeBarrierPre(ptr());
unsafeSetPtr(newValue);
}
void unsafeSetPtr(PtrT* newValue) {
void unsafeSetHeaderPtr(PtrT* newValue) {
uintptr_t data = uintptr_t(newValue);
MOZ_ASSERT(flags() == 0);
MOZ_ASSERT((data & RESERVED_MASK) == 0);
MOZ_ASSERT(this->flags() == 0);
MOZ_ASSERT((data & Cell::RESERVED_MASK) == 0);
this->header_ = data;
}
static constexpr size_t offsetOfPtr() {
return offsetof(CellHeaderWithTenuredGCPointer, header_);
static constexpr size_t offsetOfHeaderPtr() {
return offsetof(CellWithTenuredGCPointer, header_);
}
};
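
Also for context only: a sketch of the CellWithTenuredGCPointer idea, where updating the stored GC pointer goes through a pre-write barrier on the old value and no post-barrier is needed because the referent is always tenured. The barrier below is a stub and every name is a hypothetical stand-in; real GC bookkeeping is omitted.

#include <cassert>
#include <cstdint>

namespace sketch {

constexpr uintptr_t RESERVED_MASK = 0x7;

// Stand-in for a tenured GC thing providing a pre-write barrier hook.
struct FakeGCThing {
  static void writeBarrierPre(FakeGCThing* /*old*/) { /* record old value */ }
};

template <class PtrT>
class WithTenuredGCPointer {
 protected:
  uintptr_t header_ = 0;

  void initHeaderPtr(PtrT* initial) {
    assert((uintptr_t(initial) & RESERVED_MASK) == 0);
    header_ = uintptr_t(initial);
  }

 public:
  PtrT* headerPtr() const { return reinterpret_cast<PtrT*>(header_); }

  void setHeaderPtr(PtrT* newValue) {
    PtrT::writeBarrierPre(headerPtr());  // barrier on the overwritten value
    assert((uintptr_t(newValue) & RESERVED_MASK) == 0);
    header_ = uintptr_t(newValue);
  }
};

struct Owner : WithTenuredGCPointer<FakeGCThing> {
  explicit Owner(FakeGCThing* p) { initHeaderPtr(p); }
};

}  // namespace sketch

int main() {
  alignas(8) static sketch::FakeGCThing a, b;
  sketch::Owner owner(&a);
  assert(owner.headerPtr() == &a);
  owner.setHeaderPtr(&b);
  assert(owner.headerPtr() == &b);
}
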

View File

@ -314,17 +314,6 @@ static_assert(mozilla::ArrayLength(slotsToThingKind) ==
FOR_EACH_ALLOCKIND(CHECK_THING_SIZE);
#undef CHECK_THING_SIZE
// GC things must be standard-layout classes so we can access the cell header by
// casting the thing pointer to a CellHeader*. This checks the property for the
// least derived thing type.
#define CHECK_THING_LAYOUT(_1, traceKind, _2, _3, _4, _5, _6) \
static_assert( \
std::is_standard_layout< \
MapTraceKindToType<JS::TraceKind::traceKind>::Type>::value, \
"The class for " #traceKind " must by a standard layout type.");
FOR_EACH_ALLOCKIND(CHECK_THING_LAYOUT)
#undef CHECK_THING_LAYOUT
template <typename T>
struct ArenaLayout {
static constexpr size_t thingSize() { return sizeof(T); }

View File

@ -106,16 +106,13 @@ inline T MaybeForwarded(T t) {
return t;
}
inline RelocatedCellHeader::RelocatedCellHeader(Cell* location,
uintptr_t flags) {
uintptr_t ptr = uintptr_t(location);
inline RelocationOverlay::RelocationOverlay(Cell* dst) {
MOZ_ASSERT(dst->flags() == 0);
uintptr_t ptr = uintptr_t(dst);
MOZ_ASSERT((ptr & RESERVED_MASK) == 0);
MOZ_ASSERT((flags & ~RESERVED_MASK) == 0);
header_ = ptr | flags | FORWARD_BIT;
header_ = ptr | FORWARD_BIT;
}
inline RelocationOverlay::RelocationOverlay(Cell* dst) : header_(dst, 0) {}
/* static */
inline RelocationOverlay* RelocationOverlay::forwardCell(Cell* src, Cell* dst) {
MOZ_ASSERT(!src->isForwarded());
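
A minimal sketch (not part of the patch) of the forwarding scheme used above: the destination address, whose low bits are clear thanks to cell alignment, is stored in the header word with FORWARD_BIT set, and masking the reserved bits off recovers it. Names are illustrative only.

#include <cassert>
#include <cstdint>

namespace sketch {

constexpr uintptr_t RESERVED_MASK = 0x7;
constexpr uintptr_t FORWARD_BIT = 0x1;

struct Overlay {
  uintptr_t header_ = 0;

  void forwardTo(void* dst) {
    uintptr_t ptr = uintptr_t(dst);
    assert((ptr & RESERVED_MASK) == 0);  // cell alignment keeps low bits clear
    header_ = ptr | FORWARD_BIT;
  }
  bool isForwarded() const { return header_ & FORWARD_BIT; }
  void* forwardingAddress() const {
    assert(isForwarded());
    return reinterpret_cast<void*>(header_ & ~RESERVED_MASK);
  }
};

}  // namespace sketch

int main() {
  alignas(16) static char newLocation[16];
  sketch::Overlay overlay;
  overlay.forwardTo(newLocation);
  assert(overlay.isForwarded());
  assert(overlay.forwardingAddress() == newLocation);
}
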

View File

@ -191,15 +191,6 @@ void js::CheckTracedThing(JSTracer* trc, T* thing) {
MOZ_ASSERT(trc);
MOZ_ASSERT(thing);
// Check that CellHeader is the first field in the cell.
static_assert(
std::is_base_of_v<CellHeader, std::remove_const_t<std::remove_reference_t<
decltype(thing->cellHeader())>>>,
"GC things must provide a cellHeader() method that returns a reference "
"to the cell header");
MOZ_ASSERT(static_cast<const void*>(&thing->cellHeader()) ==
static_cast<const void*>(thing));
if (!trc->checkEdges()) {
return;
}
@ -1173,7 +1164,7 @@ void BaseScript::traceChildren(JSTracer* trc) {
}
void Shape::traceChildren(JSTracer* trc) {
TraceEdge(trc, &headerAndBase_, "base");
TraceCellHeaderEdge(trc, this, "base");
TraceEdge(trc, &propidRef(), "propid");
if (parent) {
TraceEdge(trc, &parent, "parent");
@ -1386,7 +1377,7 @@ void WasmFunctionScope::AbstractData<JSAtom>::trace(JSTracer* trc) {
TraceBindingNames(trc, trailingNames.start(), length);
}
void Scope::traceChildren(JSTracer* trc) {
TraceNullableEdge(trc, &headerAndEnclosingScope_, "scope enclosing");
TraceNullableCellHeaderEdge(trc, this, "scope enclosing");
TraceNullableEdge(trc, &environmentShape_, "scope env shape");
applyScopeDataTyped([trc](auto data) { data->trace(trc); });
}

View File

@ -20,29 +20,19 @@
namespace js {
namespace gc {
class RelocatedCellHeader : public CellHeader {
public:
RelocatedCellHeader(Cell* location, uintptr_t flags);
Cell* location() const {
return reinterpret_cast<Cell*>(header_ & ~RESERVED_MASK);
}
};
/*
* This structure overlays a Cell that has been moved and provides a way to find
* its new location. It's used during generational and compacting GC.
*/
class RelocationOverlay : public Cell {
protected:
// First word of a Cell has additional requirements from GC. The GC flags
// determine if a Cell is a normal entry or is a RelocationOverlay.
// 3 0
// -------------------------
// | NewLocation | GCFlags |
// -------------------------
RelocatedCellHeader header_;
public:
/* The location the cell has been moved to, stored in the cell header. */
Cell* forwardingAddress() const {
MOZ_ASSERT(isForwarded());
return reinterpret_cast<Cell*>(header_ & ~RESERVED_MASK);
}
protected:
/* A list entry to track all relocated things. */
RelocationOverlay* next_;
@ -59,11 +49,6 @@ class RelocationOverlay : public Cell {
static RelocationOverlay* forwardCell(Cell* src, Cell* dst);
Cell* forwardingAddress() const {
MOZ_ASSERT(isForwarded());
return header_.location();
}
RelocationOverlay*& nextRef() {
MOZ_ASSERT(isForwarded());
return next_;

View File

@ -131,14 +131,14 @@ inline void TraceEdge(JSTracer* trc, WeakHeapPtr<T>* thingp, const char* name) {
gc::TraceEdgeInternal(trc, gc::ConvertToBase(thingp->unsafeGet()), name);
}
template <class T>
inline void TraceEdge(JSTracer* trc,
gc::CellHeaderWithTenuredGCPointer<T>* thingp,
const char* name) {
T* thing = thingp->ptr();
template <class BC, class T>
inline void TraceCellHeaderEdge(JSTracer* trc,
gc::CellWithTenuredGCPointer<BC, T>* thingp,
const char* name) {
T* thing = thingp->headerPtr();
gc::TraceEdgeInternal(trc, gc::ConvertToBase(&thing), name);
if (thing != thingp->ptr()) {
thingp->unsafeSetPtr(thing);
if (thing != thingp->headerPtr()) {
thingp->unsafeSetHeaderPtr(thing);
}
}
@ -161,15 +161,15 @@ inline void TraceNullableEdge(JSTracer* trc, WeakHeapPtr<T>* thingp,
}
}
template <class T>
inline void TraceNullableEdge(JSTracer* trc,
gc::CellHeaderWithTenuredGCPointer<T>* thingp,
const char* name) {
T* thing = thingp->ptr();
template <class BC, class T>
inline void TraceNullableCellHeaderEdge(
JSTracer* trc, gc::CellWithTenuredGCPointer<BC, T>* thingp,
const char* name) {
T* thing = thingp->headerPtr();
if (thing) {
gc::TraceEdgeInternal(trc, gc::ConvertToBase(&thing), name);
if (thing != thingp->ptr()) {
thingp->unsafeSetPtr(thing);
if (thing != thingp->headerPtr()) {
thingp->unsafeSetHeaderPtr(thing);
}
}
}
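
For context only, a self-contained sketch of the read-trace-writeback pattern these helpers use: copy the pointer out of the header, let the tracer (possibly a moving GC) update the local copy, and write it back through the unsafe setter only if it changed. The Tracer, Thing, and CellWithThing types are hypothetical stand-ins, not SpiderMonkey types.

#include <cassert>
#include <cstdint>

namespace sketch {

struct Thing {};

// Stand-in tracer: a moving tracer may rewrite *thingp to the new location.
struct Tracer {
  Thing* moveTo = nullptr;
  void traceEdge(Thing** thingp) {
    if (moveTo) {
      *thingp = moveTo;
    }
  }
};

struct CellWithThing {
  uintptr_t header_ = 0;
  Thing* headerPtr() const { return reinterpret_cast<Thing*>(header_); }
  void unsafeSetHeaderPtr(Thing* t) { header_ = uintptr_t(t); }
};

// Read, trace, and write back only if the pointer actually moved.
void traceHeaderEdge(Tracer* trc, CellWithThing* cell) {
  Thing* thing = cell->headerPtr();
  trc->traceEdge(&thing);
  if (thing != cell->headerPtr()) {
    cell->unsafeSetHeaderPtr(thing);  // bypass barriers; the GC is updating
  }
}

}  // namespace sketch

int main() {
  alignas(8) static sketch::Thing oldThing, newThing;
  sketch::CellWithThing cell;
  cell.unsafeSetHeaderPtr(&oldThing);

  sketch::Tracer trc;
  trc.moveTo = &newThing;
  sketch::traceHeaderEdge(&trc, &cell);
  assert(cell.headerPtr() == &newThing);
}
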

View File

@ -6,17 +6,15 @@ import gdb
def get_header_ptr(value, ptr_t):
# Return the pointer stored in a CellHeader subclass that wraps a pointer.
assert value.type.strip_typedefs().tag.startswith('js::gc::CellHeaderWith')
# Return the pointer stored in Cell::header_ for subclasses of
# TenuredCellWithNonGCPointer and CellWithTenuredGCPointer.
return value['header_'].cast(ptr_t)
def get_header_length_and_flags(value):
# Return the contents of a CellHeaderWithLengthAndFlags.
assert value.type.strip_typedefs().tag == \
'js::gc::CellHeaderWithLengthAndFlags'
header = value['header_']
flags = header['header_']
# Return the length and flags values for subclasses of
# CellWithLengthAndFlags.
flags = value['header_']
try:
length = value['length_']
except gdb.error:

View File

@ -43,10 +43,8 @@ class JSObjectPtrOrRef(prettyprinters.Pointer):
self.otc = cache.mod_JSObject
def summary(self):
group = get_header_ptr(self.value['headerAndGroup_'],
self.otc.ObjectGroup_ptr_t)
classp = get_header_ptr(group['headerAndClasp_'],
self.otc.JSClass_ptr_t)
group = get_header_ptr(self.value, self.otc.ObjectGroup_ptr_t)
classp = get_header_ptr(group, self.otc.JSClass_ptr_t)
non_native = classp['flags'] & self.otc.class_NON_NATIVE
# Use GDB to format the class name, but then strip off the address
@ -61,8 +59,7 @@ class JSObjectPtrOrRef(prettyprinters.Pointer):
else:
native = self.value.cast(self.otc.NativeObject_ptr_t)
shape = deref(native['shape_'])
baseshape = get_header_ptr(shape['headerAndBase_'],
self.otc.BaseShape_ptr_t)
baseshape = get_header_ptr(shape, self.otc.BaseShape_ptr_t)
flags = baseshape['flags']
is_delegate = bool(flags & self.otc.flag_DELEGATE)
name = None

View File

@ -44,7 +44,7 @@ class JSStringPtr(Common):
def chars(self):
d = self.value['d']
length, flags = get_header_length_and_flags(self.value['header_'])
length, flags = get_header_length_and_flags(self.value)
corrupt = {
0x2f2f2f2f: 'JS_FRESH_NURSERY_PATTERN',

View File

@ -25,8 +25,7 @@ class JSSymbolPtr(mozilla.prettyprinters.Pointer):
def to_string(self):
code = int(self.value['code_']) & 0xffffffff
desc = str(get_header_ptr(self.value['headerAndDescription_'],
self.cache.JSString_ptr_t))
desc = str(get_header_ptr(self.value, self.cache.JSString_ptr_t))
if code == InSymbolRegistry:
return "Symbol.for({})".format(desc)
elif code == UniqueSymbol:

View File

@ -579,7 +579,7 @@ void JitCode::finalize(JSFreeOp* fop) {
headerSize_ + bufferSize_))) {
pool_->addRef();
}
cellHeaderAndCode_.setPtr(nullptr);
setHeaderPtr(nullptr);
// Code buffers are stored inside ExecutablePools. Pools are refcounted.
// Releasing the pool may free it. Horrible hack: if we are using perf

View File

@ -15,7 +15,7 @@
#include "jstypes.h"
#include "gc/Allocator.h" // AllowGC
#include "gc/Cell.h" // gc::TenuredCell, gc::CellHeaderWithNonGCPointer
#include "gc/Cell.h" // gc::TenuredCellWithNonGCPointer
#include "jit/ExecutableAllocator.h" // ExecutablePool
#include "js/TraceKind.h" // JS::TraceKind
#include "js/UbiNode.h" // ubi::{TracerConcrete, Size, CourseType}
@ -44,10 +44,12 @@ struct JitCodeHeader {
}
};
class JitCode : public gc::TenuredCell {
class JitCode : public gc::TenuredCellWithNonGCPointer<uint8_t> {
public:
// Raw code pointer, stored in the cell header.
uint8_t* raw() const { return headerPtr(); }
protected:
using CellHeaderWithCodePtr = gc::CellHeaderWithNonGCPointer<uint8_t>;
CellHeaderWithCodePtr cellHeaderAndCode_;
ExecutablePool* pool_;
uint32_t bufferSize_; // Total buffer size. Does not include headerSize_.
uint32_t insnSize_; // Instruction stream size.
@ -64,7 +66,7 @@ class JitCode : public gc::TenuredCell {
JitCode() = delete;
JitCode(uint8_t* code, uint32_t bufferSize, uint32_t headerSize,
ExecutablePool* pool, CodeKind kind)
: cellHeaderAndCode_(code),
: TenuredCellWithNonGCPointer(code),
pool_(pool),
bufferSize_(bufferSize),
insnSize_(0),
@ -86,7 +88,6 @@ class JitCode : public gc::TenuredCell {
}
public:
uint8_t* raw() const { return cellHeaderAndCode_.ptr(); }
uint8_t* rawEnd() const { return raw() + insnSize_; }
bool containsNativePC(const void* addr) const {
const uint8_t* addr_u8 = (const uint8_t*)addr;
@ -120,10 +121,7 @@ class JitCode : public gc::TenuredCell {
return code;
}
static size_t offsetOfCode() {
return offsetof(JitCode, cellHeaderAndCode_) +
CellHeaderWithCodePtr::offsetOfPtr();
}
static size_t offsetOfCode() { return offsetOfHeaderPtr(); }
uint8_t* jumpRelocTable() { return raw() + jumpRelocTableOffset(); }
@ -136,7 +134,6 @@ class JitCode : public gc::TenuredCell {
public:
static const JS::TraceKind TraceKind = JS::TraceKind::JitCode;
const gc::CellHeader& cellHeader() const { return cellHeaderAndCode_; }
};
} // namespace jit

View File

@ -261,7 +261,7 @@ BigInt* BigInt::neg(JSContext* cx, HandleBigInt x) {
if (!result) {
return nullptr;
}
result->header_.toggleFlagBit(SignBit);
result->toggleHeaderFlagBit(SignBit);
return result;
}
@ -1789,7 +1789,7 @@ BigInt* BigInt::createFromInt64(JSContext* cx, int64_t n) {
}
if (n < 0) {
res->header_.setFlagBit(SignBit);
res->setHeaderFlagBit(SignBit);
}
MOZ_ASSERT(res->isNegative() == (n < 0));

View File

@ -39,22 +39,23 @@ XDRResult XDRBigInt(XDRState<mode>* xdr, MutableHandle<JS::BigInt*> bi);
namespace JS {
class BigInt final : public js::gc::Cell {
class BigInt final : public js::gc::CellWithLengthAndFlags {
public:
using Digit = uintptr_t;
private:
using Header = js::gc::CellHeaderWithLengthAndFlags;
// The low CellFlagBitsReservedForGC flag bits are reserved.
static constexpr uintptr_t SignBit =
js::Bit(js::gc::CellFlagBitsReservedForGC);
static constexpr size_t InlineDigitsLength =
(js::gc::MinCellSize - sizeof(Header)) / sizeof(Digit);
(js::gc::MinCellSize - sizeof(CellWithLengthAndFlags)) / sizeof(Digit);
Header header_;
public:
// The number of digits and the flags are stored in the cell header.
size_t digitLength() const { return headerLengthField(); }
private:
// The digit storage starts with the least significant digit (little-endian
// digit order). Byte order within a digit is of course native endian.
union {
@ -63,22 +64,20 @@ class BigInt final : public js::gc::Cell {
};
void setLengthAndFlags(uint32_t len, uint32_t flags) {
header_.setLengthAndFlags(len, flags);
setHeaderLengthAndFlags(len, flags);
}
public:
static const JS::TraceKind TraceKind = JS::TraceKind::BigInt;
const js::gc::CellHeader& cellHeader() const { return header_.cellHeader(); }
void fixupAfterMovingGC() {}
js::gc::AllocKind getAllocKind() const { return js::gc::AllocKind::BIGINT; }
size_t digitLength() const { return header_.lengthField(); }
// Offset for direct access from JIT code.
static constexpr size_t offsetOfDigitLength() {
return offsetof(BigInt, header_) + Header::offsetOfLength();
return offsetOfHeaderLength();
}
bool hasInlineDigits() const { return digitLength() <= InlineDigitsLength; }
@ -98,7 +97,7 @@ class BigInt final : public js::gc::Cell {
void setDigit(size_t idx, Digit digit) { digits()[idx] = digit; }
bool isZero() const { return digitLength() == 0; }
bool isNegative() const { return header_.flagsField() & SignBit; }
bool isNegative() const { return headerFlagsField() & SignBit; }
void initializeDigitsToZero();
@ -446,9 +445,8 @@ class BigInt final : public js::gc::Cell {
// call the methods below.
friend class js::jit::MacroAssembler;
// Make offset accessors accessible to the MacroAssembler.
static constexpr size_t offsetOfFlags() { return Header::offsetOfFlags(); }
static constexpr size_t offsetOfLength() { return Header::offsetOfLength(); }
static constexpr size_t offsetOfFlags() { return offsetOfHeaderFlags(); }
static constexpr size_t offsetOfLength() { return offsetOfHeaderLength(); }
static size_t offsetOfInlineDigits() {
return offsetof(BigInt, inlineDigits_);
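
Illustration only: how a sign flag can sit in the first header flag bit above the GC-reserved bits, alongside the digit length, roughly mirroring what BigInt does with SignBit and toggleHeaderFlagBit. This sketch models the packed header as two plain fields and is not SpiderMonkey code.

#include <cassert>
#include <cstddef>
#include <cstdint>

namespace sketch {

constexpr size_t CellFlagBitsReservedForGC = 3;
constexpr uint32_t SignBit = uint32_t(1) << CellFlagBitsReservedForGC;

// The sign lives in the first non-reserved flag bit; the digit length lives
// in the other half of the header word.
struct BigIntHeader {
  uint32_t length = 0;
  uint32_t flags = 0;

  bool isNegative() const { return flags & SignBit; }
  void negate() { flags ^= SignBit; }
};

}  // namespace sketch

int main() {
  sketch::BigIntHeader h;
  h.length = 1;
  assert(!h.isNegative());
  h.negate();  // what BigInt::neg() does via toggleHeaderFlagBit(SignBit)
  assert(h.isNegative());
}
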

View File

@ -3910,7 +3910,7 @@ JS::ubi::Node::Size JS::ubi::Concrete<JSObject>::size(
const char16_t JS::ubi::Concrete<JSObject>::concreteTypeName[] = u"JSObject";
void JSObject::traceChildren(JSTracer* trc) {
TraceEdge(trc, &headerAndGroup_, "group");
TraceCellHeaderEdge(trc, this, "group");
traceShape(trc);

View File

@ -82,11 +82,13 @@ bool SetImmutablePrototype(JSContext* cx, JS::HandleObject obj,
* as before.
* - JSObject::swap()
*/
class JSObject : public js::gc::Cell {
class JSObject
: public js::gc::CellWithTenuredGCPointer<js::gc::Cell, js::ObjectGroup> {
public:
// The ObjectGroup is stored in the cell header.
js::ObjectGroup* groupRaw() const { return headerPtr(); }
protected:
using HeaderWithObjectGroup =
js::gc::CellHeaderWithTenuredGCPointer<js::ObjectGroup>;
HeaderWithObjectGroup headerAndGroup_;
js::GCPtrShape shape_;
private:
@ -103,7 +105,7 @@ class JSObject : public js::gc::Cell {
// Make a new group to use for a singleton object.
static js::ObjectGroup* makeLazyGroup(JSContext* cx, js::HandleObject obj);
void setGroupRaw(js::ObjectGroup* group) { headerAndGroup_.setPtr(group); }
void setGroupRaw(js::ObjectGroup* group) { setHeaderPtr(group); }
public:
bool isNative() const { return getClass()->isNative(); }
@ -144,9 +146,7 @@ class JSObject : public js::gc::Cell {
return groupRaw();
}
js::ObjectGroup* groupRaw() const { return headerAndGroup_.ptr(); }
void initGroup(js::ObjectGroup* group) { headerAndGroup_.initPtr(group); }
void initGroup(js::ObjectGroup* group) { initHeaderPtr(group); }
/*
* Whether this is the only object which has its specified group. This
@ -270,7 +270,6 @@ class JSObject : public js::gc::Cell {
void fixupAfterMovingGC();
static const JS::TraceKind TraceKind = JS::TraceKind::Object;
const js::gc::CellHeader& cellHeader() const { return headerAndGroup_; }
MOZ_ALWAYS_INLINE JS::Zone* zone() const {
MOZ_ASSERT_IF(!isTenured(), nurseryZone() == groupRaw()->zone());
@ -581,10 +580,7 @@ class JSObject : public js::gc::Cell {
friend class js::jit::MacroAssembler;
friend class js::jit::CacheIRCompiler;
static constexpr size_t offsetOfGroup() {
return offsetof(JSObject, headerAndGroup_) +
HeaderWithObjectGroup::offsetOfPtr();
}
static constexpr size_t offsetOfGroup() { return offsetOfHeaderPtr(); }
static constexpr size_t offsetOfShape() { return offsetof(JSObject, shape_); }
private:

View File

@ -1622,19 +1622,19 @@ struct SourceExtent {
//
// NOTE: Scripts may be directly created with bytecode and skip the lazy script
// form. This is always the case for top-level scripts.
class BaseScript : public gc::TenuredCell {
class BaseScript : public gc::TenuredCellWithNonGCPointer<uint8_t> {
public:
// The definition of flags is shared with the frontend for consistency.
using ImmutableFlags = ImmutableScriptFlagsEnum;
using MutableFlags = MutableScriptFlagsEnum;
protected:
public:
// Pointer to baseline->method()->raw(), ion->method()->raw(), a wasm jit
// entry, the JIT's EnterInterpreter stub, or the lazy link stub. Must be
// non-null (except on no-jit builds).
using HeaderWithCodePtr = gc::CellHeaderWithNonGCPointer<uint8_t>;
HeaderWithCodePtr headerAndJitCodeRaw_;
// non-null (except on no-jit builds). This is stored in the cell header.
uint8_t* jitCodeRaw() const { return headerPtr(); }
protected:
// Object that determines what Realm this script is compiled for. For function
// scripts this is the canonical function, otherwise it is the GlobalObject of
// the realm.
@ -1681,7 +1681,7 @@ class BaseScript : public gc::TenuredCell {
BaseScript(uint8_t* stubEntry, JSObject* functionOrGlobal,
ScriptSourceObject* sourceObject, SourceExtent extent,
uint32_t immutableFlags)
: headerAndJitCodeRaw_(stubEntry),
: TenuredCellWithNonGCPointer(stubEntry),
functionOrGlobal_(functionOrGlobal),
sourceObject_(sourceObject),
extent_(extent),
@ -1692,7 +1692,7 @@ class BaseScript : public gc::TenuredCell {
MOZ_ASSERT(extent_.sourceEnd <= extent_.toStringEnd);
}
void setJitCodeRaw(uint8_t* code) { headerAndJitCodeRaw_.setPtr(code); }
void setJitCodeRaw(uint8_t* code) { setHeaderPtr(code); }
public:
static BaseScript* New(JSContext* cx, js::HandleObject functionOrGlobal,
@ -1707,7 +1707,6 @@ class BaseScript : public gc::TenuredCell {
const SourceExtent& extent,
uint32_t immutableFlags);
uint8_t* jitCodeRaw() const { return headerAndJitCodeRaw_.ptr(); }
bool isUsingInterpreterTrampoline(JSRuntime* rt) const;
// Canonical function for the script, if it has a function. For top-level
@ -1983,7 +1982,6 @@ class BaseScript : public gc::TenuredCell {
public:
static const JS::TraceKind TraceKind = JS::TraceKind::Script;
const gc::CellHeader& cellHeader() const { return headerAndJitCodeRaw_; }
void traceChildren(JSTracer* trc);
void finalize(JSFreeOp* fop);
@ -2001,10 +1999,7 @@ class BaseScript : public gc::TenuredCell {
bool hasFieldInitializer);
// JIT accessors
static constexpr size_t offsetOfJitCodeRaw() {
return offsetof(BaseScript, headerAndJitCodeRaw_) +
HeaderWithCodePtr::offsetOfPtr();
}
static constexpr size_t offsetOfJitCodeRaw() { return offsetOfHeaderPtr(); }
static constexpr size_t offsetOfPrivateData() {
return offsetof(BaseScript, data_);
}

View File

@ -41,7 +41,7 @@ using namespace js;
ObjectGroup::ObjectGroup(const JSClass* clasp, TaggedProto proto,
JS::Realm* realm, ObjectGroupFlags initialFlags)
: headerAndClasp_(clasp),
: TenuredCellWithNonGCPointer(clasp),
proto_(proto),
realm_(realm),
flags_(initialFlags) {

View File

@ -79,15 +79,14 @@ enum NewObjectKind {
*/
/* Type information about an object accessed by a script. */
class ObjectGroup : public gc::TenuredCell {
class ObjectGroup : public gc::TenuredCellWithNonGCPointer<const JSClass> {
public:
class Property;
private:
/* Class shared by objects in this group stored in header. */
using HeaderWithJSClass = gc::CellHeaderWithNonGCPointer<const JSClass>;
HeaderWithJSClass headerAndClasp_;
/* Class shared by objects in this group, stored in the cell header. */
const JSClass* clasp() const { return headerPtr(); }
private:
/* Prototype shared by objects in this group. */
GCPtr<TaggedProto> proto_; // set by constructor
@ -147,10 +146,7 @@ class ObjectGroup : public gc::TenuredCell {
// END OF PROPERTIES
private:
static inline uint32_t offsetOfClasp() {
return offsetof(ObjectGroup, headerAndClasp_) +
HeaderWithJSClass::offsetOfPtr();
}
static inline uint32_t offsetOfClasp() { return offsetOfHeaderPtr(); }
static inline uint32_t offsetOfProto() {
return offsetof(ObjectGroup, proto_);
@ -174,7 +170,6 @@ class ObjectGroup : public gc::TenuredCell {
friend class js::jit::MacroAssembler;
public:
const JSClass* clasp() const { return headerAndClasp_.ptr(); }
bool hasDynamicPrototype() const { return proto_.isDynamic(); }
@ -431,7 +426,6 @@ class ObjectGroup : public gc::TenuredCell {
void finalize(JSFreeOp* fop);
static const JS::TraceKind TraceKind = JS::TraceKind::ObjectGroup;
const gc::CellHeader& cellHeader() const { return headerAndClasp_; }
public:
const ObjectGroupFlags* addressOfFlags() const { return &flags_; }

View File

@ -599,7 +599,7 @@ bool js::StringHasRegExpMetaChars(JSLinearString* str) {
/* RegExpShared */
RegExpShared::RegExpShared(JSAtom* source, RegExpFlags flags)
: headerAndSource(source), pairCount_(0), flags(flags) {}
: CellWithTenuredGCPointer(source), pairCount_(0), flags(flags) {}
void RegExpShared::traceChildren(JSTracer* trc) {
// Discard code to avoid holding onto ExecutablePools.
@ -607,7 +607,7 @@ void RegExpShared::traceChildren(JSTracer* trc) {
discardJitCode();
}
TraceNullableEdge(trc, &headerAndSource, "RegExpShared source");
TraceNullableCellHeaderEdge(trc, this, "RegExpShared source");
if (kind() == RegExpShared::Kind::Atom) {
TraceNullableEdge(trc, &patternAtom_, "RegExpShared pattern atom");
} else {

View File

@ -71,7 +71,8 @@ inline bool IsNativeRegExpEnabled() {
* objects when we are preserving jitcode in their zone, to avoid the same
* recompilation inefficiencies as normal Ion and baseline compilation.
*/
class RegExpShared : public gc::TenuredCell {
class RegExpShared
: public gc::CellWithTenuredGCPointer<gc::TenuredCell, JSAtom> {
public:
enum class Kind { Unparsed, Atom, RegExp };
enum class CodeKind { Bytecode, Jitcode, Any };
@ -106,10 +107,11 @@ class RegExpShared : public gc::TenuredCell {
}
};
/* Source to the RegExp, for lazy compilation. */
using HeaderWithAtom = gc::CellHeaderWithTenuredGCPointer<JSAtom>;
HeaderWithAtom headerAndSource;
public:
/* Source to the RegExp, for lazy compilation. Stored in the cell header. */
JSAtom* getSource() const { return headerPtr(); }
private:
RegExpCompilation compilationArray[2];
uint32_t pairCount_;
@ -210,8 +212,6 @@ class RegExpShared : public gc::TenuredCell {
return namedCaptureIndices_[idx];
}
JSAtom* getSource() const { return headerAndSource.ptr(); }
JSAtom* patternAtom() const { return patternAtom_; }
JS::RegExpFlags getFlags() const { return flags; }
@ -232,10 +232,7 @@ class RegExpShared : public gc::TenuredCell {
void discardJitCode();
void finalize(JSFreeOp* fop);
static size_t offsetOfSource() {
return offsetof(RegExpShared, headerAndSource) +
HeaderWithAtom::offsetOfPtr();
}
static size_t offsetOfSource() { return offsetOfHeaderPtr(); }
static size_t offsetOfPatternAtom() {
return offsetof(RegExpShared, patternAtom_);
@ -266,7 +263,6 @@ class RegExpShared : public gc::TenuredCell {
public:
static const JS::TraceKind TraceKind = JS::TraceKind::RegExpShared;
const gc::CellHeader& cellHeader() const { return headerAndSource; }
};
class RegExpZone {

View File

@ -265,15 +265,15 @@ class WrappedPtrOperations<Scope*, Wrapper> {
//
// The base class of all Scopes.
//
class Scope : public js::gc::TenuredCell {
class Scope : public gc::CellWithTenuredGCPointer<gc::TenuredCell, Scope> {
friend class GCMarker;
friend class frontend::ScopeCreationData;
protected:
// The enclosing scope or nullptr.
using HeaderWithScope = gc::CellHeaderWithTenuredGCPointer<Scope>;
HeaderWithScope headerAndEnclosingScope_;
public:
// The enclosing scope or nullptr, stored in the cell header.
Scope* enclosing() const { return headerPtr(); }
protected:
// The kind determines data_.
const ScopeKind kind_;
@ -284,7 +284,7 @@ class Scope : public js::gc::TenuredCell {
BaseScopeData* data_;
Scope(ScopeKind kind, Scope* enclosing, Shape* environmentShape)
: headerAndEnclosingScope_(enclosing),
: CellWithTenuredGCPointer(enclosing),
kind_(kind),
environmentShape_(environmentShape),
data_(nullptr) {}
@ -313,7 +313,6 @@ class Scope : public js::gc::TenuredCell {
MutableHandle<UniquePtr<typename ConcreteScope::Data>> data);
static const JS::TraceKind TraceKind = JS::TraceKind::Scope;
const gc::CellHeader& cellHeader() const { return headerAndEnclosingScope_; }
template <typename T>
bool is() const {
@ -334,8 +333,6 @@ class Scope : public js::gc::TenuredCell {
ScopeKind kind() const { return kind_; }
Scope* enclosing() const { return headerAndEnclosingScope_.ptr(); }
Shape* environmentShape() const { return environmentShape_; }
static bool hasEnvironment(ScopeKind kind, bool environmentShape) {

View File

@ -127,7 +127,7 @@ inline Shape* Shape::new_(JSContext* cx, Handle<StackShape> other,
inline void Shape::updateBaseShapeAfterMovingGC() {
BaseShape* base = this->base();
if (IsForwarded(base)) {
headerAndBase_.unsafeSetPtr(Forwarded(base));
unsafeSetHeaderPtr(Forwarded(base));
}
}

View File

@ -1582,14 +1582,14 @@ Shape* Shape::setObjectFlags(JSContext* cx, BaseShape::Flag flags,
}
inline BaseShape::BaseShape(const StackBaseShape& base)
: headerAndClasp_(base.clasp),
: TenuredCellWithNonGCPointer(base.clasp),
flags(base.flags),
slotSpan_(0),
unowned_(nullptr) {}
/* static */
void BaseShape::copyFromUnowned(BaseShape& dest, UnownedBaseShape& src) {
dest.headerAndClasp_.setPtr(src.clasp());
dest.setHeaderPtr(src.clasp());
dest.slotSpan_ = src.slotSpan_;
dest.unowned_ = &src;
dest.flags = src.flags | OWNED_SHAPE;

View File

@ -665,7 +665,7 @@ class Shape;
class UnownedBaseShape;
struct StackBaseShape;
class BaseShape : public gc::TenuredCell {
class BaseShape : public gc::TenuredCellWithNonGCPointer<const JSClass> {
public:
friend class Shape;
friend struct StackBaseShape;
@ -708,8 +708,9 @@ class BaseShape : public gc::TenuredCell {
};
private:
using HeaderWithJSClass = gc::CellHeaderWithNonGCPointer<const JSClass>;
HeaderWithJSClass headerAndClasp_; /* Class of referring object. */
/* Class of referring object, stored in the cell header */
const JSClass* clasp() const { return headerPtr(); }
uint32_t flags; /* Vector of above flags. */
uint32_t slotSpan_; /* Object slot span for BaseShapes at
* dictionary last properties. */
@ -731,7 +732,6 @@ class BaseShape : public gc::TenuredCell {
/* Not defined: BaseShapes must not be stack allocated. */
~BaseShape();
const JSClass* clasp() const { return headerAndClasp_.ptr(); }
bool isOwned() const { return !!(flags & OWNED_SHAPE); }
@ -828,7 +828,6 @@ class BaseShape : public gc::TenuredCell {
static inline size_t offsetOfFlags() { return offsetof(BaseShape, flags); }
static const JS::TraceKind TraceKind = JS::TraceKind::BaseShape;
const gc::CellHeader& cellHeader() const { return headerAndClasp_; }
void traceChildren(JSTracer* trc);
void traceChildrenSkipShapeCache(JSTracer* trc);
@ -839,7 +838,7 @@ class BaseShape : public gc::TenuredCell {
private:
static void staticAsserts() {
static_assert(offsetof(BaseShape, headerAndClasp_) ==
static_assert(offsetOfHeaderPtr() ==
offsetof(js::shadow::BaseShape, clasp_));
static_assert(sizeof(BaseShape) % gc::CellAlignBytes == 0,
"Things inheriting from gc::Cell must have a size that's "
@ -937,7 +936,7 @@ using BaseShapeSet =
JS::WeakCache<JS::GCHashSet<WeakHeapPtr<UnownedBaseShape*>, StackBaseShape,
SystemAllocPolicy>>;
class Shape : public gc::TenuredCell {
class Shape : public gc::CellWithTenuredGCPointer<gc::TenuredCell, BaseShape> {
friend class ::JSObject;
friend class ::JSFunction;
friend class GCMarker;
@ -949,9 +948,11 @@ class Shape : public gc::TenuredCell {
friend class JS::ubi::Concrete<Shape>;
friend class js::gc::RelocationOverlay;
public:
// Base shape, stored in the cell header.
BaseShape* base() const { return headerPtr(); }
protected:
using HeaderWithBaseShape = gc::CellHeaderWithTenuredGCPointer<BaseShape>;
HeaderWithBaseShape headerAndBase_;
const GCPtrId propid_;
// Flags that are not modified after the Shape is created. Off-thread Ion
@ -1276,8 +1277,6 @@ class Shape : public gc::TenuredCell {
attrs == aattrs && getter() == rawGetter && setter() == rawSetter;
}
BaseShape* base() const { return headerAndBase_.ptr(); }
static bool isDataProperty(unsigned attrs, GetterOp getter, SetterOp setter) {
return !(attrs & (JSPROP_GETTER | JSPROP_SETTER)) && !getter && !setter;
}
@ -1376,7 +1375,7 @@ class Shape : public gc::TenuredCell {
private:
void setBase(BaseShape* base) {
MOZ_ASSERT(base);
headerAndBase_.setPtr(base);
setHeaderPtr(base);
}
bool isBigEnoughForAShapeTableSlow() {
@ -1428,7 +1427,6 @@ class Shape : public gc::TenuredCell {
void removeChild(JSFreeOp* fop, Shape* child);
static const JS::TraceKind TraceKind = JS::TraceKind::Shape;
const gc::CellHeader& cellHeader() const { return headerAndBase_; }
void traceChildren(JSTracer* trc);
@ -1440,9 +1438,7 @@ class Shape : public gc::TenuredCell {
void updateBaseShapeAfterMovingGC();
// For JIT usage.
static constexpr size_t offsetOfBaseShape() {
return offsetof(Shape, headerAndBase_) + HeaderWithBaseShape::offsetOfPtr();
}
static constexpr size_t offsetOfBaseShape() { return offsetOfHeaderPtr(); }
#ifdef DEBUG
static inline size_t offsetOfImmutableFlags() {
@ -1724,7 +1720,7 @@ class MutableWrappedPtrOperations<StackShape, Wrapper>
};
inline Shape::Shape(const StackShape& other, uint32_t nfixed)
: headerAndBase_(other.base),
: CellWithTenuredGCPointer(other.base),
propid_(other.propid),
immutableFlags(other.immutableFlags),
attrs(other.attrs),
@ -1756,7 +1752,7 @@ class NurseryShapesRef : public gc::BufferableRef {
};
inline Shape::Shape(UnownedBaseShape* base, uint32_t nfixed)
: headerAndBase_(base),
: CellWithTenuredGCPointer(base),
propid_(JSID_EMPTY),
immutableFlags(SHAPE_INVALID_SLOT | (nfixed << FIXED_SLOTS_SHIFT)),
attrs(0),

View File

@ -163,16 +163,21 @@ static const size_t UINT32_CHAR_BUFFER_LENGTH = sizeof("4294967295") - 1;
*/
// clang-format on
class JSString : public js::gc::Cell {
class JSString : public js::gc::CellWithLengthAndFlags {
protected:
static const size_t NUM_INLINE_CHARS_LATIN1 =
2 * sizeof(void*) / sizeof(JS::Latin1Char);
static const size_t NUM_INLINE_CHARS_TWO_BYTE =
2 * sizeof(void*) / sizeof(char16_t);
using Header = js::gc::CellHeaderWithLengthAndFlags;
Header header_;
public:
// String length and flags are stored in the cell header.
MOZ_ALWAYS_INLINE
size_t length() const { return headerLengthField(); }
MOZ_ALWAYS_INLINE
uint32_t flags() const { return headerFlagsField(); }
protected:
/* Fields only apply to string types commented on the right. */
struct Data {
// Note: 32-bit length and flags fields are inherited from
@ -284,7 +289,7 @@ class JSString : public js::gc::Cell {
static const uint32_t INIT_DEPENDENT_FLAGS = LINEAR_BIT | DEPENDENT_BIT;
static const uint32_t TYPE_FLAGS_MASK = js::BitMask(9) - js::BitMask(3);
static_assert((TYPE_FLAGS_MASK & js::gc::CellHeader::RESERVED_MASK) == 0,
static_assert((TYPE_FLAGS_MASK & RESERVED_MASK) == 0,
"GC reserved bits must not be used for Strings");
static const uint32_t LATIN1_CHARS_BIT = js::Bit(9);
@ -310,19 +315,11 @@ class JSString : public js::gc::Cell {
*/
static inline bool validateLength(JSContext* maybecx, size_t length);
static constexpr size_t offsetOfRawFlagsField() {
return offsetof(JSString, header_) + Header::offsetOfRawFlagsField();
}
static constexpr size_t offsetOfFlags() {
return offsetof(JSString, header_) + Header::offsetOfFlags();
}
static constexpr size_t offsetOfLength() {
return offsetof(JSString, header_) + Header::offsetOfLength();
}
static constexpr size_t offsetOfFlags() { return offsetOfHeaderFlags(); }
static constexpr size_t offsetOfLength() { return offsetOfHeaderLength(); }
bool sameLengthAndFlags(const JSString& other) const {
return header_ == other.header_;
return length() == other.length() && flags() == other.flags();
}
static void staticAsserts() {
@ -339,8 +336,9 @@ class JSString : public js::gc::Cell {
/* Ensure js::shadow::String has the same layout. */
using JS::shadow::String;
static_assert(JSString::offsetOfRawFlagsField() == offsetof(String, flags_),
"shadow::String flags offset must match JSString");
static_assert(
JSString::offsetOfRawHeaderFlagsField() == offsetof(String, flags_),
"shadow::String flags offset must match JSString");
#if JS_BITS_PER_WORD == 32
static_assert(JSString::offsetOfLength() == offsetof(String, length_),
"shadow::String length offset must match JSString");
@ -395,20 +393,11 @@ class JSString : public js::gc::Cell {
#endif
}
public:
MOZ_ALWAYS_INLINE
size_t length() const { return header_.lengthField(); }
MOZ_ALWAYS_INLINE
uint32_t flags() const { return header_.flagsField(); }
protected:
void setFlattenData(uintptr_t data) {
header_.setTemporaryGCUnsafeData(data);
}
void setFlattenData(uintptr_t data) { setTemporaryGCUnsafeData(data); }
uintptr_t unsetFlattenData(uint32_t len, uint32_t flags) {
return header_.unsetTemporaryGCUnsafeData(len, flags);
return unsetTemporaryGCUnsafeData(len, flags);
}
// Get correct non-inline chars enum arm for given type
@ -597,7 +586,6 @@ class JSString : public js::gc::Cell {
public:
static const JS::TraceKind TraceKind = JS::TraceKind::String;
const js::gc::CellHeader& cellHeader() const { return header_.cellHeader(); }
JS::Zone* zone() const {
if (isTenured()) {
@ -611,10 +599,10 @@ class JSString : public js::gc::Cell {
}
void setLengthAndFlags(uint32_t len, uint32_t flags) {
header_.setLengthAndFlags(len, flags);
setHeaderLengthAndFlags(len, flags);
}
void setFlagBit(uint32_t flag) { header_.setFlagBit(flag); }
void clearFlagBit(uint32_t flag) { header_.clearFlagBit(flag); }
void setFlagBit(uint32_t flag) { setHeaderFlagBit(flag); }
void clearFlagBit(uint32_t flag) { clearHeaderFlagBit(flag); }
void fixupAfterMovingGC() {}
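
Finally, for context (not part of the patch): the temporary header-clobbering trick that setFlattenData/unsetFlattenData rely on, sketched standalone. The header word is overwritten with arbitrary data for a GC-free window and then restored by re-initializing length and flags. It assumes a 64-bit word; all names are illustrative.

#include <cassert>
#include <cstdint>

namespace sketch {

static_assert(sizeof(uintptr_t) == 8, "this sketch assumes a 64-bit build");

class LengthAndFlags {
  uintptr_t header_ = 0;

 public:
  void setLengthAndFlags(uint32_t len, uint32_t flags) {
    header_ = (uint64_t(len) << 32) | uint64_t(flags);
  }
  uint32_t length() const { return uint32_t(header_ >> 32); }
  uint32_t flags() const { return uint32_t(header_); }

  // Clobber the header with arbitrary data; not safe if a GC can observe it.
  void setTemporaryGCUnsafeData(uintptr_t data) { header_ = data; }

  // Restore the header; callers must supply the values to re-initialize it.
  uintptr_t unsetTemporaryGCUnsafeData(uint32_t len, uint32_t flags) {
    uintptr_t data = header_;
    setLengthAndFlags(len, flags);
    return data;
  }
};

}  // namespace sketch

int main() {
  sketch::LengthAndFlags h;
  h.setLengthAndFlags(5, 0x8);

  // Stash something (e.g. a parent pointer during string flattening), then
  // put the real length and flags back and recover the stashed value.
  h.setTemporaryGCUnsafeData(0xdeadbeef);
  uintptr_t stashed = h.unsetTemporaryGCUnsafeData(5, 0x8);
  assert(stashed == 0xdeadbeef);
  assert(h.length() == 5 && h.flags() == 0x8);
}
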

View File

@ -31,12 +31,13 @@ class AutoAccessAtomsZone;
namespace JS {
class Symbol : public js::gc::TenuredCell {
private:
// User description of symbol. Also meets gc::Cell requirements.
using HeaderWithAtom = js::gc::CellHeaderWithTenuredGCPointer<JSAtom>;
HeaderWithAtom headerAndDescription_;
class Symbol
: public js::gc::CellWithTenuredGCPointer<js::gc::TenuredCell, JSAtom> {
public:
// User description of symbol, stored in the cell header.
JSAtom* description() const { return headerPtr(); }
private:
SymbolCode code_;
// Each Symbol gets its own hash code so that we don't have to use
@ -44,7 +45,7 @@ class Symbol : public js::gc::TenuredCell {
js::HashNumber hash_;
Symbol(SymbolCode code, js::HashNumber hash, JSAtom* desc)
: headerAndDescription_(desc), code_(code), hash_(hash) {}
: CellWithTenuredGCPointer(desc), code_(code), hash_(hash) {}
Symbol(const Symbol&) = delete;
void operator=(const Symbol&) = delete;
@ -67,7 +68,6 @@ class Symbol : public js::gc::TenuredCell {
js::HandleString description);
static Symbol* for_(JSContext* cx, js::HandleString description);
JSAtom* description() const { return headerAndDescription_.ptr(); }
SymbolCode code() const { return code_; }
js::HashNumber hash() const { return hash_; }
@ -88,10 +88,9 @@ class Symbol : public js::gc::TenuredCell {
bool isPrivateName() const { return code_ == SymbolCode::PrivateNameSymbol; }
static const JS::TraceKind TraceKind = JS::TraceKind::Symbol;
const js::gc::CellHeader& cellHeader() const { return headerAndDescription_; }
inline void traceChildren(JSTracer* trc) {
js::TraceNullableEdge(trc, &headerAndDescription_, "symbol description");
js::TraceNullableCellHeaderEdge(trc, this, "symbol description");
}
inline void finalize(JSFreeOp*) {}