Bug 1068223 - Clean up the Cell hierarchy; r=jonco

--HG--
extra : rebase_source : 3b9e05416971eee3eb9e8cdc30008fe41f2fd539
Terrence Cole 2014-09-17 10:32:37 -07:00
Parent 5a0cd66f24
Commit fd64eb51f9
40 changed files with 474 additions and 416 deletions


@ -190,9 +190,6 @@ class JitCode;
}
#ifdef DEBUG
bool
RuntimeFromMainThreadIsHeapMajorCollecting(JS::shadow::Zone *shadowZone);
// Barriers can't be triggered during backend Ion compilation, which may run on
// a helper thread.
bool
@ -233,10 +230,6 @@ template <> struct MapTypeToTraceKind<UnownedBaseShape> { static const JSGCTrace
template <> struct MapTypeToTraceKind<jit::JitCode> { static const JSGCTraceKind kind = JSTRACE_JITCODE; };
template <> struct MapTypeToTraceKind<types::TypeObject>{ static const JSGCTraceKind kind = JSTRACE_TYPE_OBJECT; };
template <typename T>
void
MarkUnbarriered(JSTracer *trc, T **thingp, const char *name);
// Direct value access used by the write barriers and the jits.
void
MarkValueUnbarriered(JSTracer *trc, Value *v, const char *name);
@ -244,104 +237,19 @@ MarkValueUnbarriered(JSTracer *trc, Value *v, const char *name);
// These three declarations are also present in gc/Marking.h, via the DeclMarker
// macro. Not great, but hard to avoid.
void
MarkObjectUnbarriered(JSTracer *trc, JSObject **obj, const char *name);
void
MarkStringUnbarriered(JSTracer *trc, JSString **str, const char *name);
void
MarkSymbolUnbarriered(JSTracer *trc, JS::Symbol **sym, const char *name);
// Note that some subclasses (e.g. ObjectImpl) specialize some of these
// methods.
template <typename T>
class BarrieredCell : public gc::Cell
{
public:
MOZ_ALWAYS_INLINE JS::Zone *zone() const { return tenuredZone(); }
MOZ_ALWAYS_INLINE JS::shadow::Zone *shadowZone() const { return JS::shadow::Zone::asShadowZone(zone()); }
MOZ_ALWAYS_INLINE JS::Zone *zoneFromAnyThread() const { return tenuredZoneFromAnyThread(); }
MOZ_ALWAYS_INLINE JS::shadow::Zone *shadowZoneFromAnyThread() const {
return JS::shadow::Zone::asShadowZone(zoneFromAnyThread());
}
static MOZ_ALWAYS_INLINE void readBarrier(T *thing) {
#ifdef JSGC_INCREMENTAL
JS_ASSERT(!CurrentThreadIsIonCompiling());
JS_ASSERT(!T::isNullLike(thing));
JS::shadow::Zone *shadowZone = thing->shadowZoneFromAnyThread();
if (shadowZone->needsIncrementalBarrier()) {
MOZ_ASSERT(!RuntimeFromMainThreadIsHeapMajorCollecting(shadowZone));
T *tmp = thing;
js::gc::MarkUnbarriered<T>(shadowZone->barrierTracer(), &tmp, "read barrier");
JS_ASSERT(tmp == thing);
}
if (JS::GCThingIsMarkedGray(thing))
JS::UnmarkGrayGCThingRecursively(thing, MapTypeToTraceKind<T>::kind);
#endif
}
static MOZ_ALWAYS_INLINE bool needWriteBarrierPre(JS::Zone *zone) {
#ifdef JSGC_INCREMENTAL
return JS::shadow::Zone::asShadowZone(zone)->needsIncrementalBarrier();
#else
return false;
#endif
}
static MOZ_ALWAYS_INLINE bool isNullLike(T *thing) { return !thing; }
static MOZ_ALWAYS_INLINE void writeBarrierPre(T *thing) {
#ifdef JSGC_INCREMENTAL
JS_ASSERT(!CurrentThreadIsIonCompiling());
if (isNullLike(thing) || !thing->shadowRuntimeFromAnyThread()->needsIncrementalBarrier())
return;
JS::shadow::Zone *shadowZone = thing->shadowZoneFromAnyThread();
if (shadowZone->needsIncrementalBarrier()) {
MOZ_ASSERT(!RuntimeFromMainThreadIsHeapMajorCollecting(shadowZone));
T *tmp = thing;
js::gc::MarkUnbarriered<T>(shadowZone->barrierTracer(), &tmp, "write barrier");
JS_ASSERT(tmp == thing);
}
#endif
}
static void writeBarrierPost(T *thing, void *cellp) {}
static void writeBarrierPostRelocate(T *thing, void *cellp) {}
static void writeBarrierPostRemove(T *thing, void *cellp) {}
template <typename S>
S *pod_malloc(size_t numElems) {
return zone()->template pod_malloc<S>(numElems);
}
template <typename S>
S *pod_calloc(size_t numElems) {
return zone()->template pod_calloc<S>(numElems);
}
template <typename S>
S *pod_realloc(S *prior, size_t oldSize, size_t newSize) {
return zone()->template pod_realloc<S>(prior, oldSize, newSize);
}
template <typename S, typename U>
S *pod_malloc_with_extra(size_t numExtra) {
return zone()->template pod_malloc_with_extra<S, U>(numExtra);
}
template <typename S, typename U>
S *pod_calloc_with_extra(size_t numExtra) {
return zone()->template pod_calloc_with_extra<S, U>(numExtra);
}
};
} // namespace gc
// Note: the following Zone-getting functions must be equivalent to the zone()
// and shadowZone() functions implemented by the subclasses of BarrieredCell.
static inline JS::shadow::Zone *
ShadowZoneOfString(JSString *str)
{
return JS::shadow::Zone::asShadowZone(reinterpret_cast<const js::gc::Cell *>(str)->tenuredZone());
// This code appears before the GC thing types are defined, so C++ does not
// know about the inheritance hierarchy yet.
static inline const gc::TenuredCell *AsTenuredCell(const JSString *str) {
return reinterpret_cast<const gc::TenuredCell *>(str);
}
static inline const gc::TenuredCell *AsTenuredCell(const JS::Symbol *sym) {
return reinterpret_cast<const gc::TenuredCell *>(sym);
}
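
A minimal, self-contained sketch of the pattern the AsTenuredCell helpers rely on: this point in the header is compiled before JSString and JS::Symbol have been declared as TenuredCell subclasses, so an implicit upcast is unavailable and the cast must be written by hand. All names below are illustrative, not SpiderMonkey's.

#include <cassert>

// Toy model: the helper is written before the compiler has seen that
// StringModel derives from TenuredCellModel, so it must cast manually.
struct TenuredCellModel { int zoneId; };

struct StringModel;  // forward declaration only; bases unknown here

static inline const TenuredCellModel *AsTenuredCellModel(const StringModel *s) {
    // Sound only because StringModel's TenuredCellModel subobject sits at
    // offset zero -- the same assumption the real AsTenuredCell makes.
    return reinterpret_cast<const TenuredCellModel *>(s);
}

// The full definition appears later and satisfies that assumption.
struct StringModel : TenuredCellModel {};

int main() {
    StringModel s;
    s.zoneId = 42;
    assert(AsTenuredCellModel(&s)->zoneId == 42);
    return 0;
}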
JS::Zone *
@ -356,15 +264,13 @@ ShadowZoneOfObjectFromAnyThread(JSObject *obj)
static inline JS::shadow::Zone *
ShadowZoneOfStringFromAnyThread(JSString *str)
{
return JS::shadow::Zone::asShadowZone(
reinterpret_cast<const js::gc::Cell *>(str)->tenuredZoneFromAnyThread());
return JS::shadow::Zone::asShadowZone(AsTenuredCell(str)->zoneFromAnyThread());
}
static inline JS::shadow::Zone *
ShadowZoneOfSymbolFromAnyThread(JS::Symbol *sym)
{
return JS::shadow::Zone::asShadowZone(
reinterpret_cast<const js::gc::Cell *>(sym)->tenuredZoneFromAnyThread());
return JS::shadow::Zone::asShadowZone(AsTenuredCell(sym)->zoneFromAnyThread());
}
MOZ_ALWAYS_INLINE JS::Zone *
@ -373,7 +279,7 @@ ZoneOfValueFromAnyThread(const JS::Value &value)
JS_ASSERT(value.isMarkable());
if (value.isObject())
return ZoneOfObjectFromAnyThread(value.toObject());
return static_cast<js::gc::Cell *>(value.toGCThing())->tenuredZoneFromAnyThread();
return js::gc::TenuredCell::fromPointer(value.toGCThing())->zoneFromAnyThread();
}
void


@ -554,7 +554,7 @@ ForkJoinNursery::allocateSlots(JSObject *obj, uint32_t nslots)
return nullptr;
if (!isInsideNewspace(obj))
return obj->pod_malloc<HeapSlot>(nslots);
return obj->zone()->pod_malloc<HeapSlot>(nslots);
if (nslots > MaxNurserySlots)
return allocateHugeSlots(obj, nslots);
@ -576,7 +576,7 @@ ForkJoinNursery::reallocateSlots(JSObject *obj, HeapSlot *oldSlots,
if (!isInsideNewspace(obj)) {
JS_ASSERT_IF(oldSlots, !isInsideNewspace(oldSlots));
return obj->pod_realloc<HeapSlot>(oldSlots, oldCount, newCount);
return obj->zone()->pod_realloc<HeapSlot>(oldSlots, oldCount, newCount);
}
if (!isInsideNewspace(oldSlots))
@ -626,7 +626,7 @@ ForkJoinNursery::allocateHugeSlots(JSObject *obj, size_t nslots)
if (nslots & mozilla::tl::MulOverflowMask<sizeof(HeapSlot)>::value)
return nullptr;
HeapSlot *slots = obj->pod_malloc<HeapSlot>(nslots);
HeapSlot *slots = obj->zone()->pod_malloc<HeapSlot>(nslots);
if (!slots)
return slots;
@ -639,7 +639,7 @@ HeapSlot *
ForkJoinNursery::reallocateHugeSlots(JSObject *obj, HeapSlot *oldSlots,
uint32_t oldCount, uint32_t newCount)
{
HeapSlot *newSlots = obj->pod_realloc<HeapSlot>(oldSlots, oldCount, newCount);
HeapSlot *newSlots = obj->zone()->pod_realloc<HeapSlot>(oldSlots, oldCount, newCount);
if (!newSlots)
return newSlots;
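
A recurring change in this patch: the pod_malloc/pod_calloc/pod_realloc helpers leave the removed BarrieredCell base, so allocation is now requested from the owning Zone explicitly, as in obj->zone()->pod_malloc<HeapSlot>(nslots) above. A self-contained sketch of that call shape, with all names illustrative:

#include <cassert>
#include <cstddef>
#include <cstdlib>

// Toy model of moving typed allocation helpers off the GC thing and onto
// its zone, mirroring the obj->pod_malloc -> obj->zone()->pod_malloc change.
struct ZoneModel {
    template <typename T>
    T *pod_malloc(std::size_t numElems) {
        return static_cast<T *>(std::malloc(numElems * sizeof(T)));
    }
};

struct ObjectModel {
    ZoneModel *zone_;
    ZoneModel *zone() const { return zone_; }
};

int main() {
    ZoneModel zone;
    ObjectModel obj{&zone};
    // New call shape: the allocation is attributed to the object's zone.
    int *slots = obj.zone()->pod_malloc<int>(8);
    assert(slots);
    std::free(slots);
    return 0;
}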


@ -20,7 +20,9 @@
#include "ds/BitArray.h"
#include "gc/Memory.h"
#include "js/GCAPI.h"
#include "js/HeapAPI.h"
#include "js/TracingAPI.h"
struct JSCompartment;
@ -36,6 +38,16 @@ namespace js {
class FreeOp;
#ifdef DEBUG
extern bool
RuntimeFromMainThreadIsHeapMajorCollecting(JS::shadow::Zone *shadowZone);
// Barriers can't be triggered during backend Ion compilation, which may run on
// a helper thread.
extern bool
CurrentThreadIsIonCompiling();
#endif
namespace gc {
struct Arena;
@ -44,6 +56,9 @@ class SortedArenaList;
struct ArenaHeader;
struct Chunk;
extern void
MarkKind(JSTracer *trc, void **thingp, JSGCTraceKind kind);
/*
* This flag allows an allocation site to request a specific heap based upon the
* estimated lifetime or lifetime requirements of objects allocated from that
@ -85,30 +100,55 @@ enum AllocKind {
static const unsigned FINALIZE_LIMIT = FINALIZE_LAST + 1;
static const unsigned FINALIZE_OBJECT_LIMIT = FINALIZE_OBJECT_LAST + 1;
static inline JSGCTraceKind
MapAllocToTraceKind(AllocKind kind)
{
static const JSGCTraceKind map[] = {
JSTRACE_OBJECT, /* FINALIZE_OBJECT0 */
JSTRACE_OBJECT, /* FINALIZE_OBJECT0_BACKGROUND */
JSTRACE_OBJECT, /* FINALIZE_OBJECT2 */
JSTRACE_OBJECT, /* FINALIZE_OBJECT2_BACKGROUND */
JSTRACE_OBJECT, /* FINALIZE_OBJECT4 */
JSTRACE_OBJECT, /* FINALIZE_OBJECT4_BACKGROUND */
JSTRACE_OBJECT, /* FINALIZE_OBJECT8 */
JSTRACE_OBJECT, /* FINALIZE_OBJECT8_BACKGROUND */
JSTRACE_OBJECT, /* FINALIZE_OBJECT12 */
JSTRACE_OBJECT, /* FINALIZE_OBJECT12_BACKGROUND */
JSTRACE_OBJECT, /* FINALIZE_OBJECT16 */
JSTRACE_OBJECT, /* FINALIZE_OBJECT16_BACKGROUND */
JSTRACE_SCRIPT, /* FINALIZE_SCRIPT */
JSTRACE_LAZY_SCRIPT,/* FINALIZE_LAZY_SCRIPT */
JSTRACE_SHAPE, /* FINALIZE_SHAPE */
JSTRACE_BASE_SHAPE, /* FINALIZE_BASE_SHAPE */
JSTRACE_TYPE_OBJECT,/* FINALIZE_TYPE_OBJECT */
JSTRACE_STRING, /* FINALIZE_FAT_INLINE_STRING */
JSTRACE_STRING, /* FINALIZE_STRING */
JSTRACE_STRING, /* FINALIZE_EXTERNAL_STRING */
JSTRACE_SYMBOL, /* FINALIZE_SYMBOL */
JSTRACE_JITCODE, /* FINALIZE_JITCODE */
};
JS_STATIC_ASSERT(JS_ARRAY_LENGTH(map) == FINALIZE_LIMIT);
return map[kind];
}
/*
* This must be an upper bound, but we do not need the least upper bound, so
* we just exclude non-background objects.
*/
static const size_t MAX_BACKGROUND_FINALIZE_KINDS = FINALIZE_LIMIT - FINALIZE_OBJECT_LIMIT / 2;
/*
* A GC cell is the base class for all GC things.
*/
class TenuredCell;
// A GC cell is the base class for all GC things.
struct Cell
{
public:
inline ArenaHeader *arenaHeader() const;
inline AllocKind tenuredGetAllocKind() const;
MOZ_ALWAYS_INLINE bool isMarked(uint32_t color = BLACK) const;
MOZ_ALWAYS_INLINE bool markIfUnmarked(uint32_t color = BLACK) const;
MOZ_ALWAYS_INLINE void unmark(uint32_t color) const;
MOZ_ALWAYS_INLINE void copyMarkBitsFrom(const Cell *src);
MOZ_ALWAYS_INLINE bool isTenured() const { return !IsInsideNursery(this); }
MOZ_ALWAYS_INLINE const TenuredCell *asTenured() const;
MOZ_ALWAYS_INLINE TenuredCell *asTenured();
inline JSRuntime *runtimeFromMainThread() const;
inline JS::shadow::Runtime *shadowRuntimeFromMainThread() const;
inline JS::Zone *tenuredZone() const;
inline JS::Zone *tenuredZoneFromAnyThread() const;
inline bool tenuredIsInsideZone(JS::Zone *zone) const;
// Note: Unrestricted access to the runtime of a GC thing from an arbitrary
// thread can easily lead to races. Use this method very carefully.
@ -117,9 +157,10 @@ struct Cell
inline StoreBuffer *storeBuffer() const;
static MOZ_ALWAYS_INLINE bool needWriteBarrierPre(JS::Zone *zone);
#ifdef DEBUG
inline bool isAligned() const;
inline bool isTenured() const;
#endif
protected:
@ -127,6 +168,51 @@ struct Cell
inline Chunk *chunk() const;
};
// A GC TenuredCell gets behaviors that are valid for things in the Tenured
// heap, such as access to the arena header and mark bits.
class TenuredCell : public Cell
{
public:
// Construct a TenuredCell from a void*, making various sanity assertions.
static MOZ_ALWAYS_INLINE TenuredCell *fromPointer(void *ptr);
static MOZ_ALWAYS_INLINE const TenuredCell *fromPointer(const void *ptr);
// Mark bit management.
MOZ_ALWAYS_INLINE bool isMarked(uint32_t color = BLACK) const;
MOZ_ALWAYS_INLINE bool markIfUnmarked(uint32_t color = BLACK) const;
MOZ_ALWAYS_INLINE void unmark(uint32_t color) const;
MOZ_ALWAYS_INLINE void copyMarkBitsFrom(const TenuredCell *src);
// Note: this is in TenuredCell because ObjectImpl subclasses are sometimes
// used tagged.
static MOZ_ALWAYS_INLINE bool isNullLike(const Cell *thing) { return !thing; }
// Access to the arena header.
inline ArenaHeader *arenaHeader() const;
inline AllocKind getAllocKind() const;
inline JS::Zone *zone() const;
inline JS::Zone *zoneFromAnyThread() const;
inline bool isInsideZone(JS::Zone *zone) const;
MOZ_ALWAYS_INLINE JS::shadow::Zone *shadowZone() const {
return JS::shadow::Zone::asShadowZone(zone());
}
MOZ_ALWAYS_INLINE JS::shadow::Zone *shadowZoneFromAnyThread() const {
return JS::shadow::Zone::asShadowZone(zoneFromAnyThread());
}
static MOZ_ALWAYS_INLINE void readBarrier(TenuredCell *thing);
static MOZ_ALWAYS_INLINE void writeBarrierPre(TenuredCell *thing);
static MOZ_ALWAYS_INLINE void writeBarrierPost(TenuredCell *thing, void *cellp);
static MOZ_ALWAYS_INLINE void writeBarrierPostRelocate(TenuredCell *thing, void *cellp);
static MOZ_ALWAYS_INLINE void writeBarrierPostRemove(TenuredCell *thing, void *cellp);
#ifdef DEBUG
inline bool isAligned() const;
#endif
};
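
The split above concentrates tenured-only operations (arena header access, mark bits, zone-via-arena) on TenuredCell, with Cell::asTenured() as the asserting downcast. A toy model of that contract follows; a boolean nursery flag stands in for the real IsInsideNursery address-range check, and all names are illustrative:

#include <cassert>

// Toy model of the Cell / TenuredCell split.
struct TenuredCellModel;

struct CellModel {
    bool inNursery;
    bool isTenured() const { return !inNursery; }
    TenuredCellModel *asTenured();  // checked downcast, as in Cell::asTenured
};

struct TenuredCellModel : CellModel {
    bool marked = false;
    bool markIfUnmarked() {  // mark-bit APIs live only on the tenured type
        if (marked)
            return false;
        marked = true;
        return true;
    }
};

TenuredCellModel *CellModel::asTenured() {
    assert(isTenured());  // invalid for nursery things, by design
    return static_cast<TenuredCellModel *>(this);
}

int main() {
    TenuredCellModel cell;
    cell.inNursery = false;
    assert(cell.asTenured()->markIfUnmarked());
    assert(!cell.asTenured()->markIfUnmarked());
    return 0;
}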
/*
* The mark bitmap has one bit per GC cell. For multi-cell GC things this
* wastes space but lets us avoid expensive divisions by the thing's size when
@ -762,7 +848,7 @@ struct ChunkBitmap
*word &= ~mask;
}
MOZ_ALWAYS_INLINE void copyMarkBit(Cell *dst, const Cell *src, uint32_t color) {
MOZ_ALWAYS_INLINE void copyMarkBit(Cell *dst, const TenuredCell *src, uint32_t color) {
uintptr_t *word, mask;
getMarkWordAndMask(dst, color, &word, &mask);
*word = (*word & ~mask) | (src->isMarked(color) ? mask : 0);
@ -1050,21 +1136,26 @@ ArenaHeader::unsetAllocDuringSweep()
}
static void
AssertValidColor(const void *thing, uint32_t color)
AssertValidColor(const TenuredCell *thing, uint32_t color)
{
#ifdef DEBUG
ArenaHeader *aheader = reinterpret_cast<const Cell *>(thing)->arenaHeader();
ArenaHeader *aheader = thing->arenaHeader();
JS_ASSERT(color < aheader->getThingSize() / CellSize);
#endif
}
inline ArenaHeader *
Cell::arenaHeader() const
MOZ_ALWAYS_INLINE const TenuredCell *
Cell::asTenured() const
{
JS_ASSERT(isTenured());
uintptr_t addr = address();
addr &= ~ArenaMask;
return reinterpret_cast<ArenaHeader *>(addr);
return static_cast<const TenuredCell *>(this);
}
MOZ_ALWAYS_INLINE TenuredCell *
Cell::asTenured()
{
JS_ASSERT(isTenured());
return static_cast<TenuredCell *>(this);
}
inline JSRuntime *
@ -1093,82 +1184,6 @@ Cell::shadowRuntimeFromAnyThread() const
return reinterpret_cast<JS::shadow::Runtime*>(runtimeFromAnyThread());
}
bool
Cell::isMarked(uint32_t color /* = BLACK */) const
{
JS_ASSERT(isTenured());
JS_ASSERT(arenaHeader()->allocated());
AssertValidColor(this, color);
return chunk()->bitmap.isMarked(this, color);
}
bool
Cell::markIfUnmarked(uint32_t color /* = BLACK */) const
{
JS_ASSERT(isTenured());
AssertValidColor(this, color);
return chunk()->bitmap.markIfUnmarked(this, color);
}
void
Cell::unmark(uint32_t color) const
{
JS_ASSERT(isTenured());
JS_ASSERT(color != BLACK);
AssertValidColor(this, color);
chunk()->bitmap.unmark(this, color);
}
void
Cell::copyMarkBitsFrom(const Cell *src)
{
JS_ASSERT(isTenured());
JS_ASSERT(src->isTenured());
ChunkBitmap &bitmap = chunk()->bitmap;
bitmap.copyMarkBit(this, src, BLACK);
bitmap.copyMarkBit(this, src, GRAY);
}
JS::Zone *
Cell::tenuredZone() const
{
JS::Zone *zone = arenaHeader()->zone;
JS_ASSERT(CurrentThreadCanAccessZone(zone));
JS_ASSERT(isTenured());
return zone;
}
JS::Zone *
Cell::tenuredZoneFromAnyThread() const
{
JS_ASSERT(isTenured());
return arenaHeader()->zone;
}
bool
Cell::tenuredIsInsideZone(JS::Zone *zone) const
{
JS_ASSERT(isTenured());
return zone == arenaHeader()->zone;
}
#ifdef DEBUG
bool
Cell::isAligned() const
{
return Arena::isAligned(address(), arenaHeader()->getThingSize());
}
bool
Cell::isTenured() const
{
#ifdef JSGC_GENERATIONAL
return !IsInsideNursery(this);
#endif
return true;
}
#endif
inline uintptr_t
Cell::address() const
{
@ -1207,14 +1222,176 @@ InFreeList(ArenaHeader *aheader, void *thing)
return firstSpan.inFreeList(addr);
}
} /* namespace gc */
/* static */ MOZ_ALWAYS_INLINE bool
Cell::needWriteBarrierPre(JS::Zone *zone) {
#ifdef JSGC_INCREMENTAL
return JS::shadow::Zone::asShadowZone(zone)->needsIncrementalBarrier();
#else
return false;
#endif
}
gc::AllocKind
gc::Cell::tenuredGetAllocKind() const
/* static */ MOZ_ALWAYS_INLINE TenuredCell *
TenuredCell::fromPointer(void *ptr)
{
JS_ASSERT(static_cast<TenuredCell *>(ptr)->isTenured());
return static_cast<TenuredCell *>(ptr);
}
/* static */ MOZ_ALWAYS_INLINE const TenuredCell *
TenuredCell::fromPointer(const void *ptr)
{
JS_ASSERT(static_cast<const TenuredCell *>(ptr)->isTenured());
return static_cast<const TenuredCell *>(ptr);
}
bool
TenuredCell::isMarked(uint32_t color /* = BLACK */) const
{
JS_ASSERT(arenaHeader()->allocated());
AssertValidColor(this, color);
return chunk()->bitmap.isMarked(this, color);
}
bool
TenuredCell::markIfUnmarked(uint32_t color /* = BLACK */) const
{
AssertValidColor(this, color);
return chunk()->bitmap.markIfUnmarked(this, color);
}
void
TenuredCell::unmark(uint32_t color) const
{
JS_ASSERT(color != BLACK);
AssertValidColor(this, color);
chunk()->bitmap.unmark(this, color);
}
void
TenuredCell::copyMarkBitsFrom(const TenuredCell *src)
{
ChunkBitmap &bitmap = chunk()->bitmap;
bitmap.copyMarkBit(this, src, BLACK);
bitmap.copyMarkBit(this, src, GRAY);
}
inline ArenaHeader *
TenuredCell::arenaHeader() const
{
JS_ASSERT(isTenured());
uintptr_t addr = address();
addr &= ~ArenaMask;
return reinterpret_cast<ArenaHeader *>(addr);
}
AllocKind
TenuredCell::getAllocKind() const
{
return arenaHeader()->getAllocKind();
}
JS::Zone *
TenuredCell::zone() const
{
JS::Zone *zone = arenaHeader()->zone;
JS_ASSERT(CurrentThreadCanAccessZone(zone));
return zone;
}
JS::Zone *
TenuredCell::zoneFromAnyThread() const
{
return arenaHeader()->zone;
}
bool
TenuredCell::isInsideZone(JS::Zone *zone) const
{
return zone == arenaHeader()->zone;
}
/* static */ MOZ_ALWAYS_INLINE void
TenuredCell::readBarrier(TenuredCell *thing)
{
#ifdef JSGC_INCREMENTAL
JS_ASSERT(!CurrentThreadIsIonCompiling());
JS_ASSERT(!isNullLike(thing));
JS::shadow::Zone *shadowZone = thing->shadowZoneFromAnyThread();
if (shadowZone->needsIncrementalBarrier()) {
MOZ_ASSERT(!RuntimeFromMainThreadIsHeapMajorCollecting(shadowZone));
void *tmp = thing;
shadowZone->barrierTracer()->setTracingName("read barrier");
MarkKind(shadowZone->barrierTracer(), &tmp,
MapAllocToTraceKind(thing->getAllocKind()));
JS_ASSERT(tmp == thing);
}
if (JS::GCThingIsMarkedGray(thing))
JS::UnmarkGrayGCThingRecursively(thing, MapAllocToTraceKind(thing->getAllocKind()));
#endif
}
/* static */ MOZ_ALWAYS_INLINE void
TenuredCell::writeBarrierPre(TenuredCell *thing) {
#ifdef JSGC_INCREMENTAL
JS_ASSERT(!CurrentThreadIsIonCompiling());
if (isNullLike(thing) || !thing->shadowRuntimeFromAnyThread()->needsIncrementalBarrier())
return;
JS::shadow::Zone *shadowZone = thing->shadowZoneFromAnyThread();
if (shadowZone->needsIncrementalBarrier()) {
MOZ_ASSERT(!RuntimeFromMainThreadIsHeapMajorCollecting(shadowZone));
void *tmp = thing;
shadowZone->barrierTracer()->setTracingName("pre barrier");
MarkKind(shadowZone->barrierTracer(), &tmp,
MapAllocToTraceKind(thing->getAllocKind()));
JS_ASSERT(tmp == thing);
}
#endif
}
static MOZ_ALWAYS_INLINE void
AssertValidToSkipBarrier(TenuredCell *thing)
{
JS_ASSERT(!IsInsideNursery(thing));
JS_ASSERT_IF(thing, MapAllocToTraceKind(thing->getAllocKind()) != JSTRACE_OBJECT);
}
/* static */ MOZ_ALWAYS_INLINE void
TenuredCell::writeBarrierPost(TenuredCell *thing, void *cellp)
{
AssertValidToSkipBarrier(thing);
}
/* static */ MOZ_ALWAYS_INLINE void
TenuredCell::writeBarrierPostRelocate(TenuredCell *thing, void *cellp)
{
AssertValidToSkipBarrier(thing);
}
/* static */ MOZ_ALWAYS_INLINE void
TenuredCell::writeBarrierPostRemove(TenuredCell *thing, void *cellp)
{
AssertValidToSkipBarrier(thing);
}
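
For context on how these static hooks are typically driven: a pre-write barrier fires on the value about to be overwritten, so a barriered pointer wrapper calls writeBarrierPre on its old value before storing the new one. A toy sketch, where a counter stands in for the real marking and all names are illustrative:

#include <cassert>

// Toy model of a pre-barriered pointer driving a writeBarrierPre-style hook.
struct ThingModel {
    static int preBarrierCount;
    static void writeBarrierPre(ThingModel *thing) {
        if (thing)
            ++preBarrierCount;  // the real barrier would mark *thing if needed
    }
};
int ThingModel::preBarrierCount = 0;

struct PreBarriered {
    ThingModel *value = nullptr;
    PreBarriered &operator=(ThingModel *v) {
        ThingModel::writeBarrierPre(value);  // barrier fires on the OLD value
        value = v;
        return *this;
    }
};

int main() {
    ThingModel a, b;
    PreBarriered p;
    p = &a;  // old value is null: no barrier fires
    p = &b;  // old value is &a: barrier fires once
    assert(ThingModel::preBarrierCount == 1);
    return 0;
}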
#ifdef DEBUG
bool
Cell::isAligned() const
{
if (!isTenured())
return true;
return asTenured()->isAligned();
}
bool
TenuredCell::isAligned() const
{
return Arena::isAligned(address(), arenaHeader()->getThingSize());
}
#endif
} /* namespace gc */
} /* namespace js */
#endif /* gc_Heap_h */


@ -119,7 +119,7 @@ js::IterateGrayObjects(Zone *zone, GCThingCallback cellCallback, void *data)
for (size_t finalizeKind = 0; finalizeKind <= FINALIZE_OBJECT_LAST; finalizeKind++) {
for (ZoneCellIterUnderGC i(zone, AllocKind(finalizeKind)); !i.done(); i.next()) {
JSObject *obj = i.get<JSObject>();
if (obj->isMarked(GRAY))
if (obj->asTenured()->isMarked(GRAY))
cellCallback(data, obj);
}
}


@ -227,7 +227,7 @@ CheckMarkedThing(JSTracer *trc, T **thingp)
* ArenaHeader may not be synced with the real one in ArenaLists.
*/
JS_ASSERT_IF(IsThingPoisoned(thing) && rt->isHeapBusy(),
!InFreeList(thing->arenaHeader(), thing));
!InFreeList(thing->asTenured()->arenaHeader(), thing));
#endif
}
@ -447,14 +447,14 @@ IsMarked(T **thingp)
}
}
#endif // JSGC_GENERATIONAL
Zone *zone = (*thingp)->tenuredZone();
Zone *zone = (*thingp)->asTenured()->zone();
if (!zone->isCollecting() || zone->isGCFinished())
return true;
#ifdef JSGC_COMPACTING
if (zone->isGCCompacting() && IsForwarded(*thingp))
*thingp = Forwarded(*thingp);
#endif
return (*thingp)->isMarked();
return (*thingp)->asTenured()->isMarked();
}
template <typename T>
@ -492,7 +492,7 @@ IsAboutToBeFinalized(T **thingp)
}
#endif // JSGC_GENERATIONAL
Zone *zone = thing->tenuredZone();
Zone *zone = thing->asTenured()->zone();
if (zone->isGCSweeping()) {
/*
* We should return false for things that have been allocated during
@ -501,9 +501,10 @@ IsAboutToBeFinalized(T **thingp)
* compartment group and during minor gc. Rather than do the extra check,
* we just assert that it's not necessary.
*/
JS_ASSERT_IF(!rt->isHeapMinorCollecting(), !thing->arenaHeader()->allocatedDuringIncremental);
JS_ASSERT_IF(!rt->isHeapMinorCollecting(),
!thing->asTenured()->arenaHeader()->allocatedDuringIncremental);
return !thing->isMarked();
return !thing->asTenured()->isMarked();
}
#ifdef JSGC_COMPACTING
else if (zone->isGCCompacting() && IsForwarded(thing)) {
@ -542,7 +543,7 @@ UpdateIfRelocated(JSRuntime *rt, T **thingp)
#endif // JSGC_GENERATIONAL
#ifdef JSGC_COMPACTING
Zone *zone = (*thingp)->tenuredZone();
Zone *zone = (*thingp)->zone();
if (zone->isGCCompacting() && IsForwarded(*thingp))
*thingp = Forwarded(*thingp);
#endif
@ -661,7 +662,8 @@ gc::MarkKind(JSTracer *trc, void **thingp, JSGCTraceKind kind)
JS_ASSERT(thingp);
JS_ASSERT(*thingp);
DebugOnly<Cell *> cell = static_cast<Cell *>(*thingp);
JS_ASSERT_IF(cell->isTenured(), kind == MapAllocToTraceKind(cell->tenuredGetAllocKind()));
JS_ASSERT_IF(cell->isTenured(),
kind == MapAllocToTraceKind(cell->asTenured()->getAllocKind()));
switch (kind) {
case JSTRACE_OBJECT:
MarkInternal(trc, reinterpret_cast<JSObject **>(thingp));
@ -943,8 +945,9 @@ ShouldMarkCrossCompartment(JSTracer *trc, JSObject *src, Cell *cell)
JS_ASSERT(color == BLACK);
return false;
}
TenuredCell *tenured = cell->asTenured();
JS::Zone *zone = cell->tenuredZone();
JS::Zone *zone = tenured->zone();
if (color == BLACK) {
/*
* Having black->gray edges violates our promise to the cycle
@ -953,7 +956,7 @@ ShouldMarkCrossCompartment(JSTracer *trc, JSObject *src, Cell *cell)
* source and destination of the cross-compartment edge should be gray,
* but the source was marked black by the conservative scanner.
*/
if (cell->isMarked(GRAY)) {
if (tenured->isMarked(GRAY)) {
JS_ASSERT(!zone->isCollecting());
trc->runtime()->gc.setFoundBlackGrayEdges();
}
@ -965,7 +968,7 @@ ShouldMarkCrossCompartment(JSTracer *trc, JSObject *src, Cell *cell)
* but it will be later, so record the cell so it can be marked gray
* at the appropriate time.
*/
if (!cell->isMarked())
if (!tenured->isMarked())
DelayCrossCompartmentGrayMarking(src);
return false;
}
@ -1035,7 +1038,7 @@ PushMarkStack(GCMarker *gcmarker, ObjectImpl *thing)
JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing);
JS_ASSERT(!IsInsideNursery(thing));
if (thing->markIfUnmarked(gcmarker->getMarkColor()))
if (thing->asTenured()->markIfUnmarked(gcmarker->getMarkColor()))
gcmarker->pushObject(thing);
}
@ -1053,7 +1056,7 @@ MaybePushMarkStackBetweenSlices(GCMarker *gcmarker, JSObject *thing)
JS_COMPARTMENT_ASSERT(rt, thing);
JS_ASSERT_IF(rt->isHeapBusy(), !IsInsideNursery(thing));
if (!IsInsideNursery(thing) && thing->markIfUnmarked(gcmarker->getMarkColor()))
if (!IsInsideNursery(thing) && thing->asTenured()->markIfUnmarked(gcmarker->getMarkColor()))
gcmarker->pushObject(thing);
}
@ -1063,7 +1066,7 @@ PushMarkStack(GCMarker *gcmarker, JSFunction *thing)
JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing);
JS_ASSERT(!IsInsideNursery(thing));
if (thing->markIfUnmarked(gcmarker->getMarkColor()))
if (thing->asTenured()->markIfUnmarked(gcmarker->getMarkColor()))
gcmarker->pushObject(thing);
}
@ -1703,7 +1706,7 @@ GCMarker::processMarkStackTop(SliceBudget &budget)
JSObject *obj2 = &v.toObject();
JS_COMPARTMENT_ASSERT(runtime(), obj2);
JS_ASSERT(obj->compartment() == obj2->compartment());
if (obj2->markIfUnmarked(getMarkColor())) {
if (obj2->asTenured()->markIfUnmarked(getMarkColor())) {
pushValueArray(obj, vp, end);
obj = obj2;
goto scan_obj;
@ -1883,12 +1886,6 @@ AssertNonGrayGCThing(JSTracer *trc, void **thingp, JSGCTraceKind kind)
}
#endif
static void
UnmarkGrayGCThing(void *thing)
{
static_cast<js::gc::Cell *>(thing)->unmark(js::gc::GRAY);
}
static void
UnmarkGrayChildren(JSTracer *trc, void **thingp, JSGCTraceKind kind);
@ -1977,7 +1974,7 @@ UnmarkGrayChildren(JSTracer *trc, void **thingp, JSGCTraceKind kind)
return;
UnmarkGrayTracer *tracer = static_cast<UnmarkGrayTracer *>(trc);
UnmarkGrayGCThing(thing);
TenuredCell::fromPointer(thing)->unmark(js::gc::GRAY);
tracer->unmarkedAny = true;
/*
@ -2026,7 +2023,7 @@ JS::UnmarkGrayGCThingRecursively(void *thing, JSGCTraceKind kind)
if (!JS::GCThingIsMarkedGray(thing))
return false;
UnmarkGrayGCThing(thing);
TenuredCell::fromPointer(thing)->unmark(js::gc::GRAY);
unmarkedArg = true;
}


@ -224,7 +224,7 @@ js::Nursery::allocateSlots(JSObject *obj, uint32_t nslots)
JS_ASSERT(nslots > 0);
if (!IsInsideNursery(obj))
return obj->pod_malloc<HeapSlot>(nslots);
return obj->zone()->pod_malloc<HeapSlot>(nslots);
if (nslots > MaxNurserySlots)
return allocateHugeSlots(obj->zone(), nslots);
@ -249,10 +249,10 @@ js::Nursery::reallocateSlots(JSObject *obj, HeapSlot *oldSlots,
uint32_t oldCount, uint32_t newCount)
{
if (!IsInsideNursery(obj))
return obj->pod_realloc<HeapSlot>(oldSlots, oldCount, newCount);
return obj->zone()->pod_realloc<HeapSlot>(oldSlots, oldCount, newCount);
if (!isInside(oldSlots)) {
HeapSlot *newSlots = obj->pod_realloc<HeapSlot>(oldSlots, oldCount, newCount);
HeapSlot *newSlots = obj->zone()->pod_realloc<HeapSlot>(oldSlots, oldCount, newCount);
if (newSlots && oldSlots != newSlots) {
hugeSlots.remove(oldSlots);
/* If this put fails, we will only leak the slots. */


@ -533,7 +533,7 @@ GCMarker::markDelayedChildren(ArenaHeader *aheader)
aheader->markOverflow = 0;
for (ArenaCellIterUnderGC i(aheader); !i.done(); i.next()) {
Cell *t = i.getCell();
TenuredCell *t = i.getCell();
if (always || t->isMarked()) {
t->markIfUnmarked();
JS_TraceChildren(this, t, MapAllocToTraceKind(aheader->getAllocKind()));
@ -587,7 +587,7 @@ GCMarker::checkZone(void *p)
{
JS_ASSERT(started);
DebugOnly<Cell *> cell = static_cast<Cell *>(p);
JS_ASSERT_IF(cell->isTenured(), cell->tenuredZone()->isCollecting());
JS_ASSERT_IF(cell->isTenured(), cell->asTenured()->zone()->isCollecting());
}
#endif
@ -656,7 +656,7 @@ GCMarker::appendGrayRoot(void *thing, JSGCTraceKind kind)
root.debugPrintIndex = debugPrintIndex();
#endif
Zone *zone = static_cast<Cell *>(thing)->tenuredZone();
Zone *zone = TenuredCell::fromPointer(thing)->zone();
if (zone->isCollecting()) {
// See the comment on SetMaybeAliveFlag to see why we only do this for
// objects and scripts. We rely on gray root buffering for this to work,


@ -264,7 +264,7 @@ oom:
}
static bool
IsMarkedOrAllocated(Cell *cell)
IsMarkedOrAllocated(TenuredCell *cell)
{
return cell->isMarked() || cell->arenaHeader()->allocatedDuringIncremental;
}
@ -300,7 +300,7 @@ CheckEdge(JSTracer *jstrc, void **thingp, JSGCTraceKind kind)
static void
AssertMarkedOrAllocated(const EdgeValue &edge)
{
if (!edge.thing || IsMarkedOrAllocated(static_cast<Cell *>(edge.thing)))
if (!edge.thing || IsMarkedOrAllocated(TenuredCell::fromPointer(edge.thing)))
return;
// Permanent atoms and well-known symbols aren't marked during graph traversal.


@ -268,7 +268,7 @@ js::ZoneOfValue(const JS::Value &value)
JS_ASSERT(value.isMarkable());
if (value.isObject())
return value.toObject().zone();
return static_cast<js::gc::Cell *>(value.toGCThing())->tenuredZone();
return js::gc::TenuredCell::fromPointer(value.toGCThing())->zone();
}
bool


@ -361,7 +361,7 @@ BaselineScript::New(JSScript *jsscript, uint32_t prologueOffset, uint32_t epilog
paddedPCMappingSize +
paddedBytecodeTypesMapSize;
BaselineScript *script = jsscript->pod_malloc_with_extra<BaselineScript, uint8_t>(allocBytes);
BaselineScript *script = jsscript->zone()->pod_malloc_with_extra<BaselineScript, uint8_t>(allocBytes);
if (!script)
return nullptr;
new (script) BaselineScript(prologueOffset, epilogueOffset,


@ -183,7 +183,7 @@ struct BaselineScript
BaselineScript(uint32_t prologueOffset, uint32_t epilogueOffset,
uint32_t spsPushToggleOffset, uint32_t postDebugPrologueOffset);
static BaselineScript *New(JSScript *script, uint32_t prologueOffset,
static BaselineScript *New(JSScript *jsscript, uint32_t prologueOffset,
uint32_t epilogueOffset, uint32_t postDebugPrologueOffset,
uint32_t spsPushToggleOffset, size_t icEntries,
size_t pcMappingIndexEntries, size_t pcMappingSize,


@ -4011,7 +4011,7 @@ CodeGenerator::emitAllocateGCThingPar(LInstruction *lir, Register objReg, Regist
JS_ASSERT(lir->mirRaw());
JS_ASSERT(lir->mirRaw()->isInstruction());
gc::AllocKind allocKind = templateObj->tenuredGetAllocKind();
gc::AllocKind allocKind = templateObj->asTenured()->getAllocKind();
#ifdef JSGC_FJGENERATIONAL
OutOfLineCode *ool = oolCallVM(NewGCThingParInfo, lir,
(ArgList(), Imm32(allocKind)), StoreRegisterTo(objReg));
@ -4193,7 +4193,7 @@ bool
CodeGenerator::visitCreateThisWithTemplate(LCreateThisWithTemplate *lir)
{
JSObject *templateObject = lir->mir()->templateObject();
gc::AllocKind allocKind = templateObject->tenuredGetAllocKind();
gc::AllocKind allocKind = templateObject->asTenured()->getAllocKind();
gc::InitialHeap initialHeap = lir->mir()->initialHeap();
Register objReg = ToRegister(lir->output());
Register tempReg = ToRegister(lir->temp());


@ -6760,7 +6760,7 @@ NumFixedSlots(JSObject *object)
// shape and can race with the main thread if we are building off thread.
// The allocation kind and object class (which goes through the type) can
// be read freely, however.
gc::AllocKind kind = object->tenuredGetAllocKind();
gc::AllocKind kind = object->asTenured()->getAllocKind();
return gc::GetGCKindSlots(kind, object->getClass());
}


@ -30,7 +30,7 @@ class CodeOffsetLabel;
class PatchableBackedge;
class IonBuilder;
class JitCode : public gc::BarrieredCell<JitCode>
class JitCode : public gc::TenuredCell
{
protected:
uint8_t *code_;


@ -595,7 +595,7 @@ MacroAssembler::newGCThing(Register result, Register temp, JSObject *templateObj
// frees them. Instead just assert this case does not happen.
JS_ASSERT(!templateObj->numDynamicSlots());
gc::AllocKind allocKind = templateObj->tenuredGetAllocKind();
gc::AllocKind allocKind = templateObj->asTenured()->getAllocKind();
JS_ASSERT(allocKind >= gc::FINALIZE_OBJECT0 && allocKind <= gc::FINALIZE_OBJECT_LAST);
allocateObject(result, temp, allocKind, templateObj->numDynamicSlots(), initialHeap, fail);
@ -606,7 +606,7 @@ MacroAssembler::createGCObject(Register obj, Register temp, JSObject *templateOb
gc::InitialHeap initialHeap, Label *fail, bool initFixedSlots)
{
uint32_t nDynamicSlots = templateObj->numDynamicSlots();
gc::AllocKind allocKind = templateObj->tenuredGetAllocKind();
gc::AllocKind allocKind = templateObj->asTenured()->getAllocKind();
JS_ASSERT(allocKind >= gc::FINALIZE_OBJECT0 && allocKind <= gc::FINALIZE_OBJECT_LAST);
// Arrays with copy on write elements do not need fixed space for an
@ -736,7 +736,7 @@ void
MacroAssembler::newGCThingPar(Register result, Register cx, Register tempReg1, Register tempReg2,
JSObject *templateObject, Label *fail)
{
gc::AllocKind allocKind = templateObject->tenuredGetAllocKind();
gc::AllocKind allocKind = templateObject->asTenured()->getAllocKind();
JS_ASSERT(allocKind >= gc::FINALIZE_OBJECT0 && allocKind <= gc::FINALIZE_OBJECT_LAST);
JS_ASSERT(!templateObject->numDynamicSlots());


@ -3107,7 +3107,7 @@ MNewArray::shouldUseVM() const
JS_ASSERT(count() < JSObject::NELEMENTS_LIMIT);
size_t arraySlots =
gc::GetGCKindSlots(templateObject()->tenuredGetAllocKind()) - ObjectElements::VALUES_PER_HEADER;
gc::GetGCKindSlots(templateObject()->asTenured()->getAllocKind()) - ObjectElements::VALUES_PER_HEADER;
// Allocate space using the VMCall when mir hints it needs to get allocated
// immediately, but only when data doesn't fit the available array slots.


@ -1136,9 +1136,9 @@ AssertValidObjectPtr(JSContext *cx, JSObject *obj)
if (obj->isTenured()) {
JS_ASSERT(obj->isAligned());
gc::AllocKind kind = obj->tenuredGetAllocKind();
gc::AllocKind kind = obj->asTenured()->getAllocKind();
JS_ASSERT(kind >= js::gc::FINALIZE_OBJECT0 && kind <= js::gc::FINALIZE_OBJECT_LAST);
JS_ASSERT(obj->tenuredZone() == cx->zone());
JS_ASSERT(obj->asTenured()->zone() == cx->zone());
}
}
@ -1152,15 +1152,15 @@ AssertValidStringPtr(JSContext *cx, JSString *str)
}
if (str->isAtom())
JS_ASSERT(cx->runtime()->isAtomsZone(str->tenuredZone()));
JS_ASSERT(cx->runtime()->isAtomsZone(str->zone()));
else
JS_ASSERT(str->tenuredZone() == cx->zone());
JS_ASSERT(str->zone() == cx->zone());
JS_ASSERT(str->runtimeFromMainThread() == cx->runtime());
JS_ASSERT(str->isAligned());
JS_ASSERT(str->length() <= JSString::MAX_LENGTH);
gc::AllocKind kind = str->tenuredGetAllocKind();
gc::AllocKind kind = str->getAllocKind();
if (str->isFatInline())
JS_ASSERT(kind == gc::FINALIZE_FAT_INLINE_STRING);
else if (str->isExternal())
@ -1178,7 +1178,7 @@ AssertValidSymbolPtr(JSContext *cx, JS::Symbol *sym)
if (sym->runtimeFromAnyThread() != cx->runtime())
return;
JS_ASSERT(cx->runtime()->isAtomsZone(sym->tenuredZone()));
JS_ASSERT(cx->runtime()->isAtomsZone(sym->zone()));
JS_ASSERT(sym->runtimeFromMainThread() == cx->runtime());
JS_ASSERT(sym->isAligned());
@ -1187,7 +1187,7 @@ AssertValidSymbolPtr(JSContext *cx, JS::Symbol *sym)
AssertValidStringPtr(cx, desc);
}
JS_ASSERT(sym->tenuredGetAllocKind() == gc::FINALIZE_SYMBOL);
JS_ASSERT(sym->getAllocKind() == gc::FINALIZE_SYMBOL);
}
void


@ -644,7 +644,7 @@ js::VisitGrayWrapperTargets(Zone *zone, GCThingCallback callback, void *closure)
for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next()) {
for (JSCompartment::WrapperEnum e(comp); !e.empty(); e.popFront()) {
gc::Cell *thing = e.front().key().wrapped;
if (!IsInsideNursery(thing) && thing->isMarked(gc::GRAY))
if (thing->isTenured() && thing->asTenured()->isMarked(gc::GRAY))
callback(closure, thing);
}
}
@ -723,7 +723,7 @@ struct DumpHeapTracer : public JSTracer
static char
MarkDescriptor(void *thing)
{
gc::Cell *cell = static_cast<gc::Cell*>(thing);
gc::TenuredCell *cell = gc::TenuredCell::fromPointer(thing);
if (cell->isMarked(gc::BLACK))
return cell->isMarked(gc::GRAY) ? 'G' : 'B';
else
@ -968,7 +968,7 @@ JS::IncrementalReferenceBarrier(void *ptr, JSGCTraceKind kind)
#ifdef DEBUG
Zone *zone = kind == JSTRACE_OBJECT
? static_cast<JSObject *>(cell)->zone()
: cell->tenuredZone();
: cell->asTenured()->zone();
JS_ASSERT(!zone->runtimeFromMainThread()->isHeapMajorCollecting());
#endif


@ -461,7 +461,7 @@ class JSFunction : public JSObject
public:
inline bool isExtended() const {
JS_STATIC_ASSERT(FinalizeKind != ExtendedFinalizeKind);
JS_ASSERT_IF(isTenured(), !!(flags() & EXTENDED) == (tenuredGetAllocKind() == ExtendedFinalizeKind));
JS_ASSERT_IF(isTenured(), !!(flags() & EXTENDED) == (asTenured()->getAllocKind() == ExtendedFinalizeKind));
return !!(flags() & EXTENDED);
}
@ -484,7 +484,7 @@ class JSFunction : public JSObject
js::gc::AllocKind kind = FinalizeKind;
if (isExtended())
kind = ExtendedFinalizeKind;
JS_ASSERT_IF(isTenured(), kind == tenuredGetAllocKind());
JS_ASSERT_IF(isTenured(), kind == asTenured()->getAllocKind());
return kind;
}
};


@ -414,6 +414,14 @@ static const int BackgroundPhaseLength[] = {
sizeof(BackgroundPhaseShapes) / sizeof(AllocKind)
};
template<>
JSObject *
ArenaCellIterImpl::get<JSObject>() const
{
JS_ASSERT(!done());
return reinterpret_cast<JSObject *>(getCell());
}
#ifdef DEBUG
void
ArenaHeader::checkSynchronizedWithFreeList() const
@ -488,7 +496,7 @@ Arena::finalize(FreeOp *fop, AllocKind thingKind, size_t thingSize)
for (ArenaCellIterUnderFinalize i(&aheader); !i.done(); i.next()) {
T *t = i.get<T>();
if (t->isMarked()) {
if (t->asTenured()->isMarked()) {
uintptr_t thing = reinterpret_cast<uintptr_t>(t);
if (thing != firstThingOrSuccessorOfLastMarkedThing) {
// We just finished passing over one or more free things,
@ -1871,7 +1879,7 @@ GCMarker::delayMarkingArena(ArenaHeader *aheader)
void
GCMarker::delayMarkingChildren(const void *thing)
{
const Cell *cell = reinterpret_cast<const Cell *>(thing);
const TenuredCell *cell = TenuredCell::fromPointer(thing);
cell->arenaHeader()->markOverflow = 1;
delayMarkingArena(cell->arenaHeader());
}
@ -2081,11 +2089,11 @@ AutoDisableCompactingGC::~AutoDisableCompactingGC()
}
static void
ForwardCell(Cell *dest, Cell *src)
ForwardCell(TenuredCell *dest, TenuredCell *src)
{
// Mark a cell as having been relocated and store a forwarding pointer to
// the new cell.
MOZ_ASSERT(src->tenuredZone() == dest->tenuredZone());
MOZ_ASSERT(src->zone() == dest->zone());
// Putting the values this way round is a terrible hack to make
// ObjectImpl::zone() work on forwarded objects.
@ -2102,7 +2110,7 @@ ArenaContainsGlobal(ArenaHeader *arena)
return false;
for (ArenaCellIterUnderGC i(arena); !i.done(); i.next()) {
JSObject *obj = static_cast<JSObject *>(i.getCell());
JSObject *obj = i.get<JSObject>();
if (obj->is<GlobalObject>())
return true;
}
@ -2182,21 +2190,22 @@ PtrIsInRange(const void *ptr, const void *start, size_t length)
#endif
static bool
RelocateCell(Zone *zone, Cell *src, AllocKind thingKind, size_t thingSize)
RelocateCell(Zone *zone, TenuredCell *src, AllocKind thingKind, size_t thingSize)
{
// Allocate a new cell.
void *dst = zone->allocator.arenas.allocateFromFreeList(thingKind, thingSize);
if (!dst)
dst = js::gc::ArenaLists::refillFreeListInGC(zone, thingKind);
if (!dst)
void *dstAlloc = zone->allocator.arenas.allocateFromFreeList(thingKind, thingSize);
if (!dstAlloc)
dstAlloc = js::gc::ArenaLists::refillFreeListInGC(zone, thingKind);
if (!dstAlloc)
return false;
TenuredCell *dst = TenuredCell::fromPointer(dstAlloc);
// Copy source cell contents to destination.
memcpy(dst, src, thingSize);
if (thingKind <= FINALIZE_OBJECT_LAST) {
JSObject *srcObj = static_cast<JSObject *>(src);
JSObject *dstObj = static_cast<JSObject *>(dst);
JSObject *srcObj = static_cast<JSObject *>(static_cast<Cell *>(src));
JSObject *dstObj = static_cast<JSObject *>(static_cast<Cell *>(dst));
// Fixup the pointer to inline object elements if necessary.
if (srcObj->hasFixedElements())
@ -2211,10 +2220,10 @@ RelocateCell(Zone *zone, Cell *src, AllocKind thingKind, size_t thingSize)
}
// Copy the mark bits.
static_cast<Cell *>(dst)->copyMarkBitsFrom(src);
dst->copyMarkBitsFrom(src);
// Mark source cell as forwarded and leave a pointer to the destination.
ForwardCell(static_cast<Cell *>(dst), src);
ForwardCell(dst, src);
return true;
}
@ -2327,8 +2336,8 @@ GCRuntime::relocateArenas()
void
MovingTracer::Visit(JSTracer *jstrc, void **thingp, JSGCTraceKind kind)
{
Cell *thing = static_cast<Cell *>(*thingp);
Zone *zone = thing->tenuredZoneFromAnyThread();
TenuredCell *thing = TenuredCell::fromPointer(*thingp);
Zone *zone = thing->zoneFromAnyThread();
if (!zone->isGCCompacting()) {
JS_ASSERT(!IsForwarded(thing));
return;
@ -2374,7 +2383,6 @@ MovingTracer::Sweep(JSTracer *jstrc)
// TODO: Should possibly just call PurgeRuntime() here.
rt->newObjectCache.purge();
rt->nativeIterCache.purge();
rt->regExpTestCache.purge();
}
/*
@ -2470,9 +2478,9 @@ GCRuntime::releaseRelocatedArenas(ArenaHeader *relocatedList)
#ifdef DEBUG
for (ArenaHeader *arena = relocatedList; arena; arena = arena->next) {
for (ArenaCellIterUnderFinalize i(arena); !i.done(); i.next()) {
Cell *src = i.getCell();
TenuredCell *src = i.getCell();
JS_ASSERT(IsForwarded(src));
Cell *dest = Forwarded(src);
TenuredCell *dest = Forwarded(src);
JS_ASSERT(src->isMarked(BLACK) == dest->isMarked(BLACK));
JS_ASSERT(src->isMarked(GRAY) == dest->isMarked(GRAY));
}
@ -3611,14 +3619,14 @@ static void
CheckCompartmentCallback(JSTracer *trcArg, void **thingp, JSGCTraceKind kind)
{
CompartmentCheckTracer *trc = static_cast<CompartmentCheckTracer *>(trcArg);
Cell *thing = (Cell *)*thingp;
TenuredCell *thing = TenuredCell::fromPointer(*thingp);
JSCompartment *comp = CompartmentOfCell(thing, kind);
if (comp && trc->compartment) {
CheckCompartment(trc, comp, thing, kind);
} else {
JS_ASSERT(thing->tenuredZone() == trc->zone ||
trc->runtime()->isAtomsZone(thing->tenuredZone()));
JS_ASSERT(thing->zone() == trc->zone ||
trc->runtime()->isAtomsZone(thing->zone()));
}
}
@ -4201,7 +4209,7 @@ JSCompartment::findOutgoingEdges(ComponentFinder<JS::Zone> &finder)
for (js::WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront()) {
CrossCompartmentKey::Kind kind = e.front().key().kind;
JS_ASSERT(kind != CrossCompartmentKey::StringWrapper);
Cell *other = e.front().key().wrapped;
TenuredCell *other = e.front().key().wrapped->asTenured();
if (kind == CrossCompartmentKey::ObjectWrapper) {
/*
* Add edge to wrapped object compartment if wrapped object is not
@ -4209,7 +4217,7 @@ JSCompartment::findOutgoingEdges(ComponentFinder<JS::Zone> &finder)
* after wrapped compartment.
*/
if (!other->isMarked(BLACK) || other->isMarked(GRAY)) {
JS::Zone *w = other->tenuredZone();
JS::Zone *w = other->zone();
if (w->isGCMarking())
finder.addEdgeTo(w);
}
@ -4223,7 +4231,7 @@ JSCompartment::findOutgoingEdges(ComponentFinder<JS::Zone> &finder)
* with call to Debugger::findCompartmentEdges below) that debugger
* and debuggee objects are always swept in the same group.
*/
JS::Zone *w = other->tenuredZone();
JS::Zone *w = other->zone();
if (w->isGCMarking())
finder.addEdgeTo(w);
}
@ -4466,11 +4474,11 @@ MarkIncomingCrossCompartmentPointers(JSRuntime *rt, const uint32_t color)
JS_ASSERT(dst->compartment() == c);
if (color == GRAY) {
if (IsObjectMarked(&src) && src->isMarked(GRAY))
if (IsObjectMarked(&src) && src->asTenured()->isMarked(GRAY))
MarkGCThingUnbarriered(&rt->gc.marker, (void**)&dst,
"cross-compartment gray pointer");
} else {
if (IsObjectMarked(&src) && !src->isMarked(GRAY))
if (IsObjectMarked(&src) && !src->asTenured()->isMarked(GRAY))
MarkGCThingUnbarriered(&rt->gc.marker, (void**)&dst,
"cross-compartment black pointer");
}
@ -6303,8 +6311,8 @@ AutoDisableProxyCheck::~AutoDisableProxyCheck()
JS_FRIEND_API(void)
JS::AssertGCThingMustBeTenured(JSObject *obj)
{
JS_ASSERT((!IsNurseryAllocable(obj->tenuredGetAllocKind()) || obj->getClass()->finalize) &&
obj->isTenured());
JS_ASSERT(obj->isTenured() &&
(!IsNurseryAllocable(obj->asTenured()->getAllocKind()) || obj->getClass()->finalize));
}
JS_FRIEND_API(void)
@ -6314,7 +6322,7 @@ js::gc::AssertGCThingHasType(js::gc::Cell *cell, JSGCTraceKind kind)
if (IsInsideNursery(cell))
JS_ASSERT(kind == JSTRACE_OBJECT);
else
JS_ASSERT(MapAllocToTraceKind(cell->tenuredGetAllocKind()) == kind);
JS_ASSERT(MapAllocToTraceKind(cell->asTenured()->getAllocKind()) == kind);
}
JS_FRIEND_API(size_t)


@ -52,37 +52,6 @@ enum State {
#endif
};
static inline JSGCTraceKind
MapAllocToTraceKind(AllocKind kind)
{
static const JSGCTraceKind map[] = {
JSTRACE_OBJECT, /* FINALIZE_OBJECT0 */
JSTRACE_OBJECT, /* FINALIZE_OBJECT0_BACKGROUND */
JSTRACE_OBJECT, /* FINALIZE_OBJECT2 */
JSTRACE_OBJECT, /* FINALIZE_OBJECT2_BACKGROUND */
JSTRACE_OBJECT, /* FINALIZE_OBJECT4 */
JSTRACE_OBJECT, /* FINALIZE_OBJECT4_BACKGROUND */
JSTRACE_OBJECT, /* FINALIZE_OBJECT8 */
JSTRACE_OBJECT, /* FINALIZE_OBJECT8_BACKGROUND */
JSTRACE_OBJECT, /* FINALIZE_OBJECT12 */
JSTRACE_OBJECT, /* FINALIZE_OBJECT12_BACKGROUND */
JSTRACE_OBJECT, /* FINALIZE_OBJECT16 */
JSTRACE_OBJECT, /* FINALIZE_OBJECT16_BACKGROUND */
JSTRACE_SCRIPT, /* FINALIZE_SCRIPT */
JSTRACE_LAZY_SCRIPT,/* FINALIZE_LAZY_SCRIPT */
JSTRACE_SHAPE, /* FINALIZE_SHAPE */
JSTRACE_BASE_SHAPE, /* FINALIZE_BASE_SHAPE */
JSTRACE_TYPE_OBJECT,/* FINALIZE_TYPE_OBJECT */
JSTRACE_STRING, /* FINALIZE_FAT_INLINE_STRING */
JSTRACE_STRING, /* FINALIZE_STRING */
JSTRACE_STRING, /* FINALIZE_EXTERNAL_STRING */
JSTRACE_SYMBOL, /* FINALIZE_SYMBOL */
JSTRACE_JITCODE, /* FINALIZE_JITCODE */
};
JS_STATIC_ASSERT(JS_ARRAY_LENGTH(map) == FINALIZE_LIMIT);
return map[kind];
}
/* Return a printable string for the given kind, for diagnostic purposes. */
const char *
TraceKindAsAscii(JSGCTraceKind kind);


@ -41,7 +41,7 @@ ThreadSafeContext::isThreadLocal(T thing) const
JS_ASSERT(!IsInsideNursery(thing));
// The thing is not in the nursery, but is it in the private tenured area?
if (allocator_->arenas.containsArena(runtime_, thing->arenaHeader()))
if (allocator_->arenas.containsArena(runtime_, thing->asTenured()->arenaHeader()))
{
// GC should be suppressed in preparation for mutating thread local
// objects, as we don't want to trip any barriers.
@ -92,7 +92,7 @@ GetGCThingTraceKind(const void *thing)
if (IsInsideNursery(cell))
return JSTRACE_OBJECT;
#endif
return MapAllocToTraceKind(cell->tenuredGetAllocKind());
return MapAllocToTraceKind(cell->asTenured()->getAllocKind());
}
inline void
@ -232,9 +232,9 @@ class ArenaCellIterImpl
return thing == limit;
}
Cell *getCell() const {
TenuredCell *getCell() const {
JS_ASSERT(!done());
return reinterpret_cast<Cell *>(thing);
return reinterpret_cast<TenuredCell *>(thing);
}
template<typename T> T *get() const {
@ -250,6 +250,10 @@ class ArenaCellIterImpl
}
};
template<>
JSObject *
ArenaCellIterImpl::get<JSObject>() const;
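
The get<JSObject> specialization is declared here and defined out of line (see the hunk adding ArenaCellIterImpl::get<JSObject> earlier in this patch). A minimal sketch of that C++ mechanism, with illustrative names:

#include <cassert>

// Toy model of declaring a member-template specialization and defining it
// out of line, as done for ArenaCellIterImpl::get<JSObject>.
struct IterModel {
    void *thing;
    template <typename T>
    T *get() const { return static_cast<T *>(thing); }
};

struct SpecialModel { int tag; };

// Declaration only; the definition can live in a single .cpp file.
template <>
SpecialModel *IterModel::get<SpecialModel>() const;

// The out-of-line definition (would normally sit in that .cpp file).
template <>
SpecialModel *IterModel::get<SpecialModel>() const {
    return reinterpret_cast<SpecialModel *>(thing);
}

int main() {
    SpecialModel s{7};
    IterModel it{&s};
    assert(it.get<SpecialModel>()->tag == 7);
    return 0;
}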
class ArenaCellIterUnderGC : public ArenaCellIterImpl
{
public:
@ -570,7 +574,7 @@ CheckIncrementalZoneState(ThreadSafeContext *cx, T *t)
Zone *zone = cx->asJSContext()->zone();
JS_ASSERT_IF(t && zone->wasGCStarted() && (zone->isGCMarking() || zone->isGCSweeping()),
t->arenaHeader()->allocatedDuringIncremental);
t->asTenured()->arenaHeader()->allocatedDuringIncremental);
#endif
}


@ -2793,11 +2793,11 @@ TypeCompartment::fixObjectType(ExclusiveContext *cx, JSObject *obj)
if (obj->isIndexed())
objType->setFlags(cx, OBJECT_FLAG_SPARSE_INDEXES);
ScopedJSFreePtr<jsid> ids(objType->pod_calloc<jsid>(properties.length()));
ScopedJSFreePtr<jsid> ids(objType->zone()->pod_calloc<jsid>(properties.length()));
if (!ids)
return;
ScopedJSFreePtr<Type> types(objType->pod_calloc<Type>(properties.length()));
ScopedJSFreePtr<Type> types(objType->zone()->pod_calloc<Type>(properties.length()));
if (!types)
return;
@ -3663,7 +3663,7 @@ JSScript::makeTypes(JSContext *cx)
unsigned count = TypeScript::NumTypeSets(this);
TypeScript *typeScript = (TypeScript *)
pod_calloc<uint8_t>(TypeScript::SizeIncludingTypeArray(count));
zone()->pod_calloc<uint8_t>(TypeScript::SizeIncludingTypeArray(count));
if (!typeScript)
return false;
@ -3735,7 +3735,7 @@ TypeNewScript::make(JSContext *cx, TypeObject *type, JSFunction *fun)
newScript->fun = fun;
JSObject **preliminaryObjects = type->pod_calloc<JSObject *>(PRELIMINARY_OBJECT_COUNT);
JSObject **preliminaryObjects = type->zone()->pod_calloc<JSObject *>(PRELIMINARY_OBJECT_COUNT);
if (!preliminaryObjects)
return;
@ -3999,7 +3999,7 @@ TypeNewScript::maybeAnalyze(JSContext *cx, TypeObject *type, bool *regenerate, b
if (!initializerVector.append(done))
return false;
initializerList = type->pod_calloc<Initializer>(initializerVector.length());
initializerList = type->zone()->pod_calloc<Initializer>(initializerVector.length());
if (!initializerList)
return false;
PodCopy(initializerList, initializerVector.begin(), initializerVector.length());


@ -999,7 +999,7 @@ class TypeNewScript
*/
/* Type information about an object accessed by a script. */
struct TypeObject : gc::BarrieredCell<TypeObject>
struct TypeObject : public gc::TenuredCell
{
private:
/* Class shared by object using this type. */


@ -1774,7 +1774,7 @@ js_NewGenerator(JSContext *cx, const InterpreterRegs &stackRegs)
static_assert(sizeof(InterpreterFrame) % sizeof(HeapValue) == 0,
"The Values stored after InterpreterFrame must be aligned.");
unsigned nvals = vplen + VALUES_PER_STACK_FRAME + stackfp->script()->nslots();
JSGenerator *gen = obj->pod_calloc_with_extra<JSGenerator, HeapValue>(nvals);
JSGenerator *gen = obj->zone()->pod_calloc_with_extra<JSGenerator, HeapValue>(nvals);
if (!gen)
return nullptr;


@ -2049,7 +2049,7 @@ js::DeepCloneObjectLiteral(JSContext *cx, HandleObject obj, NewObjectKind newKin
} else {
// Object literals are tenured by default, as they are held by the JSScript.
JS_ASSERT(obj->isTenured());
AllocKind kind = obj->tenuredGetAllocKind();
AllocKind kind = obj->asTenured()->getAllocKind();
Rooted<TypeObject*> typeObj(cx, obj->getType(cx));
if (!typeObj)
return nullptr;
@ -2157,7 +2157,7 @@ js::XDRObjectLiteral(XDRState<mode> *xdr, MutableHandleObject obj)
if (mode == XDR_ENCODE) {
JS_ASSERT(obj->getClass() == &JSObject::class_);
JS_ASSERT(obj->isTenured());
kind = obj->tenuredGetAllocKind();
kind = obj->asTenured()->getAllocKind();
}
if (!xdr->codeEnum32(&kind))
@ -2355,7 +2355,7 @@ js::CloneObjectLiteral(JSContext *cx, HandleObject parent, HandleObject srcObj)
{
if (srcObj->getClass() == &JSObject::class_) {
AllocKind kind = GetBackgroundAllocKind(GuessObjectGCKind(srcObj->numFixedSlots()));
JS_ASSERT_IF(srcObj->isTenured(), kind == srcObj->tenuredGetAllocKind());
JS_ASSERT_IF(srcObj->isTenured(), kind == srcObj->asTenured()->getAllocKind());
JSObject *proto = cx->global()->getOrCreateObjectPrototype(cx);
if (!proto)
@ -2384,7 +2384,8 @@ js::CloneObjectLiteral(JSContext *cx, HandleObject parent, HandleObject srcObj)
// can be freely copied between compartments.
value = srcObj->getDenseElement(i);
JS_ASSERT_IF(value.isMarkable(),
cx->runtime()->isAtomsZone(value.toGCThing()->tenuredZone()));
value.toGCThing()->isTenured() &&
cx->runtime()->isAtomsZone(value.toGCThing()->asTenured()->zone()));
id = INT_TO_JSID(i);
if (!JSObject::defineGeneric(cx, res, id, value, nullptr, nullptr, JSPROP_ENUMERATE))
@ -2470,7 +2471,7 @@ JSObject::ReserveForTradeGuts(JSContext *cx, JSObject *aArg, JSObject *bArg,
return false;
} else {
reserved.newbshape = EmptyShape::getInitialShape(cx, aClass, aProto, a->getParent(), a->getMetadata(),
b->tenuredGetAllocKind());
b->asTenured()->getAllocKind());
if (!reserved.newbshape)
return false;
}
@ -2479,7 +2480,7 @@ JSObject::ReserveForTradeGuts(JSContext *cx, JSObject *aArg, JSObject *bArg,
return false;
} else {
reserved.newashape = EmptyShape::getInitialShape(cx, bClass, bProto, b->getParent(), b->getMetadata(),
a->tenuredGetAllocKind());
a->asTenured()->getAllocKind());
if (!reserved.newashape)
return false;
}
@ -2522,13 +2523,13 @@ JSObject::ReserveForTradeGuts(JSContext *cx, JSObject *aArg, JSObject *bArg,
unsigned bdynamic = dynamicSlotsCount(reserved.newbfixed, a->slotSpan(), a->getClass());
if (adynamic) {
reserved.newaslots = a->pod_malloc<HeapSlot>(adynamic);
reserved.newaslots = a->zone()->pod_malloc<HeapSlot>(adynamic);
if (!reserved.newaslots)
return false;
Debug_SetSlotRangeToCrashOnTouch(reserved.newaslots, adynamic);
}
if (bdynamic) {
reserved.newbslots = b->pod_malloc<HeapSlot>(bdynamic);
reserved.newbslots = b->zone()->pod_malloc<HeapSlot>(bdynamic);
if (!reserved.newbslots)
return false;
Debug_SetSlotRangeToCrashOnTouch(reserved.newbslots, bdynamic);
@ -2679,8 +2680,8 @@ bool
JSObject::swap(JSContext *cx, HandleObject a, HandleObject b)
{
// Ensure swap doesn't cause a finalizer to not be run.
JS_ASSERT(IsBackgroundFinalized(a->tenuredGetAllocKind()) ==
IsBackgroundFinalized(b->tenuredGetAllocKind()));
JS_ASSERT(IsBackgroundFinalized(a->asTenured()->getAllocKind()) ==
IsBackgroundFinalized(b->asTenured()->getAllocKind()));
JS_ASSERT(a->compartment() == b->compartment());
unsigned r = NotifyGCPreSwap(a, b);
@ -3026,7 +3027,7 @@ AllocateSlots(ThreadSafeContext *cx, JSObject *obj, uint32_t nslots)
if (cx->isForkJoinContext())
return cx->asForkJoinContext()->nursery().allocateSlots(obj, nslots);
#endif
return obj->pod_malloc<HeapSlot>(nslots);
return obj->zone()->pod_malloc<HeapSlot>(nslots);
}
// This will not run the garbage collector. If a nursery cannot accommodate the slot array
@ -3049,7 +3050,7 @@ ReallocateSlots(ThreadSafeContext *cx, JSObject *obj, HeapSlot *oldSlots,
oldCount, newCount);
}
#endif
return obj->pod_realloc<HeapSlot>(oldSlots, oldCount, newCount);
return obj->zone()->pod_realloc<HeapSlot>(oldSlots, oldCount, newCount);
}
/* static */ bool
@ -3335,7 +3336,7 @@ AllocateElements(ThreadSafeContext *cx, JSObject *obj, uint32_t nelems)
return cx->asForkJoinContext()->nursery().allocateElements(obj, nelems);
#endif
return reinterpret_cast<js::ObjectElements *>(obj->pod_malloc<HeapSlot>(nelems));
return reinterpret_cast<js::ObjectElements *>(obj->zone()->pod_malloc<HeapSlot>(nelems));
}
// This will not run the garbage collector. If a nursery cannot accommodate the element array
@ -3358,8 +3359,8 @@ ReallocateElements(ThreadSafeContext *cx, JSObject *obj, ObjectElements *oldHead
#endif
return reinterpret_cast<js::ObjectElements *>(
obj->pod_realloc<HeapSlot>(reinterpret_cast<HeapSlot *>(oldHeader),
oldCount, newCount));
obj->zone()->pod_realloc<HeapSlot>(reinterpret_cast<HeapSlot *>(oldHeader),
oldCount, newCount));
}
// Round up |reqAllocated| to a good size. Up to 1 Mebi (i.e. 1,048,576) the


@ -84,7 +84,7 @@ JSObject::finalize(js::FreeOp *fop)
#ifdef DEBUG
JS_ASSERT(isTenured());
if (!IsBackgroundFinalized(tenuredGetAllocKind())) {
if (!IsBackgroundFinalized(asTenured()->getAllocKind())) {
/* Assert we're on the main thread. */
JS_ASSERT(CurrentThreadCanAccessRuntime(fop->runtime()));
}
@ -1059,7 +1059,7 @@ CopyInitializerObject(JSContext *cx, HandleObject baseobj, NewObjectKind newKind
gc::AllocKind allocKind = gc::GetGCObjectFixedSlotsKind(baseobj->numFixedSlots());
allocKind = gc::GetBackgroundAllocKind(allocKind);
JS_ASSERT_IF(baseobj->isTenured(), allocKind == baseobj->tenuredGetAllocKind());
JS_ASSERT_IF(baseobj->isTenured(), allocKind == baseobj->asTenured()->getAllocKind());
RootedObject obj(cx);
obj = NewBuiltinClassInstance(cx, &JSObject::class_, allocKind, newKind);
if (!obj)


@ -286,7 +286,7 @@ Shape::fixupDictionaryShapeAfterMovingGC()
}
JS_ASSERT(!IsInsideNursery(reinterpret_cast<Cell *>(listp)));
AllocKind kind = reinterpret_cast<Cell *>(listp)->tenuredGetAllocKind();
AllocKind kind = TenuredCell::fromPointer(listp)->getAllocKind();
JS_ASSERT(kind == FINALIZE_SHAPE || kind <= FINALIZE_OBJECT_LAST);
if (kind == FINALIZE_SHAPE) {
// listp points to the parent field of the next shape.


@ -2922,7 +2922,7 @@ js::CloneScript(JSContext *cx, HandleObject enclosingScope, HandleFunction fun,
/* NB: Keep this in sync with XDRScript. */
/* Some embeddings are not careful to use ExposeObjectToActiveJS as needed. */
JS_ASSERT(!src->sourceObject()->isMarked(gc::GRAY));
JS_ASSERT(!src->sourceObject()->asTenured()->isMarked(gc::GRAY));
uint32_t nconsts = src->hasConsts() ? src->consts()->length : 0;
uint32_t nobjects = src->hasObjects() ? src->objects()->length : 0;
@ -3693,7 +3693,7 @@ LazyScript::CreateRaw(ExclusiveContext *cx, HandleFunction fun,
size_t bytes = (p.numFreeVariables * sizeof(FreeVariable))
+ (p.numInnerFunctions * sizeof(HeapPtrFunction));
ScopedJSFreePtr<uint8_t> table(bytes ? fun->pod_malloc<uint8_t>(bytes) : nullptr);
ScopedJSFreePtr<uint8_t> table(bytes ? fun->zone()->pod_malloc<uint8_t>(bytes) : nullptr);
if (bytes && !table)
return nullptr;


@@ -731,7 +731,7 @@ XDRScriptConst(XDRState<mode> *xdr, MutableHandleValue vp);

 } /* namespace js */

-class JSScript : public js::gc::BarrieredCell<JSScript>
+class JSScript : public js::gc::TenuredCell
 {
     template <js::XDRMode mode>
     friend
@@ -1287,7 +1287,7 @@ class JSScript : public js::gc::BarrieredCell<JSScript>
     }
     void setIonScript(JSContext *maybecx, js::jit::IonScript *ionScript) {
         if (hasIonScript())
-            js::jit::IonScript::writeBarrierPre(tenuredZone(), ion);
+            js::jit::IonScript::writeBarrierPre(zone(), ion);
         ion = ionScript;
         MOZ_ASSERT_IF(hasIonScript(), hasBaselineScript());
         updateBaselineOrIonRaw(maybecx);
@@ -1340,7 +1340,7 @@ class JSScript : public js::gc::BarrieredCell<JSScript>
     }
     void setParallelIonScript(js::jit::IonScript *ionScript) {
         if (hasParallelIonScript())
-            js::jit::IonScript::writeBarrierPre(tenuredZone(), parallelIon);
+            js::jit::IonScript::writeBarrierPre(zone(), parallelIon);
         parallelIon = ionScript;
     }
@@ -1722,7 +1722,7 @@ class AliasedFormalIter

 // Information about a script which may be (or has been) lazily compiled to
 // bytecode from its source.
-class LazyScript : public gc::BarrieredCell<LazyScript>
+class LazyScript : public gc::TenuredCell
 {
   public:
     class FreeVariable
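The base-class change above is the heart of the patch and repeats across the remaining files: GC things stop inheriting from the CRTP template BarrieredCell<T> and instead derive from the concrete TenuredCell (or from plain Cell for things that may live in the nursery). A compilable toy contrast, with sketch types that only illustrate the shape of the two designs and are not the real classes:

    struct Cell {};  // sketch of the common GC header

    // Old shape (simplified): a template base instantiated once per GC type,
    // so every type carried its own copy of the barrier interface.
    template <typename T>
    struct BarrieredCellSketch : Cell {
        static void writeBarrierPre(T *) { /* per-type barrier */ }
    };

    // New shape (simplified): one concrete base shared by everything that is
    // always tenured; barriers and zone accessors live in a single place.
    struct TenuredCellSketch : Cell {
        static void writeBarrierPre(TenuredCellSketch *) { /* shared barrier */ }
    };

    struct ScriptSketch : TenuredCellSketch {};      // cf. JSScript above
    struct LazyScriptSketch : TenuredCellSketch {};  // cf. LazyScript above

One consequence visible in these hunks: a TenuredCell is tenured by construction, so the separate tenuredZone()/tenuredGetAllocKind() spellings collapse into plain zone() and getAllocKind().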


@@ -166,7 +166,7 @@ inline void
 JSScript::setBaselineScript(JSContext *maybecx, js::jit::BaselineScript *baselineScript)
 {
     if (hasBaselineScript())
-        js::jit::BaselineScript::writeBarrierPre(tenuredZone(), baseline);
+        js::jit::BaselineScript::writeBarrierPre(zone(), baseline);
     MOZ_ASSERT(!hasIonScript());

     baseline = baselineScript;
     updateBaselineOrIonRaw(maybecx);


@@ -176,7 +176,7 @@ ObjectValueMap::findZoneEdges()
     Zone *mapZone = compartment->zone();
     for (Range r = all(); !r.empty(); r.popFront()) {
         JSObject *key = r.front().key();
-        if (key->isMarked(BLACK) && !key->isMarked(GRAY))
+        if (key->asTenured()->isMarked(BLACK) && !key->asTenured()->isMarked(GRAY))
             continue;
         JSWeakmapKeyDelegateOp op = key->getClass()->ext.weakmapKeyDelegateOp;
         if (!op)


@@ -166,5 +166,5 @@ bool Wrapper::finalizeInBackground(Value priv) const
      */
     if (IsInsideNursery(&priv.toObject()))
         return true;
-    return IsBackgroundFinalized(priv.toObject().tenuredGetAllocKind());
+    return IsBackgroundFinalized(priv.toObject().asTenured()->getAllocKind());
 }
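Note the ordering this hunk preserves: the nursery test comes first, because only a tenured object has an alloc kind to inspect. A standalone sketch of that decision, using hypothetical stand-in types rather than the real ones:

    // Hypothetical stand-ins; not SpiderMonkey's real types.
    enum class AllocKind { ObjectBackground, ObjectForeground };

    struct ObjSketch {
        bool inNursery;
        AllocKind kind;  // only meaningful once the object is tenured
    };

    bool finalizeInBackground(const ObjSketch &obj) {
        if (obj.inNursery)
            return true;  // nursery things are swept by minor GC, not finalizers
        return obj.kind == AllocKind::ObjectBackground;
    }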


@@ -349,11 +349,8 @@ IsObjectValueInCompartment(js::Value v, JSCompartment *comp);
  * will change so that some members are private, and only certain methods that
  * act upon them will be protected.
  */
-class ObjectImpl : public gc::BarrieredCell<ObjectImpl>
+class ObjectImpl : public gc::Cell
 {
-    friend Zone *js::gc::BarrieredCell<ObjectImpl>::zone() const;
-    friend Zone *js::gc::BarrieredCell<ObjectImpl>::zoneFromAnyThread() const;
-
   protected:
     /*
      * Shape of the object, encodes the layout of the object's properties and
@@ -826,7 +823,8 @@ class ObjectImpl : public gc::BarrieredCell<ObjectImpl>
     /* Memory usage functions. */
     size_t tenuredSizeOfThis() const {
-        return js::gc::Arena::thingSize(tenuredGetAllocKind());
+        MOZ_ASSERT(isTenured());
+        return js::gc::Arena::thingSize(asTenured()->getAllocKind());
     }

     /* Elements accessors. */
@@ -942,7 +940,26 @@ class ObjectImpl : public gc::BarrieredCell<ObjectImpl>
     }

     /* GC Accessors */
     static const size_t MaxTagBits = 3;
     void setInitialSlots(HeapSlot *newSlots) { slots = newSlots; }
+    static bool isNullLike(const ObjectImpl *obj) { return uintptr_t(obj) < (1 << MaxTagBits); }
+
+    MOZ_ALWAYS_INLINE JS::Zone *zone() const {
+        return shape_->zone();
+    }
+    MOZ_ALWAYS_INLINE JS::shadow::Zone *shadowZone() const {
+        return JS::shadow::Zone::asShadowZone(zone());
+    }
+    MOZ_ALWAYS_INLINE JS::Zone *zoneFromAnyThread() const {
+        return shape_->zoneFromAnyThread();
+    }
+    MOZ_ALWAYS_INLINE JS::shadow::Zone *shadowZoneFromAnyThread() const {
+        return JS::shadow::Zone::asShadowZone(zoneFromAnyThread());
+    }
+
+    static MOZ_ALWAYS_INLINE void readBarrier(ObjectImpl *obj);
+    static MOZ_ALWAYS_INLINE void writeBarrierPre(ObjectImpl *obj);
+    static MOZ_ALWAYS_INLINE void writeBarrierPost(ObjectImpl *obj, void *cellp);
+    static MOZ_ALWAYS_INLINE void writeBarrierPostRelocate(ObjectImpl *obj, void *cellp);
+    static MOZ_ALWAYS_INLINE void writeBarrierPostRemove(ObjectImpl *obj, void *cellp);

     /* JIT Accessors */
     static size_t offsetOfShape() { return offsetof(ObjectImpl, shape_); }
@@ -963,37 +980,22 @@ class ObjectImpl : public gc::BarrieredCell<ObjectImpl>
     static size_t offsetOfSlots() { return offsetof(ObjectImpl, slots); }
 };

-namespace gc {
-
-template <>
-MOZ_ALWAYS_INLINE Zone *
-BarrieredCell<ObjectImpl>::zone() const
+/* static */ MOZ_ALWAYS_INLINE void
+ObjectImpl::readBarrier(ObjectImpl *obj)
 {
-    const ObjectImpl* obj = static_cast<const ObjectImpl*>(this);
-    JS::Zone *zone = obj->shape_->zone();
-    JS_ASSERT(CurrentThreadCanAccessZone(zone));
-    return zone;
+    if (!isNullLike(obj) && obj->isTenured())
+        obj->asTenured()->readBarrier(obj->asTenured());
 }

-template <>
-MOZ_ALWAYS_INLINE Zone *
-BarrieredCell<ObjectImpl>::zoneFromAnyThread() const
+/* static */ MOZ_ALWAYS_INLINE void
+ObjectImpl::writeBarrierPre(ObjectImpl *obj)
 {
-    const ObjectImpl* obj = static_cast<const ObjectImpl*>(this);
-    return obj->shape_->zoneFromAnyThread();
+    if (!isNullLike(obj) && obj->isTenured())
+        obj->asTenured()->writeBarrierPre(obj->asTenured());
 }
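Because ObjectImpl now derives from plain Cell rather than TenuredCell, its barriers filter first: tagged "null-like" pointers and nursery objects take no incremental barrier, and only tenured objects delegate to the shared TenuredCell barrier. A self-contained sketch of that dispatch, with illustrative stand-in types only:

    #include <cstdint>

    // Stand-in for TenuredCell: barriers only make sense for tenured things.
    struct TenuredSketch {
        static void readBarrier(TenuredSketch *) { /* incremental-GC marking */ }
        static void writeBarrierPre(TenuredSketch *) { /* snapshot-at-beginning */ }
    };

    struct ObjectSketch {
        bool tenured = true;
        bool isTenured() const { return tenured; }
        // The cast stands in for asTenured(): valid in the real engine because
        // tenured cells share the Cell layout.
        TenuredSketch *asTenured() { return reinterpret_cast<TenuredSketch *>(this); }

        // Small integers double as special values (cf. MaxTagBits above), so
        // anything below the tag range is treated like null.
        static bool isNullLike(const ObjectSketch *obj) {
            return reinterpret_cast<std::uintptr_t>(obj) < (1 << 3);
        }

        static void readBarrier(ObjectSketch *obj) {
            if (!isNullLike(obj) && obj->isTenured())
                TenuredSketch::readBarrier(obj->asTenured());
        }
    };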
-// TypeScript::global uses 0x1 as a special value.
-template<>
-/* static */ inline bool
-BarrieredCell<ObjectImpl>::isNullLike(ObjectImpl *obj)
-{
-    return IsNullTaggedPointer(obj);
-}
-
-template<>
-/* static */ inline void
-BarrieredCell<ObjectImpl>::writeBarrierPost(ObjectImpl *obj, void *cellp)
+/* static */ MOZ_ALWAYS_INLINE void
+ObjectImpl::writeBarrierPost(ObjectImpl *obj, void *cellp)
 {
     JS_ASSERT(cellp);
 #ifdef JSGC_GENERATIONAL
@@ -1002,13 +1004,12 @@ BarrieredCell<ObjectImpl>::writeBarrierPost(ObjectImpl *obj, void *cellp)
     JS_ASSERT(obj == *static_cast<ObjectImpl **>(cellp));
     gc::StoreBuffer *storeBuffer = obj->storeBuffer();
     if (storeBuffer)
-        storeBuffer->putCellFromAnyThread(static_cast<Cell **>(cellp));
+        storeBuffer->putCellFromAnyThread(static_cast<gc::Cell **>(cellp));
 #endif
 }

-template<>
-/* static */ inline void
-BarrieredCell<ObjectImpl>::writeBarrierPostRelocate(ObjectImpl *obj, void *cellp)
+/* static */ MOZ_ALWAYS_INLINE void
+ObjectImpl::writeBarrierPostRelocate(ObjectImpl *obj, void *cellp)
 {
     JS_ASSERT(cellp);
     JS_ASSERT(obj);
@@ -1016,25 +1017,22 @@ BarrieredCell<ObjectImpl>::writeBarrierPostRelocate(ObjectImpl *obj, void *cellp
 #ifdef JSGC_GENERATIONAL
     gc::StoreBuffer *storeBuffer = obj->storeBuffer();
     if (storeBuffer)
-        storeBuffer->putRelocatableCellFromAnyThread(static_cast<Cell **>(cellp));
+        storeBuffer->putRelocatableCellFromAnyThread(static_cast<gc::Cell **>(cellp));
 #endif
 }

-template<>
-/* static */ inline void
-BarrieredCell<ObjectImpl>::writeBarrierPostRemove(ObjectImpl *obj, void *cellp)
+/* static */ MOZ_ALWAYS_INLINE void
+ObjectImpl::writeBarrierPostRemove(ObjectImpl *obj, void *cellp)
 {
     JS_ASSERT(cellp);
     JS_ASSERT(obj);
     JS_ASSERT(obj == *static_cast<ObjectImpl **>(cellp));
 #ifdef JSGC_GENERATIONAL
     obj->shadowRuntimeFromAnyThread()->gcStoreBufferPtr()->removeRelocatableCellFromAnyThread(
-        static_cast<Cell **>(cellp));
+        static_cast<gc::Cell **>(cellp));
 #endif
 }
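The three post-barriers above all serve one generational-GC idea: when a location that may hold a nursery pointer is written, the location's address is recorded in a store buffer so the next minor GC can treat it as a root; when the location goes away, its entry must be removed. A standalone sketch of that mechanism; StoreBufferSketch and its method names are illustrative, not the real API:

    #include <algorithm>
    #include <vector>

    struct CellSketch;

    struct StoreBufferSketch {
        std::vector<CellSketch **> slots;  // remembered locations, scanned at minor GC

        void putCell(CellSketch **cellp) { slots.push_back(cellp); }

        // Mirrors writeBarrierPostRemove: the slot is about to go away, so the
        // next minor collection must not scan it.
        void removeCell(CellSketch **cellp) {
            slots.erase(std::remove(slots.begin(), slots.end(), cellp), slots.end());
        }
    };

    // Mirrors writeBarrierPost: nursery-allocated things expose a store buffer;
    // tenured-only things return null here and skip the work entirely.
    inline void writeBarrierPost(StoreBufferSketch *sb, CellSketch **cellp) {
        if (sb)
            sb->putCell(cellp);
    }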
-} // namespace gc
-
 inline void
 ObjectImpl::privateWriteBarrierPre(void **oldval)
 {


@@ -252,7 +252,7 @@ RegExpObject::trace(JSTracer *trc, JSObject *obj)
     // isHeapBusy() will be false.
     if (trc->runtime()->isHeapBusy() &&
         IS_GC_MARKING_TRACER(trc) &&
-        !obj->tenuredZone()->isPreservingCode())
+        !obj->asTenured()->zone()->isPreservingCode())
     {
         obj->setPrivate(nullptr);
     } else {


@@ -1045,7 +1045,7 @@ JSRuntime::initSelfHosting(JSContext *cx)
     const unsigned char *compressed = compressedSources;
     uint32_t compressedLen = GetCompressedSize();
-    ScopedJSFreePtr<char> src(selfHostingGlobal_->pod_malloc<char>(srcLen));
+    ScopedJSFreePtr<char> src(selfHostingGlobal_->zone()->pod_malloc<char>(srcLen));
     if (!src || !DecompressString(compressed, compressedLen,
                                   reinterpret_cast<unsigned char *>(src.get()), srcLen))
     {
@@ -1226,7 +1226,7 @@ CloneObject(JSContext *cx, HandleObject selfHostedObject)
     } else {
         JS_ASSERT(selfHostedObject->isNative());
         clone = NewObjectWithGivenProto(cx, selfHostedObject->getClass(), TaggedProto(nullptr), cx->global(),
-                                        selfHostedObject->tenuredGetAllocKind(),
+                                        selfHostedObject->asTenured()->getAllocKind(),
                                         SingletonObject);
     }
     if (!clone)


@@ -281,7 +281,7 @@ GetterSetterWriteBarrierPostRemove(JSRuntime *rt, JSObject **objp)
 #endif
 }

-class BaseShape : public gc::BarrieredCell<BaseShape>
+class BaseShape : public gc::TenuredCell
 {
   public:
     friend class Shape;
@@ -630,7 +630,7 @@ typedef HashSet<ReadBarrieredUnownedBaseShape,
                 SystemAllocPolicy> BaseShapeSet;

-class Shape : public gc::BarrieredCell<Shape>
+class Shape : public gc::TenuredCell
 {
     friend class ::JSObject;
     friend class ::JSFunction;


@@ -173,7 +173,7 @@ AllocChars(JSString *str, size_t length, CharT **chars, size_t *capacity)
     *capacity = numChars - 1;

     JS_STATIC_ASSERT(JSString::MAX_LENGTH * sizeof(CharT) < UINT32_MAX);
-    *chars = str->pod_malloc<CharT>(numChars);
+    *chars = str->zone()->pod_malloc<CharT>(numChars);
     return *chars != nullptr;
 }


@@ -136,7 +136,7 @@ static const size_t UINT32_CHAR_BUFFER_LENGTH = sizeof("4294967295") - 1;
  * at least X (e.g., ensureLinear will change a JSRope to be a JSFlatString).
  */
-class JSString : public js::gc::BarrieredCell<JSString>
+class JSString : public js::gc::TenuredCell
 {
   protected:
     static const size_t NUM_INLINE_CHARS_LATIN1 = 2 * sizeof(void *) / sizeof(char);
@@ -478,8 +478,6 @@ class JSString : public js::gc::BarrieredCell<JSString>
         return offsetof(JSString, d.s.u2.nonInlineCharsTwoByte);
     }

-    js::gc::AllocKind getAllocKind() const { return tenuredGetAllocKind(); }
-
     static inline js::ThingRootKind rootKind() { return js::THING_ROOT_STRING; }

 #ifdef DEBUG
@@ -497,7 +495,7 @@ class JSString : public js::gc::BarrieredCell<JSString>
         if (thing->isPermanentAtom())
             return;

-        js::gc::BarrieredCell<JSString>::readBarrier(thing);
+        TenuredCell::readBarrier(thing);
 #endif
     }
@@ -506,7 +504,7 @@ class JSString : public js::gc::BarrieredCell<JSString>
         if (isNullLike(thing) || thing->isPermanentAtom())
             return;

-        js::gc::BarrieredCell<JSString>::writeBarrierPre(thing);
+        TenuredCell::writeBarrierPre(thing);
 #endif
     }
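JSString keeps thin wrappers over the TenuredCell barriers because permanent atoms are shared, immortal strings: they are never collected, so the barriers can return early for them. A minimal sketch of the guard, using stand-in names rather than the real JSString layout:

    // Illustrative only; not the real JSString.
    struct StringSketch {
        bool permanent = false;
        bool isPermanentAtom() const { return permanent; }

        static void tenuredReadBarrier(StringSketch *) { /* cf. TenuredCell::readBarrier */ }

        static void readBarrier(StringSketch *s) {
            if (s->isPermanentAtom())
                return;  // immortal and shared: no marking needed
            tenuredReadBarrier(s);
        }
    };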


@@ -21,7 +21,7 @@
 namespace JS {

-class Symbol : public js::gc::BarrieredCell<Symbol>
+class Symbol : public js::gc::TenuredCell
 {
   private:
     SymbolCode code_;