Bug 751618 - Zone renaming part 1 (r=jonco)

This commit is contained in:
Bill McCloskey 2013-01-27 12:37:18 -08:00
Parent ca9f8adf8a
Commit 56954e59be
9 changed files: 107 additions and 92 deletions

View file

@ -55,19 +55,23 @@ static const uint32_t GRAY = 1;
} /* namespace gc */
} /* namespace js */
namespace JS {
typedef JSCompartment Zone;
} /* namespace JS */
namespace JS {
namespace shadow {
struct ArenaHeader
{
JSCompartment *compartment;
js::Zone *zone;
};
struct Compartment
struct Zone
{
bool needsBarrier_;
Compartment() : needsBarrier_(false) {}
Zone() : needsBarrier_(false) {}
};
} /* namespace shadow */
@ -114,7 +118,7 @@ static JS_ALWAYS_INLINE JSCompartment *
GetGCThingCompartment(void *thing)
{
JS_ASSERT(thing);
return js::gc::GetGCThingArena(thing)->compartment;
return js::gc::GetGCThingArena(thing)->zone;
}
static JS_ALWAYS_INLINE JSCompartment *
@ -134,8 +138,8 @@ GCThingIsMarkedGray(void *thing)
static JS_ALWAYS_INLINE bool
IsIncrementalBarrierNeededOnGCThing(void *thing, JSGCTraceKind kind)
{
JSCompartment *comp = GetGCThingCompartment(thing);
return reinterpret_cast<shadow::Compartment *>(comp)->needsBarrier_;
js::Zone *zone = GetGCThingCompartment(thing);
return reinterpret_cast<shadow::Zone *>(zone)->needsBarrier_;
}
} /* namespace JS */

View file

@ -29,16 +29,16 @@ RelocatablePtr<T>::post()
{
#ifdef JSGC_GENERATIONAL
JS_ASSERT(this->value);
this->value->compartment()->gcStoreBuffer.putRelocatableCell((gc::Cell **)&this->value);
this->value->zone()->gcStoreBuffer.putRelocatableCell((gc::Cell **)&this->value);
#endif
}
template <typename T>
inline void
RelocatablePtr<T>::relocate(JSCompartment *comp)
RelocatablePtr<T>::relocate(Zone *zone)
{
#ifdef JSGC_GENERATIONAL
comp->gcStoreBuffer.removeRelocatableCell((gc::Cell **)&this->value);
zone->gcStoreBuffer.removeRelocatableCell((gc::Cell **)&this->value);
#endif
}
@ -48,18 +48,18 @@ EncapsulatedValue::writeBarrierPre(const Value &value)
#ifdef JSGC_INCREMENTAL
if (value.isMarkable()) {
js::gc::Cell *cell = (js::gc::Cell *)value.toGCThing();
writeBarrierPre(cell->compartment(), value);
writeBarrierPre(cell->zone(), value);
}
#endif
}
inline void
EncapsulatedValue::writeBarrierPre(JSCompartment *comp, const Value &value)
EncapsulatedValue::writeBarrierPre(Zone *zone, const Value &value)
{
#ifdef JSGC_INCREMENTAL
if (comp->needsBarrier()) {
if (zone->needsBarrier()) {
Value tmp(value);
js::gc::MarkValueUnbarriered(comp->barrierTracer(), &tmp, "write barrier");
js::gc::MarkValueUnbarriered(zone->barrierTracer(), &tmp, "write barrier");
JS_ASSERT(tmp == value);
}
#endif
@ -72,9 +72,9 @@ EncapsulatedValue::pre()
}
inline void
EncapsulatedValue::pre(JSCompartment *comp)
EncapsulatedValue::pre(Zone *zone)
{
writeBarrierPre(comp, value);
writeBarrierPre(zone, value);
}
inline
@ -115,11 +115,11 @@ HeapValue::init(const Value &v)
}
inline void
HeapValue::init(JSCompartment *comp, const Value &v)
HeapValue::init(Zone *zone, const Value &v)
{
JS_ASSERT(!IsPoisonedValue(v));
value = v;
post(comp);
post(zone);
}
inline HeapValue &
@ -143,20 +143,20 @@ HeapValue::operator=(const HeapValue &v)
}
inline void
HeapValue::set(JSCompartment *comp, const Value &v)
HeapValue::set(Zone *zone, const Value &v)
{
#ifdef DEBUG
if (value.isMarkable()) {
js::gc::Cell *cell = (js::gc::Cell *)value.toGCThing();
JS_ASSERT(cell->compartment() == comp ||
cell->compartment() == comp->rt->atomsCompartment);
JS_ASSERT(cell->zone() == zone ||
cell->zone() == zone->rt->atomsCompartment);
}
#endif
pre(comp);
pre(zone);
JS_ASSERT(!IsPoisonedValue(v));
value = v;
post(comp);
post(zone);
}
inline void
@ -165,17 +165,17 @@ HeapValue::writeBarrierPost(const Value &value, Value *addr)
#ifdef JSGC_GENERATIONAL
if (value.isMarkable()) {
js::gc::Cell *cell = (js::gc::Cell *)value.toGCThing();
cell->compartment()->gcStoreBuffer.putValue(addr);
cell->zone()->gcStoreBuffer.putValue(addr);
}
#endif
}
inline void
HeapValue::writeBarrierPost(JSCompartment *comp, const Value &value, Value *addr)
HeapValue::writeBarrierPost(Zone *zone, const Value &value, Value *addr)
{
#ifdef JSGC_GENERATIONAL
if (value.isMarkable())
comp->gcStoreBuffer.putValue(addr);
zone->gcStoreBuffer.putValue(addr);
#endif
}
@ -186,9 +186,9 @@ HeapValue::post()
}
inline void
HeapValue::post(JSCompartment *comp)
HeapValue::post(Zone *zone)
{
writeBarrierPost(comp, value, &value);
writeBarrierPost(zone, value, &value);
}
inline
@ -246,17 +246,17 @@ RelocatableValue::post()
#ifdef JSGC_GENERATIONAL
if (value.isMarkable()) {
js::gc::Cell *cell = (js::gc::Cell *)value.toGCThing();
cell->compartment()->gcStoreBuffer.putRelocatableValue(&value);
cell->zone()->gcStoreBuffer.putRelocatableValue(&value);
}
#endif
}
inline void
RelocatableValue::post(JSCompartment *comp)
RelocatableValue::post(Zone *zone)
{
#ifdef JSGC_GENERATIONAL
if (value.isMarkable())
comp->gcStoreBuffer.putRelocatableValue(&value);
zone->gcStoreBuffer.putRelocatableValue(&value);
#endif
}
@ -266,7 +266,7 @@ RelocatableValue::relocate()
#ifdef JSGC_GENERATIONAL
if (value.isMarkable()) {
js::gc::Cell *cell = (js::gc::Cell *)value.toGCThing();
cell->compartment()->gcStoreBuffer.removeRelocatableValue(&value);
cell->zone()->gcStoreBuffer.removeRelocatableValue(&value);
}
#endif
}
@ -301,10 +301,10 @@ HeapSlot::init(JSObject *obj, Kind kind, uint32_t slot, const Value &v)
}
inline void
HeapSlot::init(JSCompartment *comp, JSObject *obj, Kind kind, uint32_t slot, const Value &v)
HeapSlot::init(Zone *zone, JSObject *obj, Kind kind, uint32_t slot, const Value &v)
{
value = v;
post(comp, obj, kind, slot);
post(zone, obj, kind, slot);
}
inline void
@ -320,20 +320,20 @@ HeapSlot::set(JSObject *obj, Kind kind, uint32_t slot, const Value &v)
}
inline void
HeapSlot::set(JSCompartment *comp, JSObject *obj, Kind kind, uint32_t slot, const Value &v)
HeapSlot::set(Zone *zone, JSObject *obj, Kind kind, uint32_t slot, const Value &v)
{
JS_ASSERT_IF(kind == Slot, &obj->getSlotRef(slot) == this);
JS_ASSERT_IF(kind == Element, &obj->getDenseElement(slot) == (const Value *)this);
JS_ASSERT(obj->compartment() == comp);
JS_ASSERT(obj->zone() == zone);
pre(comp);
pre(zone);
JS_ASSERT(!IsPoisonedValue(v));
value = v;
post(comp, obj, kind, slot);
post(zone, obj, kind, slot);
}
inline void
HeapSlot::setCrossCompartment(JSObject *obj, Kind kind, uint32_t slot, const Value &v, JSCompartment *vcomp)
HeapSlot::setCrossCompartment(JSObject *obj, Kind kind, uint32_t slot, const Value &v, Zone *vzone)
{
JS_ASSERT_IF(kind == Slot, &obj->getSlotRef(slot) == this);
JS_ASSERT_IF(kind == Element, &obj->getDenseElement(slot) == (const Value *)this);
@ -341,22 +341,22 @@ HeapSlot::setCrossCompartment(JSObject *obj, Kind kind, uint32_t slot, const Val
pre();
JS_ASSERT(!IsPoisonedValue(v));
value = v;
post(vcomp, obj, kind, slot);
post(vzone, obj, kind, slot);
}
inline void
HeapSlot::writeBarrierPost(JSObject *obj, Kind kind, uint32_t slot)
{
#ifdef JSGC_GENERATIONAL
obj->compartment()->gcStoreBuffer.putSlot(obj, kind, slot);
obj->zone()->gcStoreBuffer.putSlot(obj, kind, slot);
#endif
}
inline void
HeapSlot::writeBarrierPost(JSCompartment *comp, JSObject *obj, Kind kind, uint32_t slot)
HeapSlot::writeBarrierPost(Zone *zone, JSObject *obj, Kind kind, uint32_t slot)
{
#ifdef JSGC_GENERATIONAL
comp->gcStoreBuffer.putSlot(obj, kind, slot);
zone->gcStoreBuffer.putSlot(obj, kind, slot);
#endif
}
@ -367,9 +367,9 @@ HeapSlot::post(JSObject *owner, Kind kind, uint32_t slot)
}
inline void
HeapSlot::post(JSCompartment *comp, JSObject *owner, Kind kind, uint32_t slot)
HeapSlot::post(Zone *zone, JSObject *owner, Kind kind, uint32_t slot)
{
HeapSlot::writeBarrierPost(comp, owner, kind, slot);
HeapSlot::writeBarrierPost(zone, owner, kind, slot);
}
#ifdef JSGC_GENERATIONAL
@ -404,11 +404,11 @@ class DenseRangeRef : public gc::BufferableRef
#endif
inline void
DenseRangeWriteBarrierPost(JSCompartment *comp, JSObject *obj, uint32_t start, uint32_t count)
DenseRangeWriteBarrierPost(Zone *zone, JSObject *obj, uint32_t start, uint32_t count)
{
#ifdef JSGC_GENERATIONAL
if (count > 0)
comp->gcStoreBuffer.putGeneric(DenseRangeRef(obj, start, start + count));
zone->gcStoreBuffer.putGeneric(DenseRangeRef(obj, start, start + count));
#endif
}
@ -434,16 +434,16 @@ EncapsulatedId::pre()
#ifdef JSGC_INCREMENTAL
if (JSID_IS_OBJECT(value)) {
JSObject *obj = JSID_TO_OBJECT(value);
JSCompartment *comp = obj->compartment();
if (comp->needsBarrier()) {
js::gc::MarkObjectUnbarriered(comp->barrierTracer(), &obj, "write barrier");
Zone *zone = obj->zone();
if (zone->needsBarrier()) {
js::gc::MarkObjectUnbarriered(zone->barrierTracer(), &obj, "write barrier");
JS_ASSERT(obj == JSID_TO_OBJECT(value));
}
} else if (JSID_IS_STRING(value)) {
JSString *str = JSID_TO_STRING(value);
JSCompartment *comp = str->compartment();
if (comp->needsBarrier()) {
js::gc::MarkStringUnbarriered(comp->barrierTracer(), &str, "write barrier");
Zone *zone = str->zone();
if (zone->needsBarrier()) {
js::gc::MarkStringUnbarriered(zone->barrierTracer(), &str, "write barrier");
JS_ASSERT(str == JSID_TO_STRING(value));
}
}

View file

@ -213,7 +213,7 @@ class HeapPtr : public EncapsulatedPtr<T, Unioned>
/* Make this friend so it can access pre() and post(). */
template<class T1, class T2>
friend inline void
BarrieredSetPair(JSCompartment *comp,
BarrieredSetPair(Zone *zone,
HeapPtr<T1> &v1, T1 *val1,
HeapPtr<T2> &v2, T2 *val2);
};
@ -274,9 +274,9 @@ class RelocatablePtr : public EncapsulatedPtr<T>
this->value = v;
post();
} else if (this->value) {
JSCompartment *comp = this->value->compartment();
Zone *zone = this->value->zone();
this->value = v;
relocate(comp);
relocate(zone);
}
return *this;
}
@ -288,16 +288,16 @@ class RelocatablePtr : public EncapsulatedPtr<T>
this->value = v.value;
post();
} else if (this->value) {
JSCompartment *comp = this->value->compartment();
Zone *zone = this->value->zone();
this->value = v;
relocate(comp);
relocate(zone);
}
return *this;
}
protected:
inline void post();
inline void relocate(JSCompartment *comp);
inline void relocate(Zone *zone);
};
/*
@ -306,11 +306,11 @@ class RelocatablePtr : public EncapsulatedPtr<T>
*/
template<class T1, class T2>
static inline void
BarrieredSetPair(JSCompartment *comp,
BarrieredSetPair(Zone *zone,
HeapPtr<T1> &v1, T1 *val1,
HeapPtr<T2> &v2, T2 *val2)
{
if (T1::needWriteBarrierPre(comp)) {
if (T1::needWriteBarrierPre(zone)) {
v1.pre();
v2.pre();
}
@ -397,11 +397,11 @@ class EncapsulatedValue : public ValueOperations<EncapsulatedValue>
uint64_t asRawBits() const { return value.asRawBits(); }
static inline void writeBarrierPre(const Value &v);
static inline void writeBarrierPre(JSCompartment *comp, const Value &v);
static inline void writeBarrierPre(Zone *zone, const Value &v);
protected:
inline void pre();
inline void pre(JSCompartment *comp);
inline void pre(Zone *zone);
private:
friend class ValueOperations<EncapsulatedValue>;
@ -417,7 +417,7 @@ class HeapValue : public EncapsulatedValue
inline ~HeapValue();
inline void init(const Value &v);
inline void init(JSCompartment *comp, const Value &v);
inline void init(Zone *zone, const Value &v);
inline HeapValue &operator=(const Value &v);
inline HeapValue &operator=(const HeapValue &v);
@ -428,14 +428,14 @@ class HeapValue : public EncapsulatedValue
* the barrier. If you already know the compartment, it's faster to pass it
* in.
*/
inline void set(JSCompartment *comp, const Value &v);
inline void set(Zone *zone, const Value &v);
static inline void writeBarrierPost(const Value &v, Value *addr);
static inline void writeBarrierPost(JSCompartment *comp, const Value &v, Value *addr);
static inline void writeBarrierPost(Zone *zone, const Value &v, Value *addr);
private:
inline void post();
inline void post(JSCompartment *comp);
inline void post(Zone *zone);
};
class RelocatableValue : public EncapsulatedValue
@ -451,7 +451,7 @@ class RelocatableValue : public EncapsulatedValue
private:
inline void post();
inline void post(JSCompartment *comp);
inline void post(Zone *zone);
inline void relocate();
};
@ -477,19 +477,19 @@ class HeapSlot : public EncapsulatedValue
inline ~HeapSlot();
inline void init(JSObject *owner, Kind kind, uint32_t slot, const Value &v);
inline void init(JSCompartment *comp, JSObject *owner, Kind kind, uint32_t slot, const Value &v);
inline void init(Zone *zone, JSObject *owner, Kind kind, uint32_t slot, const Value &v);
inline void set(JSObject *owner, Kind kind, uint32_t slot, const Value &v);
inline void set(JSCompartment *comp, JSObject *owner, Kind kind, uint32_t slot, const Value &v);
inline void set(Zone *zone, JSObject *owner, Kind kind, uint32_t slot, const Value &v);
inline void setCrossCompartment(JSObject *owner, Kind kind, uint32_t slot, const Value &v,
JSCompartment *vcomp);
Zone *vzone);
static inline void writeBarrierPost(JSObject *obj, Kind kind, uint32_t slot);
static inline void writeBarrierPost(JSCompartment *comp, JSObject *obj, Kind kind, uint32_t slot);
static inline void writeBarrierPost(Zone *zone, JSObject *obj, Kind kind, uint32_t slot);
private:
inline void post(JSObject *owner, Kind kind, uint32_t slot);
inline void post(JSCompartment *comp, JSObject *owner, Kind kind, uint32_t slot);
inline void post(Zone *zone, JSObject *owner, Kind kind, uint32_t slot);
};
/*
@ -499,14 +499,14 @@ class HeapSlot : public EncapsulatedValue
* single step.
*/
inline void
DenseRangeWriteBarrierPost(JSCompartment *comp, JSObject *obj, uint32_t start, uint32_t count);
DenseRangeWriteBarrierPost(Zone *zone, JSObject *obj, uint32_t start, uint32_t count);
/*
* This is a post barrier for HashTables whose key can be moved during a GC.
*/
template <class Map, class Key>
inline void
HashTableWriteBarrierPost(JSCompartment *comp, const Map *map, const Key &key)
HashTableWriteBarrierPost(Zone *zone, const Map *map, const Key &key)
{
#ifdef JS_GCGENERATIONAL
if (key && comp->gcNursery.isInside(key))

View file

@ -86,6 +86,7 @@ struct Cell
MOZ_ALWAYS_INLINE void unmark(uint32_t color) const;
inline JSCompartment *compartment() const;
inline Zone *zone() const;
#ifdef DEBUG
inline bool isAligned() const;
@ -444,12 +445,12 @@ struct ArenaHeader : public JS::shadow::ArenaHeader
return allocKind < size_t(FINALIZE_LIMIT);
}
void init(JSCompartment *comp, AllocKind kind) {
void init(Zone *zoneArg, AllocKind kind) {
JS_ASSERT(!allocated());
JS_ASSERT(!markOverflow);
JS_ASSERT(!allocatedDuringIncremental);
JS_ASSERT(!hasDelayedMarking);
compartment = comp;
zone = zoneArg;
JS_STATIC_ASSERT(FINALIZE_LIMIT <= 255);
allocKind = size_t(kind);
@ -977,7 +978,13 @@ Cell::unmark(uint32_t color) const
JSCompartment *
Cell::compartment() const
{
return arenaHeader()->compartment;
return arenaHeader()->zone;
}
Zone *
Cell::zone() const
{
return arenaHeader()->zone;
}
#ifdef DEBUG

View file

@ -152,7 +152,7 @@ IsAddressableGCThing(JSRuntime *rt, uintptr_t w,
if (!aheader->allocated())
return CGCT_FREEARENA;
if (skipUncollectedCompartments && !aheader->compartment->isCollecting())
if (skipUncollectedCompartments && !aheader->zone->isCollecting())
return CGCT_OTHERCOMPARTMENT;
AllocKind thingKind = aheader->getAllocKind();

View file

@ -92,8 +92,8 @@ JSCompartment::JSCompartment(JSRuntime *rt)
#endif
{
/* Ensure that there are no vtables to mess us up here. */
JS_ASSERT(reinterpret_cast<JS::shadow::Compartment *>(this) ==
static_cast<JS::shadow::Compartment *>(this));
JS_ASSERT(reinterpret_cast<JS::shadow::Zone *>(this) ==
static_cast<JS::shadow::Zone *>(this));
setGCMaxMallocBytes(rt->gcMaxMallocBytes * 0.9);
}

View file

@ -26,7 +26,7 @@
namespace js {
namespace ion {
class IonCompartment;
class IonCompartment;
}
struct NativeIterator;
@ -39,8 +39,8 @@ struct NativeIterator;
* is erroneously included in the measurement; see bug 562553.
*/
class DtoaCache {
double d;
int base;
double d;
int base;
JSFlatString *s; // if s==NULL, d and base are not valid
public:
@ -154,9 +154,9 @@ class Allocator
JS_DECLARE_NEW_METHODS(new_, malloc_, JS_ALWAYS_INLINE)
};
}
} /* namespace js */
struct JSCompartment : private JS::shadow::Compartment, public js::gc::GraphNodeBase<JSCompartment>
struct JSCompartment : private JS::shadow::Zone, public js::gc::GraphNodeBase<JSCompartment>
{
JSRuntime *rt;
JSPrincipals *principals;
@ -578,6 +578,10 @@ struct JSCompartment : private JS::shadow::Compartment, public js::gc::GraphNode
#endif
};
namespace JS {
typedef JSCompartment Zone;
} /* namespace JS */
// For use when changing the debug mode flag on one or more compartments.
// Do not run scripts in any compartment that is scheduled for GC using this
// object. See comment in updateForDebugMode.

View file

@ -268,13 +268,13 @@ ArenaHeader::checkSynchronizedWithFreeList() const
* list in the compartment can mutate at any moment. We cannot do any
* checks in this case.
*/
if (IsBackgroundFinalized(getAllocKind()) && !compartment->rt->isHeapBusy())
if (IsBackgroundFinalized(getAllocKind()) && !zone->rt->isHeapBusy())
return;
FreeSpan firstSpan = FreeSpan::decodeOffsets(arenaAddress(), firstFreeSpanOffsets);
if (firstSpan.isEmpty())
return;
const FreeSpan *list = compartment->allocator.arenas.getFreeList(getAllocKind());
const FreeSpan *list = zone->allocator.arenas.getFreeList(getAllocKind());
if (list->isEmpty() || firstSpan.arenaAddress() != list->arenaAddress())
return;
@ -812,7 +812,7 @@ Chunk::releaseArena(ArenaHeader *aheader)
{
JS_ASSERT(aheader->allocated());
JS_ASSERT(!aheader->hasDelayedMarking);
JSCompartment *comp = aheader->compartment;
JSCompartment *comp = aheader->zone;
JSRuntime *rt = comp->rt;
AutoLockGC maybeLock;
if (rt->gcHelperThread.sweeping())
@ -1369,7 +1369,7 @@ ArenaLists::backgroundFinalize(FreeOp *fop, ArenaHeader *listHead, bool onBackgr
{
JS_ASSERT(listHead);
AllocKind thingKind = listHead->getAllocKind();
JSCompartment *comp = listHead->compartment;
JSCompartment *comp = listHead->zone;
ArenaList finalized;
SliceBudget budget;
@ -3072,7 +3072,7 @@ js::gc::MarkingValidator::validate()
Arena *arena = &chunk->arenas[i];
if (!arena->aheader.allocated())
continue;
if (!arena->aheader.compartment->isGCSweeping())
if (!arena->aheader.zone->isGCSweeping())
continue;
if (arena->aheader.allocatedDuringIncremental)
continue;

View file

@ -281,7 +281,7 @@ class CellIterImpl
}
void init(ArenaHeader *singleAheader) {
initSpan(singleAheader->compartment, singleAheader->getAllocKind());
initSpan(singleAheader->zone, singleAheader->getAllocKind());
aiter.init(singleAheader);
next();
aiter.init();
@ -341,7 +341,7 @@ class CellIterUnderGC : public CellIterImpl
}
CellIterUnderGC(ArenaHeader *aheader) {
JS_ASSERT(aheader->compartment->rt->isHeapBusy());
JS_ASSERT(aheader->zone->rt->isHeapBusy());
init(aheader);
}
};