Bug 751618 - Zone renaming part 8 (r=terrence)

This commit is contained in:
Bill McCloskey 2013-01-27 13:51:41 -08:00
Parent 5d9651e480
Commit 85d4900acd
49 changed files with 256 additions and 221 deletions
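The pattern applied across all 49 files: garbage-collection state that previously lived on JSCompartment (mark/sweep phase, incremental-barrier flags, arena allocation) is now read through the owning JS::Zone, which compartments reach via a zone() accessor. A minimal sketch of that relationship, using simplified hypothetical declarations rather than the real SpiderMonkey headers:

// Hypothetical, heavily simplified sketch of the ownership model this
// series introduces; the real Zone/JSCompartment types are far larger.
struct Zone {
    enum GCState { NoGC, Mark, MarkGray, Sweep, Finished };
    GCState gcState;        // GC phase is now tracked per zone
    bool needsBarrier_;     // incremental-GC write-barrier flag

    bool isCollecting() const { return gcState != NoGC; }
    bool needsBarrier() const { return needsBarrier_; }
};

struct JSCompartment {
    Zone *zone_;            // every compartment belongs to exactly one zone
    Zone *zone() const { return zone_; }
};

Call sites therefore change mechanically, e.g. c->isCollecting() becomes c->zone()->isCollecting(), and cx->compartment->needsBarrier() becomes cx->zone()->needsBarrier(), as the hunks below show.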

View file

@@ -149,7 +149,7 @@ HeapValue::set(Zone *zone, const Value &v)
if (value.isMarkable()) {
js::gc::Cell *cell = (js::gc::Cell *)value.toGCThing();
JS_ASSERT(cell->zone() == zone ||
- cell->zone() == zone->rt->atomsCompartment);
+ cell->zone() == zone->rt->atomsCompartment->zone());
}
#endif

View file

@@ -264,7 +264,7 @@ class RelocatablePtr : public EncapsulatedPtr<T>
~RelocatablePtr() {
if (this->value)
- relocate(this->value->compartment());
+ relocate(this->value->zone());
}
RelocatablePtr<T> &operator=(T *v) {

View file

@@ -767,7 +767,7 @@ struct Chunk
inline void insertToAvailableList(Chunk **insertPoint);
inline void removeFromAvailableList();
- ArenaHeader *allocateArena(JSCompartment *comp, AllocKind kind);
+ ArenaHeader *allocateArena(JS::Zone *zone, AllocKind kind);
void releaseArena(ArenaHeader *aheader);

View file

@@ -13,6 +13,7 @@
#include "js/HashTable.h"
#include "gc/GCInternals.h"
#include "jsobjinlines.h"
+ #include "jsgcinlines.h"
using namespace js;

View file

@@ -119,8 +119,8 @@ CheckMarkedThing(JSTracer *trc, T *thing)
{
JS_ASSERT(trc);
JS_ASSERT(thing);
- JS_ASSERT(thing->compartment());
- JS_ASSERT(thing->compartment()->rt == trc->runtime);
+ JS_ASSERT(thing->zone());
+ JS_ASSERT(thing->zone()->rt == trc->runtime);
JS_ASSERT(trc->debugPrinter || trc->debugPrintArg);
DebugOnly<JSRuntime *> rt = trc->runtime;
@@ -132,16 +132,16 @@ CheckMarkedThing(JSTracer *trc, T *thing)
rt->assertValidThread();
#endif
- JS_ASSERT_IF(thing->compartment()->requireGCTracer(), IS_GC_MARKING_TRACER(trc));
+ JS_ASSERT_IF(thing->zone()->requireGCTracer(), IS_GC_MARKING_TRACER(trc));
JS_ASSERT(thing->isAligned());
JS_ASSERT_IF(rt->gcStrictCompartmentChecking,
- thing->compartment()->isCollecting() ||
+ thing->zone()->isCollecting() ||
thing->compartment() == rt->atomsCompartment);
JS_ASSERT_IF(IS_GC_MARKING_TRACER(trc) && ((GCMarker *)trc)->getMarkColor() == GRAY,
- thing->compartment()->isGCMarkingGray() ||
+ thing->zone()->isGCMarkingGray() ||
thing->compartment() == rt->atomsCompartment);
/*
@@ -176,7 +176,7 @@ MarkInternal(JSTracer *trc, T **thingp)
* GC.
*/
if (!trc->callback) {
- if (thing->compartment()->isGCMarking()) {
+ if (thing->zone()->isGCMarking()) {
PushMarkStack(AsGCMarker(trc), thing);
thing->compartment()->maybeAlive = true;
}
@@ -259,8 +259,8 @@ IsMarked(T **thingp)
{
JS_ASSERT(thingp);
JS_ASSERT(*thingp);
- JSCompartment *c = (*thingp)->compartment();
- if (!c->isCollecting() || c->isGCFinished())
+ Zone *zone = (*thingp)->zone();
+ if (!zone->isCollecting() || zone->isGCFinished())
return true;
return (*thingp)->isMarked();
}
@@ -271,7 +271,7 @@ IsAboutToBeFinalized(T **thingp)
{
JS_ASSERT(thingp);
JS_ASSERT(*thingp);
- if (!(*thingp)->compartment()->isGCSweeping())
+ if (!(*thingp)->zone()->isGCSweeping())
return false;
return !(*thingp)->isMarked();
}
@@ -644,7 +644,7 @@ ShouldMarkCrossCompartment(JSTracer *trc, RawObject src, Cell *cell)
if (!IS_GC_MARKING_TRACER(trc))
return true;
- JSCompartment *c = cell->compartment();
+ JS::Zone *zone = cell->zone();
uint32_t color = AsGCMarker(trc)->getMarkColor();
JS_ASSERT(color == BLACK || color == GRAY);
@@ -657,12 +657,12 @@ ShouldMarkCrossCompartment(JSTracer *trc, RawObject src, Cell *cell)
* but the source was marked black by the conservative scanner.
*/
if (cell->isMarked(GRAY)) {
- JS_ASSERT(!cell->compartment()->isCollecting());
+ JS_ASSERT(!zone->isCollecting());
trc->runtime->gcFoundBlackGrayEdges = true;
}
- return c->isGCMarking();
+ return zone->isGCMarking();
} else {
- if (c->isGCMarkingBlack()) {
+ if (zone->isGCMarkingBlack()) {
/*
* The destination compartment is not being marked gray now,
* but it will be later, so record the cell so it can be marked gray
@@ -672,7 +672,7 @@ ShouldMarkCrossCompartment(JSTracer *trc, RawObject src, Cell *cell)
DelayCrossCompartmentGrayMarking(src);
return false;
}
- return c->isGCMarkingGray();
+ return zone->isGCMarkingGray();
}
}
@@ -729,10 +729,10 @@ gc::IsCellAboutToBeFinalized(Cell **thingp)
/*** Push Mark Stack ***/
#define JS_COMPARTMENT_ASSERT(rt, thing) \
- JS_ASSERT((thing)->compartment()->isGCMarking())
+ JS_ASSERT((thing)->zone()->isGCMarking())
#define JS_COMPARTMENT_ASSERT_STR(rt, thing) \
- JS_ASSERT((thing)->compartment()->isGCMarking() || \
+ JS_ASSERT((thing)->zone()->isGCMarking() || \
(thing)->compartment() == (rt)->atomsCompartment);
#if JS_HAS_XML_SUPPORT

View file

@@ -669,7 +669,7 @@ js::gc::MarkRuntime(JSTracer *trc, bool useSavedRoots)
if (IS_GC_MARKING_TRACER(trc)) {
for (CompartmentsIter c(rt); !c.done(); c.next()) {
- if (!c->isCollecting())
+ if (!c->zone()->isCollecting())
c->markCrossCompartmentWrappers(trc);
}
Debugger::markCrossCompartmentDebuggerObjectReferents(trc);
@@ -710,7 +710,7 @@ js::gc::MarkRuntime(JSTracer *trc, bool useSavedRoots)
MarkScriptRoot(trc, &vec[i].script, "scriptAndCountsVector");
}
- if (!IS_GC_MARKING_TRACER(trc) || rt->atomsCompartment->isCollecting()) {
+ if (!IS_GC_MARKING_TRACER(trc) || rt->atomsCompartment->zone()->isCollecting()) {
MarkAtoms(trc);
#ifdef JS_ION
/* Any Ion wrappers survive until the runtime is being torn down. */
@@ -726,7 +726,7 @@ js::gc::MarkRuntime(JSTracer *trc, bool useSavedRoots)
/* We can't use GCCompartmentsIter if we're called from TraceRuntime. */
for (CompartmentsIter c(rt); !c.done(); c.next()) {
- if (IS_GC_MARKING_TRACER(trc) && !c->isCollecting())
+ if (IS_GC_MARKING_TRACER(trc) && !c->zone()->isCollecting())
continue;
if (IS_GC_MARKING_TRACER(trc) && c->isPreservingCode()) {

View file

@@ -367,6 +367,7 @@ Statistics::formatData(StatisticsSerializer &ss, uint64_t timestamp)
ss.appendDecimal("Total Time", "ms", t(total));
ss.appendNumber("Compartments Collected", "%d", "", collectedCount);
ss.appendNumber("Total Compartments", "%d", "", compartmentCount);
+ ss.appendNumber("Total Zones", "%d", "", zoneCount);
ss.appendNumber("MMU (20ms)", "%d", "%", int(mmu20 * 100));
ss.appendNumber("MMU (50ms)", "%d", "%", int(mmu50 * 100));
ss.appendDecimal("SCC Sweep Total", "ms", t(sccTotal));
@@ -447,6 +448,7 @@ Statistics::Statistics(JSRuntime *rt)
fullFormat(false),
gcDepth(0),
collectedCount(0),
+ zoneCount(0),
compartmentCount(0),
nonincrementalReason(NULL),
preBytes(0),
@@ -547,7 +549,7 @@ Statistics::endGC()
int64_t sccTotal, sccLongest;
sccDurations(&sccTotal, &sccLongest);
- (*cb)(JS_TELEMETRY_GC_IS_COMPARTMENTAL, collectedCount == compartmentCount ? 0 : 1);
+ (*cb)(JS_TELEMETRY_GC_IS_COMPARTMENTAL, collectedCount == zoneCount ? 0 : 1);
(*cb)(JS_TELEMETRY_GC_MS, t(total));
(*cb)(JS_TELEMETRY_GC_MAX_PAUSE_MS, t(longest));
(*cb)(JS_TELEMETRY_GC_MARK_MS, t(phaseTimes[PHASE_MARK]));
@@ -568,9 +570,11 @@ Statistics::endGC()
}
void
- Statistics::beginSlice(int collectedCount, int compartmentCount, gcreason::Reason reason)
+ Statistics::beginSlice(int collectedCount, int zoneCount, int compartmentCount,
+ gcreason::Reason reason)
{
this->collectedCount = collectedCount;
+ this->zoneCount = zoneCount;
this->compartmentCount = compartmentCount;
bool first = runtime->gcIncrementalState == gc::NO_INCREMENTAL;
@@ -585,7 +589,7 @@ Statistics::beginSlice(int collectedCount, int compartmentCount, gcreason::Reaso
// Slice callbacks should only fire for the outermost level
if (++gcDepth == 1) {
- bool wasFullGC = collectedCount == compartmentCount;
+ bool wasFullGC = collectedCount == zoneCount;
if (GCSliceCallback cb = runtime->gcSliceCallback)
(*cb)(runtime, first ? GC_CYCLE_BEGIN : GC_SLICE_BEGIN, GCDescription(!wasFullGC));
}
@@ -608,7 +612,7 @@ Statistics::endSlice()
// Slice callbacks should only fire for the outermost level
if (--gcDepth == 0) {
- bool wasFullGC = collectedCount == compartmentCount;
+ bool wasFullGC = collectedCount == zoneCount;
if (GCSliceCallback cb = runtime->gcSliceCallback)
(*cb)(runtime, last ? GC_CYCLE_END : GC_SLICE_END, GCDescription(!wasFullGC));
}

View file

@@ -86,7 +86,7 @@ struct Statistics {
void beginPhase(Phase phase);
void endPhase(Phase phase);
- void beginSlice(int collectedCount, int compartmentCount, gcreason::Reason reason);
+ void beginSlice(int collectedCount, int zoneCount, int compartmentCount, gcreason::Reason reason);
void endSlice();
void reset(const char *reason) { slices.back().resetReason = reason; }
@@ -118,6 +118,7 @@ struct Statistics {
int gcDepth;
int collectedCount;
+ int zoneCount;
int compartmentCount;
const char *nonincrementalReason;
@@ -177,12 +178,13 @@ struct Statistics {
struct AutoGCSlice
{
- AutoGCSlice(Statistics &stats, int collectedCount, int compartmentCount, gcreason::Reason reason
+ AutoGCSlice(Statistics &stats, int collectedCount, int zoneCount, int compartmentCount,
+ gcreason::Reason reason
MOZ_GUARD_OBJECT_NOTIFIER_PARAM)
: stats(stats)
{
MOZ_GUARD_OBJECT_NOTIFIER_INIT;
- stats.beginSlice(collectedCount, compartmentCount, reason);
+ stats.beginSlice(collectedCount, zoneCount, compartmentCount, reason);
}
~AutoGCSlice() { stats.endSlice(); }
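Because collection is now scheduled per zone, the slice statistics carry both counts, and a GC counts as full only when every zone was collected. A sketch of the widened call, assuming the caller-side names used in Collect() later in this commit:

// Sketch only; rt, collectedCount, zoneCount, compartmentCount and reason
// are assumed to exist in the surrounding GC driver code.
gcstats::AutoGCSlice agc(rt->gcStats, collectedCount, zoneCount,
                         compartmentCount, reason);
// Inside beginSlice()/endSlice(), "full GC" is now judged against zones:
//   bool wasFullGC = collectedCount == zoneCount;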

View file

@@ -17,6 +17,7 @@
#include "js/HashTable.h"
#include "gc/GCInternals.h"
#include "jsobjinlines.h"
+ #include "jsgcinlines.h"
#ifdef MOZ_VALGRIND
@@ -506,10 +507,12 @@ gc::StartVerifyPreBarriers(JSRuntime *rt)
rt->gcVerifyPreData = trc;
rt->gcIncrementalState = MARK;
rt->gcMarker.start();
- for (CompartmentsIter c(rt); !c.done(); c.next()) {
+ for (CompartmentsIter c(rt); !c.done(); c.next())
PurgeJITCaches(c);
- c->setNeedsBarrier(true, JSCompartment::UpdateIon);
- c->allocator.arenas.purge();
+ for (ZonesIter zone(rt); !zone.done(); zone.next()) {
+ zone->setNeedsBarrier(true, Zone::UpdateIon);
+ zone->allocator.arenas.purge();
}
return;
@@ -581,14 +584,16 @@ gc::EndVerifyPreBarriers(JSRuntime *rt)
bool compartmentCreated = false;
/* We need to disable barriers before tracing, which may invoke barriers. */
- for (CompartmentsIter c(rt); !c.done(); c.next()) {
- if (!c->needsBarrier())
+ for (ZonesIter zone(rt); !zone.done(); zone.next()) {
+ if (!zone->needsBarrier())
compartmentCreated = true;
- PurgeJITCaches(c);
- c->setNeedsBarrier(false, JSCompartment::UpdateIon);
+ zone->setNeedsBarrier(false, Zone::UpdateIon);
}
+ for (CompartmentsIter c(rt); !c.done(); c.next())
+ PurgeJITCaches(c);
/*
* We need to bump gcNumber so that the methodjit knows that jitcode has
* been discarded.

View file

@@ -3681,7 +3681,7 @@ CodeGenerator::link()
// The correct state for prebarriers is unknown until the end of compilation,
// since a GC can occur during code generation. All barriers are emitted
// off-by-default, and are toggled on here if necessary.
- if (cx->compartment->needsBarrier())
+ if (cx->zone()->needsBarrier())
ionScript->toggleBarriers(true);
return true;

View file

@@ -1818,13 +1818,13 @@ InvalidateActivation(FreeOp *fop, uint8_t *ionTop, bool invalidateAll)
const SafepointIndex *si = ionScript->getSafepointIndex(it.returnAddressToFp());
IonCode *ionCode = ionScript->method();
- JSCompartment *compartment = script->compartment();
- if (compartment->needsBarrier()) {
+ JS::Zone *zone = script->zone();
+ if (zone->needsBarrier()) {
// We're about to remove edges from the JSScript to gcthings
// embedded in the IonCode. Perform one final trace of the
// IonCode for the incremental GC, as it must know about
// those edges.
- ionCode->trace(compartment->barrierTracer());
+ ionCode->trace(zone->barrierTracer());
}
ionCode->setInvalidated();
@@ -1927,13 +1927,13 @@ ion::Invalidate(types::TypeCompartment &types, FreeOp *fop,
UnrootedScript script = co.script;
IonScript *ionScript = GetIonScript(script, executionMode);
- JSCompartment *compartment = script->compartment();
- if (compartment->needsBarrier()) {
+ Zone *zone = script->zone();
+ if (zone->needsBarrier()) {
// We're about to remove edges from the JSScript to gcthings
// embedded in the IonScript. Perform one final trace of the
// IonScript for the incremental GC, as it must know about
// those edges.
- IonScript::Trace(compartment->barrierTracer(), ionScript);
+ IonScript::Trace(zone->barrierTracer(), ionScript);
}
ionScript->decref(fop);

View file

@@ -1066,7 +1066,7 @@ IonCacheSetProperty::attachNativeExisting(JSContext *cx, IonScript *ion,
if (obj->isFixedSlot(shape->slot())) {
Address addr(object(), JSObject::getFixedSlotOffset(shape->slot()));
- if (cx->compartment->needsBarrier())
+ if (cx->zone()->needsBarrier())
masm.callPreBarrier(addr, MIRType_Value);
masm.storeConstantOrRegister(value(), addr);
@@ -1076,7 +1076,7 @@ IonCacheSetProperty::attachNativeExisting(JSContext *cx, IonScript *ion,
Address addr(slotsReg, obj->dynamicSlotIndex(shape->slot()) * sizeof(Value));
- if (cx->compartment->needsBarrier())
+ if (cx->zone()->needsBarrier())
masm.callPreBarrier(addr, MIRType_Value);
masm.storeConstantOrRegister(value(), addr);
@@ -1336,7 +1336,7 @@ IonCacheSetProperty::attachNativeAdding(JSContext *cx, IonScript *ion, JSObject
/* Changing object shape. Write the object's new shape. */
Address shapeAddr(object(), JSObject::offsetOfShape());
- if (cx->compartment->needsBarrier())
+ if (cx->zone()->needsBarrier())
masm.callPreBarrier(shapeAddr, MIRType_Shape);
masm.storePtr(ImmGCPtr(newShape), shapeAddr);

View file

@@ -308,11 +308,11 @@ MacroAssembler::newGCThing(const Register &result,
JS_ASSERT(!templateObject->hasDynamicElements());
- JSCompartment *compartment = GetIonContext()->compartment;
+ Zone *zone = GetIonContext()->compartment->zone();
#ifdef JS_GC_ZEAL
// Don't execute the inline path if gcZeal is active.
- movePtr(ImmWord(compartment->rt), result);
+ movePtr(ImmWord(zone->rt), result);
loadPtr(Address(result, offsetof(JSRuntime, gcZeal_)), result);
branch32(Assembler::NotEqual, result, Imm32(0), fail);
#endif
@@ -322,7 +322,7 @@ MacroAssembler::newGCThing(const Register &result,
// If a FreeSpan is replaced, its members are updated in the freeLists table,
// which the code below always re-reads.
gc::FreeSpan *list = const_cast<gc::FreeSpan *>
- (compartment->allocator.arenas.getFreeList(allocKind));
+ (zone->allocator.arenas.getFreeList(allocKind));
loadPtr(AbsoluteAddress(&list->first), result);
branchPtr(Assembler::BelowOrEqual, AbsoluteAddress(&list->last), result, fail);

View file

@@ -382,9 +382,9 @@ class MacroAssembler : public MacroAssemblerSpecific
void branchTestNeedsBarrier(Condition cond, const Register &scratch, Label *label) {
JS_ASSERT(cond == Zero || cond == NonZero);
- JSCompartment *comp = GetIonContext()->compartment;
- movePtr(ImmWord(comp), scratch);
- Address needsBarrierAddr(scratch, JSCompartment::OffsetOfNeedsBarrier());
+ JS::Zone *zone = GetIonContext()->compartment->zone();
+ movePtr(ImmWord(zone), scratch);
+ Address needsBarrierAddr(scratch, JS::Zone::OffsetOfNeedsBarrier());
branchTest32(cond, needsBarrierAddr, Imm32(0x1), label);
}
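Conceptually, the test this helper emits is the C++ check below, with the zone pointer baked into the jitcode as an immediate. This is an illustrative sketch of the emitted logic, not the generated assembly, and the helper name is hypothetical:

// Load the zone's needsBarrier word at its fixed offset and test bit 0,
// mirroring the movePtr/branchTest32 sequence above. OffsetOfNeedsBarrier()
// is the accessor referenced in the hunk; everything else is illustrative.
static bool zoneNeedsBarrier(JS::Zone *zone) {
    const char *base = reinterpret_cast<const char *>(zone);
    uint32_t word = *reinterpret_cast<const uint32_t *>(
        base + JS::Zone::OffsetOfNeedsBarrier());
    return (word & 0x1) != 0;
}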

View file

@@ -13,6 +13,7 @@
// For js::gc::AutoSuppressGC
#include "jsgc.h"
#include "jsobjinlines.h"
+ #include "jsgcinlines.h"
#include "js/Vector.h"

View file

@@ -1382,7 +1382,8 @@ struct JSContext : js::ContextFriendFields,
JSContext *thisDuringConstruction() { return this; }
~JSContext();
- js::PerThreadData& mainThread() { return runtime->mainThread; }
+ inline JS::Zone *zone();
+ js::PerThreadData &mainThread() { return runtime->mainThread; }
private:
/* See JSContext::findVersion. */

View file

@@ -607,4 +607,10 @@ JSContext::leaveCompartment(JSCompartment *oldCompartment)
wrapPendingException();
}
+ inline JS::Zone *
+ JSContext::zone()
+ {
+ return compartment->zone();
+ }
#endif /* jscntxtinlines_h___ */
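With this accessor in place, hot paths can ask the context directly for zone-level GC state; a small usage sketch with a hypothetical caller:

// Hypothetical caller showing the accessor defined above: barrier checks
// that used to go through cx->compartment now go through cx->zone().
static void maybePreBarrier(JSContext *cx, js::gc::Cell *thing) {
    if (cx->zone()->needsBarrier()) {
        // ...trace `thing` through the zone's barrier tracer (elided)...
    }
}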

View file

@@ -374,7 +374,7 @@ JSCompartment::wrap(JSContext *cx, Value *vp, JSObject *existingArg)
if (!putWrapper(orig, *vp))
return false;
- if (str->compartment()->isGCMarking()) {
+ if (str->zone()->isGCMarking()) {
/*
* All string wrappers are dropped when collection starts, but we
* just created a new one. Mark the wrapped string to stop it being
@@ -522,7 +522,7 @@ JSCompartment::wrap(JSContext *cx, AutoIdVector &props)
void
JSCompartment::markCrossCompartmentWrappers(JSTracer *trc)
{
- JS_ASSERT(!isCollecting());
+ JS_ASSERT(!zone()->isCollecting());
for (WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront()) {
Value v = e.front().value;
@@ -815,10 +815,9 @@ JSCompartment::setGCMaxMallocBytes(size_t value)
void
JSCompartment::onTooMuchMalloc()
{
- TriggerZoneGC(this, gcreason::TOO_MUCH_MALLOC);
+ TriggerZoneGC(zone(), gcreason::TOO_MUCH_MALLOC);
}
bool
JSCompartment::hasScriptsOnStack()
{
@@ -908,7 +907,7 @@ JSCompartment::updateForDebugMode(FreeOp *fop, AutoDebugModeGC &dmgc)
// to run any scripts in this compartment until the dmgc is destroyed.
// That is the caller's responsibility.
if (!rt->isHeapBusy())
- dmgc.scheduleGC(this);
+ dmgc.scheduleGC(zone());
#endif
}

View file

@@ -135,8 +135,8 @@ JS::PrepareZoneForGC(Zone *zone)
JS_FRIEND_API(void)
JS::PrepareForFullGC(JSRuntime *rt)
{
- for (CompartmentsIter c(rt); !c.done(); c.next())
- c->scheduleGC();
+ for (ZonesIter zone(rt); !zone.done(); zone.next())
+ zone->scheduleGC();
}
JS_FRIEND_API(void)
@@ -145,17 +145,17 @@ JS::PrepareForIncrementalGC(JSRuntime *rt)
if (!IsIncrementalGCInProgress(rt))
return;
- for (CompartmentsIter c(rt); !c.done(); c.next()) {
- if (c->wasGCStarted())
- PrepareZoneForGC(c);
+ for (ZonesIter zone(rt); !zone.done(); zone.next()) {
+ if (zone->wasGCStarted())
+ PrepareZoneForGC(zone);
}
}
JS_FRIEND_API(bool)
JS::IsGCScheduled(JSRuntime *rt)
{
- for (CompartmentsIter c(rt); !c.done(); c.next()) {
- if (c->isGCScheduled())
+ for (ZonesIter zone(rt); !zone.done(); zone.next()) {
+ if (zone->isGCScheduled())
return true;
}
@@ -218,7 +218,7 @@ JS_SetCompartmentPrincipals(JSCompartment *compartment, JSPrincipals *principals
// with the old one, but JSPrincipals doesn't give us a way to do that.
// But we can at least assert that we're not switching between system
// and non-system.
- JS_ASSERT(compartment->isSystemCompartment == isSystem);
+ JS_ASSERT(compartment->zone()->isSystemCompartment == isSystem);
}
// Set up the new principals.
@@ -228,7 +228,7 @@ JS_SetCompartmentPrincipals(JSCompartment *compartment, JSPrincipals *principals
}
// Update the system flag.
- compartment->isSystemCompartment = isSystem;
+ compartment->zone()->isSystemCompartment = isSystem;
}
JS_FRIEND_API(JSBool)
@@ -318,7 +318,7 @@ AutoSwitchCompartment::~AutoSwitchCompartment()
JS_FRIEND_API(bool)
js::IsSystemCompartment(const JSCompartment *c)
{
- return c->isSystemCompartment;
+ return c->zone()->isSystemCompartment;
}
JS_FRIEND_API(bool)

View file

@@ -1503,9 +1503,9 @@ template <AllowGC allowGC>
/* static */ void *
ArenaLists::refillFreeList(JSContext *cx, AllocKind thingKind)
{
- JS_ASSERT(cx->compartment->allocator.arenas.freeLists[thingKind].isEmpty());
+ JS_ASSERT(cx->zone()->allocator.arenas.freeLists[thingKind].isEmpty());
- Zone *zone = cx->compartment;
+ Zone *zone = cx->zone();
JSRuntime *rt = zone->rt;
JS_ASSERT(!rt->isHeapBusy());
@@ -1987,7 +1987,7 @@ js::TriggerZoneGC(Zone *zone, gcreason::Reason reason)
return;
}
- if (zone == rt->atomsCompartment) {
+ if (zone == rt->atomsCompartment->zone()) {
/* We can't do a zone GC of the atoms compartment. */
TriggerGC(rt, reason);
return;
@@ -2015,7 +2015,7 @@ js::MaybeGC(JSContext *cx)
}
double factor = rt->gcHighFrequencyGC ? 0.75 : 0.9;
- Zone *zone = cx->compartment;
+ Zone *zone = cx->zone();
if (zone->gcBytes > 1024 * 1024 &&
zone->gcBytes >= factor * zone->gcTriggerBytes &&
rt->gcIncrementalState == NO_INCREMENTAL &&
@@ -2539,9 +2539,9 @@ SweepCompartments(FreeOp *fop, bool lastGC)
JSCompartment *compartment = *read++;
if (!compartment->hold && compartment->wasGCStarted() &&
- (compartment->allocator.arenas.arenaListsAreEmpty() || lastGC))
+ (compartment->zone()->allocator.arenas.arenaListsAreEmpty() || lastGC))
{
- compartment->allocator.arenas.checkEmptyFreeLists();
+ compartment->zone()->allocator.arenas.checkEmptyFreeLists();
if (callback)
callback(fop, compartment);
if (compartment->principals)
@@ -2683,9 +2683,9 @@ BeginMarkPhase(JSRuntime *rt)
/* Set up which compartments will be collected. */
if (zone->isGCScheduled()) {
- if (zone != rt->atomsCompartment) {
+ if (zone != rt->atomsCompartment->zone()) {
any = true;
- zone->setGCState(JSCompartment::Mark);
+ zone->setGCState(Zone::Mark);
}
} else {
rt->gcIsFull = false;
@@ -2702,10 +2702,10 @@ BeginMarkPhase(JSRuntime *rt)
* atoms. Otherwise, the non-collected compartments could contain pointers
* to atoms that we would miss.
*/
- JSCompartment *atomsComp = rt->atomsCompartment;
- if (atomsComp->isGCScheduled() && rt->gcIsFull && !rt->gcKeepAtoms) {
- JS_ASSERT(!atomsComp->isCollecting());
- atomsComp->setGCState(JSCompartment::Mark);
+ Zone *atomsZone = rt->atomsCompartment->zone();
+ if (atomsZone->isGCScheduled() && rt->gcIsFull && !rt->gcKeepAtoms) {
+ JS_ASSERT(!atomsZone->isCollecting());
+ atomsZone->setGCState(Zone::Mark);
}
/*
@@ -3019,7 +3019,7 @@ js::gc::MarkingValidator::nonIncrementalMark()
/* Update compartment state for gray marking. */
for (GCZonesIter zone(runtime); !zone.done(); zone.next()) {
JS_ASSERT(zone->isGCMarkingBlack());
- zone->setGCState(JSCompartment::MarkGray);
+ zone->setGCState(Zone::MarkGray);
}
MarkAllGrayReferences(runtime);
@@ -3027,7 +3027,7 @@ js::gc::MarkingValidator::nonIncrementalMark()
/* Restore compartment state. */
for (GCZonesIter zone(runtime); !zone.done(); zone.next()) {
JS_ASSERT(zone->isGCMarkingGray());
- zone->setGCState(JSCompartment::Mark);
+ zone->setGCState(Zone::Mark);
}
}
@@ -3207,7 +3207,7 @@ JSCompartment::findOutgoingEdgesFromCompartment(ComponentFinder<JS::Zone> &finde
#endif
}
- Debugger::findCompartmentEdges(this, finder);
+ Debugger::findCompartmentEdges(zone(), finder);
}
void
@@ -3256,8 +3256,8 @@ GetNextZoneGroup(JSRuntime *rt)
for (GCZoneGroupIter zone(rt); !zone.done(); zone.next()) {
JS_ASSERT(!zone->gcNextGraphComponent);
JS_ASSERT(zone->isGCMarking());
- zone->setNeedsBarrier(false, JSCompartment::UpdateIon);
- zone->setGCState(JSCompartment::NoGC);
+ zone->setNeedsBarrier(false, Zone::UpdateIon);
+ zone->setGCState(Zone::NoGC);
}
for (GCCompartmentGroupIter comp(rt); !comp.done(); comp.next()) {
@@ -3395,8 +3395,8 @@ MarkIncomingCrossCompartmentPointers(JSRuntime *rt, const uint32_t color)
bool unlinkList = color == GRAY;
for (GCCompartmentGroupIter c(rt); !c.done(); c.next()) {
- JS_ASSERT_IF(color == GRAY, c->isGCMarkingGray());
- JS_ASSERT_IF(color == BLACK, c->isGCMarkingBlack());
+ JS_ASSERT_IF(color == GRAY, c->zone()->isGCMarkingGray());
+ JS_ASSERT_IF(color == BLACK, c->zone()->isGCMarkingBlack());
JS_ASSERT_IF(c->gcIncomingGrayPointers, IsGrayListObject(c->gcIncomingGrayPointers));
for (RawObject src = c->gcIncomingGrayPointers;
@@ -3528,7 +3528,7 @@ EndMarkingZoneGroup(JSRuntime *rt)
*/
for (GCZoneGroupIter zone(rt); !zone.done(); zone.next()) {
JS_ASSERT(zone->isGCMarkingBlack());
- zone->setGCState(JSCompartment::MarkGray);
+ zone->setGCState(Zone::MarkGray);
}
/* Mark incoming gray pointers from previously swept compartments. */
@@ -3542,7 +3542,7 @@ EndMarkingZoneGroup(JSRuntime *rt)
/* Restore marking state. */
for (GCZoneGroupIter zone(rt); !zone.done(); zone.next()) {
JS_ASSERT(zone->isGCMarkingGray());
- zone->setGCState(JSCompartment::Mark);
+ zone->setGCState(Zone::Mark);
}
JS_ASSERT(rt->gcMarker.isDrained());
@@ -3560,12 +3560,12 @@ BeginSweepingZoneGroup(JSRuntime *rt)
for (GCZoneGroupIter zone(rt); !zone.done(); zone.next()) {
/* Set the GC state to sweeping. */
JS_ASSERT(zone->isGCMarking());
- zone->setGCState(JSCompartment::Sweep);
+ zone->setGCState(Zone::Sweep);
/* Purge the ArenaLists before sweeping. */
zone->allocator.arenas.purge();
- if (zone == rt->atomsCompartment)
+ if (zone == rt->atomsCompartment->zone())
sweepingAtoms = true;
}
@@ -3652,7 +3652,7 @@ EndSweepingZoneGroup(JSRuntime *rt)
/* Update the GC state for compartments we have swept and unlink the list. */
for (GCZoneGroupIter zone(rt); !zone.done(); zone.next()) {
JS_ASSERT(zone->isGCSweeping());
- zone->setGCState(JSCompartment::Finished);
+ zone->setGCState(Zone::Finished);
}
/* Reset the list of arenas marked as being allocated during sweep phase. */
@@ -3772,8 +3772,8 @@ EndSweepPhase(JSRuntime *rt, JSGCInvocationKind gckind, bool lastGC)
* newly created compartments. Can only change from full to not full.
*/
if (rt->gcIsFull) {
- for (CompartmentsIter c(rt); !c.done(); c.next()) {
- if (!c->isCollecting()) {
+ for (ZonesIter zone(rt); !zone.done(); zone.next()) {
+ if (!zone->isCollecting()) {
rt->gcIsFull = false;
break;
}
@@ -3787,9 +3787,9 @@ EndSweepPhase(JSRuntime *rt, JSGCInvocationKind gckind, bool lastGC)
* prevent the cycle collector from collecting some dead objects.
*/
if (rt->gcFoundBlackGrayEdges) {
- for (CompartmentsIter c(rt); !c.done(); c.next()) {
- if (!c->isCollecting())
- c->allocator.arenas.unmarkAll();
+ for (ZonesIter zone(rt); !zone.done(); zone.next()) {
+ if (!zone->isCollecting())
+ zone->allocator.arenas.unmarkAll();
}
}
@@ -3863,17 +3863,27 @@ EndSweepPhase(JSRuntime *rt, JSGCInvocationKind gckind, bool lastGC)
SweepCompartments(&fop, lastGC);
}
- for (CompartmentsIter c(rt); !c.done(); c.next()) {
- c->setGCLastBytes(c->gcBytes, gckind);
- if (c->isCollecting()) {
- JS_ASSERT(c->isGCFinished());
- c->setGCState(JSCompartment::NoGC);
+ for (ZonesIter zone(rt); !zone.done(); zone.next()) {
+ zone->setGCLastBytes(zone->gcBytes, gckind);
+ if (zone->isCollecting()) {
+ JS_ASSERT(zone->isGCFinished());
+ zone->setGCState(Zone::NoGC);
}
#ifdef DEBUG
- JS_ASSERT(!c->isCollecting());
- JS_ASSERT(!c->wasGCStarted());
+ JS_ASSERT(!zone->isCollecting());
+ JS_ASSERT(!zone->wasGCStarted());
+ for (unsigned i = 0 ; i < FINALIZE_LIMIT ; ++i) {
+ JS_ASSERT_IF(!IsBackgroundFinalized(AllocKind(i)) ||
+ !rt->gcSweepOnBackgroundThread,
+ !zone->allocator.arenas.arenaListsToSweep[i]);
+ }
#endif
}
+ #ifdef DEBUG
for (CompartmentsIter c(rt); !c.done(); c.next()) {
JS_ASSERT(!c->gcIncomingGrayPointers);
JS_ASSERT(!c->gcLiveArrayBuffers);
@@ -3881,14 +3891,8 @@ EndSweepPhase(JSRuntime *rt, JSGCInvocationKind gckind, bool lastGC)
if (e.front().key.kind != CrossCompartmentKey::StringWrapper)
AssertNotOnGrayList(&e.front().value.get().toObject());
}
- for (unsigned i = 0 ; i < FINALIZE_LIMIT ; ++i) {
- JS_ASSERT_IF(!IsBackgroundFinalized(AllocKind(i)) ||
- !rt->gcSweepOnBackgroundThread,
- !c->allocator.arenas.arenaListsToSweep[i]);
- }
- #endif
}
+ #endif
FinishMarkingValidation(rt);
@@ -3942,10 +3946,11 @@ AutoGCSession::~AutoGCSession()
#endif
/* Clear gcMallocBytes for all compartments */
- for (CompartmentsIter c(runtime); !c.done(); c.next()) {
+ for (CompartmentsIter c(runtime); !c.done(); c.next())
c->resetGCMallocBytes();
- c->unscheduleGC();
- }
+ for (ZonesIter zone(runtime); !zone.done(); zone.next())
+ zone->unscheduleGC();
runtime->resetGCMallocBytes();
}
@@ -3953,14 +3958,14 @@ AutoGCSession::~AutoGCSession()
AutoCopyFreeListToArenas::AutoCopyFreeListToArenas(JSRuntime *rt)
: runtime(rt)
{
- for (CompartmentsIter c(rt); !c.done(); c.next())
- c->allocator.arenas.copyFreeListsToArenas();
+ for (ZonesIter zone(rt); !zone.done(); zone.next())
+ zone->allocator.arenas.copyFreeListsToArenas();
}
AutoCopyFreeListToArenas::~AutoCopyFreeListToArenas()
{
- for (CompartmentsIter c(runtime); !c.done(); c.next())
- c->allocator.arenas.clearFreeListsInArenas();
+ for (ZonesIter zone(runtime); !zone.done(); zone.next())
+ zone->allocator.arenas.clearFreeListsInArenas();
}
static void
@@ -3990,8 +3995,8 @@ ResetIncrementalGC(JSRuntime *rt, const char *reason)
for (GCZonesIter zone(rt); !zone.done(); zone.next()) {
JS_ASSERT(zone->isGCMarking());
- zone->setNeedsBarrier(false, JSCompartment::UpdateIon);
- zone->setGCState(JSCompartment::NoGC);
+ zone->setNeedsBarrier(false, Zone::UpdateIon);
+ zone->setGCState(Zone::NoGC);
}
rt->gcIncrementalState = NO_INCREMENTAL;
@@ -4064,7 +4069,7 @@ AutoGCSlice::AutoGCSlice(JSRuntime *rt)
*/
if (zone->isGCMarking()) {
JS_ASSERT(zone->needsBarrier());
- zone->setNeedsBarrier(false, JSCompartment::DontUpdateIon);
+ zone->setNeedsBarrier(false, Zone::DontUpdateIon);
} else {
JS_ASSERT(!zone->needsBarrier());
}
@@ -4076,10 +4081,10 @@ AutoGCSlice::~AutoGCSlice()
/* We can't use GCZonesIter if this is the end of the last slice. */
for (ZonesIter zone(runtime); !zone.done(); zone.next()) {
if (zone->isGCMarking()) {
- zone->setNeedsBarrier(true, JSCompartment::UpdateIon);
+ zone->setNeedsBarrier(true, Zone::UpdateIon);
zone->allocator.arenas.prepareForIncrementalGC(runtime);
} else {
- zone->setNeedsBarrier(false, JSCompartment::UpdateIon);
+ zone->setNeedsBarrier(false, Zone::UpdateIon);
}
}
}
@@ -4260,21 +4265,24 @@ BudgetIncrementalGC(JSRuntime *rt, int64_t *budget)
}
bool reset = false;
- for (CompartmentsIter c(rt); !c.done(); c.next()) {
- if (c->gcBytes >= c->gcTriggerBytes) {
+ for (ZonesIter zone(rt); !zone.done(); zone.next()) {
+ if (zone->gcBytes >= zone->gcTriggerBytes) {
*budget = SliceBudget::Unlimited;
rt->gcStats.nonincremental("allocation trigger");
}
+ if (rt->gcIncrementalState != NO_INCREMENTAL &&
+ zone->isGCScheduled() != zone->wasGCStarted())
+ {
+ reset = true;
+ }
+ }
+ for (CompartmentsIter c(rt); !c.done(); c.next()) {
if (c->isTooMuchMalloc()) {
*budget = SliceBudget::Unlimited;
rt->gcStats.nonincremental("malloc bytes trigger");
}
- if (rt->gcIncrementalState != NO_INCREMENTAL &&
- c->isGCScheduled() != c->wasGCStarted()) {
- reset = true;
- }
}
if (reset)
@@ -4294,8 +4302,8 @@ GCCycle(JSRuntime *rt, bool incremental, int64_t budget, JSGCInvocationKind gcki
AutoAssertNoGC nogc;
#ifdef DEBUG
- for (CompartmentsIter c(rt); !c.done(); c.next())
- JS_ASSERT_IF(rt->gcMode == JSGC_MODE_GLOBAL, c->isGCScheduled());
+ for (ZonesIter zone(rt); !zone.done(); zone.next())
+ JS_ASSERT_IF(rt->gcMode == JSGC_MODE_GLOBAL, zone->isGCScheduled());
#endif
/*
@@ -4418,24 +4426,28 @@ Collect(JSRuntime *rt, bool incremental, int64_t budget,
RecordNativeStackTopForGC(rt);
+ int zoneCount = 0;
int compartmentCount = 0;
int collectedCount = 0;
- for (CompartmentsIter c(rt); !c.done(); c.next()) {
+ for (ZonesIter zone(rt); !zone.done(); zone.next()) {
if (rt->gcMode == JSGC_MODE_GLOBAL)
- c->scheduleGC();
+ zone->scheduleGC();
/* This is a heuristic to avoid resets. */
- if (rt->gcIncrementalState != NO_INCREMENTAL && c->needsBarrier())
- c->scheduleGC();
+ if (rt->gcIncrementalState != NO_INCREMENTAL && zone->needsBarrier())
+ zone->scheduleGC();
- compartmentCount++;
- if (c->isGCScheduled())
+ zoneCount++;
+ if (zone->isGCScheduled())
collectedCount++;
}
+ for (CompartmentsIter c(rt); !c.done(); c.next())
+ compartmentCount++;
rt->gcShouldCleanUpEverything = ShouldCleanUpEverything(rt, reason, gckind);
- gcstats::AutoGCSlice agc(rt->gcStats, collectedCount, compartmentCount, reason);
+ gcstats::AutoGCSlice agc(rt->gcStats, collectedCount, zoneCount, compartmentCount, reason);
do {
/*
@@ -4498,10 +4510,10 @@ js::GCFinalSlice(JSRuntime *rt, JSGCInvocationKind gckind, gcreason::Reason reas
}
static bool
- CompartmentsSelected(JSRuntime *rt)
+ ZonesSelected(JSRuntime *rt)
{
- for (CompartmentsIter c(rt); !c.done(); c.next()) {
- if (c->isGCScheduled())
+ for (ZonesIter zone(rt); !zone.done(); zone.next()) {
+ if (zone->isGCScheduled())
return true;
}
return false;
@@ -4512,7 +4524,7 @@ js::GCDebugSlice(JSRuntime *rt, bool limit, int64_t objCount)
{
AssertCanGC();
int64_t budget = limit ? SliceBudget::WorkBudget(objCount) : SliceBudget::Unlimited;
- if (!CompartmentsSelected(rt)) {
+ if (!ZonesSelected(rt)) {
if (IsIncrementalGCInProgress(rt))
PrepareForIncrementalGC(rt);
else
@@ -4525,7 +4537,7 @@ js::GCDebugSlice(JSRuntime *rt, bool limit, int64_t objCount)
void
js::PrepareForDebugGC(JSRuntime *rt)
{
- if (!CompartmentsSelected(rt))
+ if (!ZonesSelected(rt))
PrepareForFullGC(rt);
}
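The recurring mechanical change in this file is the iteration domain: loops that schedule, barrier, sweep, or account gcBytes now walk zones, while per-compartment bookkeeping (malloc counters, wrappers, principals) keeps walking compartments. A schematic sketch of the split, assuming iterator semantics exactly as used above:

// Schematic only; both iterators are used exactly as in the hunks above.
static void exampleSplitLoops(JSRuntime *rt) {
    for (ZonesIter zone(rt); !zone.done(); zone.next())
        zone->scheduleGC();          // GC scheduling: per zone
    for (CompartmentsIter c(rt); !c.done(); c.next())
        c->resetGCMallocBytes();     // malloc accounting: per compartment
}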

View file

@@ -459,7 +459,7 @@ struct ArenaLists {
inline void queueForForegroundSweep(FreeOp *fop, AllocKind thingKind);
inline void queueForBackgroundSweep(FreeOp *fop, AllocKind thingKind);
- inline void *allocateFromArena(JSCompartment *comp, AllocKind thingKind);
+ inline void *allocateFromArena(JS::Zone *zone, AllocKind thingKind);
};
/*

View file

@@ -505,17 +505,20 @@ NewGCThing(JSContext *cx, js::gc::AllocKind kind, size_t thingSize)
if (allowGC)
MaybeCheckStackRoots(cx, /* relax = */ false);
- JSCompartment *comp = cx->compartment;
- T *t = static_cast<T *>(comp->allocator.arenas.allocateFromFreeList(kind, thingSize));
+ JS::Zone *zone = cx->zone();
+ T *t = static_cast<T *>(zone->allocator.arenas.allocateFromFreeList(kind, thingSize));
if (!t)
t = static_cast<T *>(js::gc::ArenaLists::refillFreeList<allowGC>(cx, kind));
- JS_ASSERT_IF(t && comp->wasGCStarted() && (comp->isGCMarking() || comp->isGCSweeping()),
+ JS_ASSERT_IF(t && zone->wasGCStarted() && (zone->isGCMarking() || zone->isGCSweeping()),
t->arenaHeader()->allocatedDuringIncremental);
#if defined(JSGC_GENERATIONAL) && defined(JS_GC_ZEAL)
- if (cx->runtime->gcVerifyPostData && IsNurseryAllocable(kind) && !IsAtomsCompartment(comp))
- comp->gcNursery.insertPointer(t);
+ if (cx->runtime->gcVerifyPostData && IsNurseryAllocable(kind)
+ && !IsAtomsCompartment(cx->compartment))
+ {
+ zone->gcNursery.insertPointer(t);
+ }
#endif
return t;

View file

@@ -6229,7 +6229,7 @@ void
TypeSet::sweep(JSCompartment *compartment)
{
JS_ASSERT(!purged());
- JS_ASSERT(compartment->isGCSweeping());
+ JS_ASSERT(compartment->zone()->isGCSweeping());
/*
* Purge references to type objects that are no longer live. Type sets hold
@@ -6309,7 +6309,7 @@ TypeObject::sweep(FreeOp *fop)
}
JSCompartment *compartment = this->compartment();
- JS_ASSERT(compartment->isGCSweeping());
+ JS_ASSERT(compartment->zone()->isGCSweeping());
if (!isMarked()) {
if (newScript)
@@ -6393,7 +6393,7 @@ struct SweepTypeObjectOp
void
SweepTypeObjects(FreeOp *fop, JSCompartment *compartment)
{
- JS_ASSERT(compartment->isGCSweeping());
+ JS_ASSERT(compartment->zone()->isGCSweeping());
SweepTypeObjectOp op(fop);
gc::ForEachArenaAndCell(compartment, gc::FINALIZE_TYPE_OBJECT, gc::EmptyArenaOp, op);
}
@@ -6402,7 +6402,7 @@ void
TypeCompartment::sweep(FreeOp *fop)
{
JSCompartment *compartment = this->compartment();
- JS_ASSERT(compartment->isGCSweeping());
+ JS_ASSERT(compartment->zone()->isGCSweeping());
SweepTypeObjects(fop, compartment);
@@ -6539,7 +6539,7 @@ JSCompartment::sweepNewTypeObjectTable(TypeObjectSet &table)
{
gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_TABLES_TYPE_OBJECT);
- JS_ASSERT(isGCSweeping());
+ JS_ASSERT(zone()->isGCSweeping());
if (table.initialized()) {
for (TypeObjectSet::Enum e(table); !e.empty(); e.popFront()) {
TypeObject *type = e.front();
@@ -6570,7 +6570,7 @@ TypeCompartment::~TypeCompartment()
TypeScript::Sweep(FreeOp *fop, RawScript script)
{
JSCompartment *compartment = script->compartment();
- JS_ASSERT(compartment->isGCSweeping());
+ JS_ASSERT(compartment->zone()->isGCSweeping());
JS_ASSERT(compartment->types.inferenceEnabled);
unsigned num = NumTypeSets(script);

View file

@@ -420,7 +420,7 @@ struct AutoEnterCompilation
co.script = script;
co.setKind(kind);
co.constructing = constructing;
- co.barriers = cx->compartment->compileBarriers();
+ co.barriers = cx->zone()->compileBarriers();
co.chunkIndex = chunkIndex;
// This flag is used to prevent adding the current compiled script in

View file

@@ -1369,9 +1369,9 @@ MarkGeneratorFrame(JSTracer *trc, JSGenerator *gen)
static void
GeneratorWriteBarrierPre(JSContext *cx, JSGenerator *gen)
{
- JSCompartment *comp = cx->compartment;
- if (comp->needsBarrier())
- MarkGeneratorFrame(comp->barrierTracer(), gen);
+ JS::Zone *zone = cx->zone();
+ if (zone->needsBarrier())
+ MarkGeneratorFrame(zone->barrierTracer(), gen);
}
/*

View file

@@ -334,7 +334,7 @@ JS::SystemCompartmentCount(const JSRuntime *rt)
{
size_t n = 0;
for (size_t i = 0; i < rt->compartments.length(); i++) {
- if (rt->compartments[i]->isSystemCompartment)
+ if (rt->compartments[i]->zone()->isSystemCompartment)
++n;
}
return n;
@@ -345,7 +345,7 @@ JS::UserCompartmentCount(const JSRuntime *rt)
{
size_t n = 0;
for (size_t i = 0; i < rt->compartments.length(); i++) {
- if (!rt->compartments[i]->isSystemCompartment)
+ if (!rt->compartments[i]->zone()->isSystemCompartment)
++n;
}
return n;

View file

@@ -1769,10 +1769,10 @@ JSObject::ReserveForTradeGuts(JSContext *cx, JSObject *aArg, JSObject *bArg,
* after the swap, |b|'s guts would never be marked. The write barrier
* solves this.
*/
- JSCompartment *comp = a->compartment();
- if (comp->needsBarrier()) {
- MarkChildren(comp->barrierTracer(), a);
- MarkChildren(comp->barrierTracer(), b);
+ JS::Zone *zone = a->zone();
+ if (zone->needsBarrier()) {
+ MarkChildren(zone->barrierTracer(), a);
+ MarkChildren(zone->barrierTracer(), b);
}
#endif
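The comment in this hunk describes the incremental-GC pre-barrier discipline: before edges are overwritten (here, by swapping the two objects' guts), the old targets must be traced so the marker cannot lose them. A generic sketch of that idea, with a hypothetical helper rather than the MarkChildren calls above:

// Hypothetical illustration of a pre-write barrier: while the zone is
// doing incremental marking, trace whatever an edge currently points at
// before the edge is overwritten. TraceOldTarget stands in for the
// type-specific Mark* functions used in the real code.
template <typename T>
static void preBarrier(JS::Zone *zone, T *oldTarget) {
    if (zone->needsBarrier() && oldTarget)
        TraceOldTarget(zone->barrierTracer(), oldTarget);
}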

View file

@@ -439,9 +439,9 @@ Probes::ETWStopExecution(UnrootedScript script)
}
bool
- Probes::ETWResizeHeap(JSCompartment *compartment, size_t oldSize, size_t newSize)
+ Probes::ETWResizeHeap(JS::Zone *zone, size_t oldSize, size_t newSize)
{
- return EventWriteEvtHeapResize(reinterpret_cast<uint64_t>(compartment),
+ return EventWriteEvtHeapResize(reinterpret_cast<uint64_t>(zone),
oldSize, newSize) == ERROR_SUCCESS;
}

View file

@@ -104,7 +104,7 @@ bool startExecution(UnrootedScript script);
bool stopExecution(UnrootedScript script);
/* Heap has been resized */
- bool resizeHeap(JSCompartment *compartment, size_t oldSize, size_t newSize);
+ bool resizeHeap(JS::Zone *zone, size_t oldSize, size_t newSize);
/*
* Object has been created. |obj| must exist (its class and size are read)
@@ -371,12 +371,12 @@ Probes::exitScript(JSContext *cx, UnrootedScript script, UnrootedFunction maybeF
}
inline bool
- Probes::resizeHeap(JSCompartment *compartment, size_t oldSize, size_t newSize)
+ Probes::resizeHeap(JS::Zone *zone, size_t oldSize, size_t newSize)
{
bool ok = true;
#ifdef MOZ_ETW
- if (ProfilingActive && !ETWResizeHeap(compartment, oldSize, newSize))
+ if (ProfilingActive && !ETWResizeHeap(zone, oldSize, newSize))
ok = false;
#endif

View file

@@ -159,16 +159,16 @@ PropertyTree::getChild(JSContext *cx, Shape *parent_, uint32_t nfixed, const Sta
#ifdef JSGC_INCREMENTAL
if (shape) {
- JSCompartment *comp = shape->compartment();
- if (comp->needsBarrier()) {
+ JS::Zone *zone = shape->zone();
+ if (zone->needsBarrier()) {
/*
* We need a read barrier for the shape tree, since these are weak
* pointers.
*/
Shape *tmp = shape;
- MarkShapeUnbarriered(comp->barrierTracer(), &tmp, "read barrier");
+ MarkShapeUnbarriered(zone->barrierTracer(), &tmp, "read barrier");
JS_ASSERT(tmp == shape);
- } else if (comp->isGCSweeping() && !shape->isMarked() &&
+ } else if (zone->isGCSweeping() && !shape->isMarked() &&
!shape->arenaHeader()->allocatedDuringIncremental)
{
/*

View file

@@ -2601,7 +2601,7 @@ JSScript::markChildren(JSTracer *trc)
// JSScript::Create(), but not yet finished initializing it with
// fullyInitFromEmitter() or fullyInitTrivial().
- JS_ASSERT_IF(trc->runtime->gcStrictCompartmentChecking, compartment()->isCollecting());
+ JS_ASSERT_IF(trc->runtime->gcStrictCompartmentChecking, zone()->isCollecting());
for (uint32_t i = 0; i < natoms; ++i) {
if (atoms[i])

View file

@@ -308,7 +308,7 @@ WeakMap_set_impl(JSContext *cx, CallArgs args)
JS_ReportOutOfMemory(cx);
return false;
}
- HashTableWriteBarrierPost(cx->compartment, map, key);
+ HashTableWriteBarrierPost(cx->zone(), map, key);
args.rval().setUndefined();
return true;

View file

@@ -1365,7 +1365,7 @@ static const JSC::MacroAssembler::RegisterID JSParamReg_Argc = JSC::MIPSRegiste
* span is not empty is handled.
*/
gc::FreeSpan *list = const_cast<gc::FreeSpan *>
- (cx->compartment->allocator.arenas.getFreeList(allocKind));
+ (cx->zone()->allocator.arenas.getFreeList(allocKind));
loadPtr(&list->first, result);
Jump jump = branchPtr(Assembler::BelowOrEqual, AbsoluteAddress(&list->last), result);

View file

@@ -146,7 +146,7 @@ mjit::Compiler::compile()
if (status != Compile_Okay && status != Compile_Retry) {
if (!outerScript->ensureHasMJITInfo(cx))
return Compile_Error;
- JSScript::JITScriptHandle *jith = outerScript->jitHandle(isConstructing, cx->compartment->compileBarriers());
+ JSScript::JITScriptHandle *jith = outerScript->jitHandle(isConstructing, cx->zone()->compileBarriers());
JSScript::ReleaseCode(cx->runtime->defaultFreeOp(), jith);
jith->setUnjittable();
@@ -1015,7 +1015,7 @@ mjit::CanMethodJIT(JSContext *cx, JSScript *script, jsbytecode *pc,
}
if (script->hasMJITInfo()) {
- JSScript::JITScriptHandle *jith = script->jitHandle(construct, cx->compartment->compileBarriers());
+ JSScript::JITScriptHandle *jith = script->jitHandle(construct, cx->zone()->compileBarriers());
if (jith->isUnjittable())
return Compile_Abort;
}
@@ -1040,7 +1040,7 @@ mjit::CanMethodJIT(JSContext *cx, JSScript *script, jsbytecode *pc,
if (!script->ensureHasMJITInfo(cx))
return Compile_Error;
- JSScript::JITScriptHandle *jith = script->jitHandle(construct, cx->compartment->compileBarriers());
+ JSScript::JITScriptHandle *jith = script->jitHandle(construct, cx->zone()->compileBarriers());
JITScript *jit;
if (jith->isEmpty()) {
@@ -5654,7 +5654,7 @@ mjit::Compiler::jsop_setprop(HandlePropertyName name, bool popGuaranteed)
if (!isObject)
notObject = frame.testObject(Assembler::NotEqual, lhs);
#ifdef JSGC_INCREMENTAL_MJ
- if (cx->compartment->compileBarriers() && propertyTypes->needsBarrier(cx)) {
+ if (cx->zone()->compileBarriers() && propertyTypes->needsBarrier(cx)) {
/* Write barrier. */
Jump j = masm.testGCThing(Address(reg, JSObject::getFixedSlotOffset(slot)));
stubcc.linkExit(j, Uses(0));
@@ -5687,7 +5687,7 @@ mjit::Compiler::jsop_setprop(HandlePropertyName name, bool popGuaranteed)
#ifdef JSGC_INCREMENTAL_MJ
/* Write barrier. We don't have type information for JSOP_SETNAME. */
- if (cx->compartment->compileBarriers() &&
+ if (cx->zone()->compileBarriers() &&
(!types || JSOp(*PC) == JSOP_SETNAME || types->propertyNeedsBarrier(cx, id)))
{
jsop_setprop_slow(name);
@@ -6063,7 +6063,7 @@ mjit::Compiler::jsop_aliasedArg(unsigned arg, bool get, bool poppedAfter)
frame.push(Address(reg), type, true /* = reuseBase */);
} else {
#ifdef JSGC_INCREMENTAL_MJ
- if (cx->compartment->compileBarriers()) {
+ if (cx->zone()->compileBarriers()) {
/* Write barrier. */
stubcc.linkExit(masm.testGCThing(Address(reg)), Uses(0));
stubcc.leave();
@@ -6106,7 +6106,7 @@ mjit::Compiler::jsop_aliasedVar(ScopeCoordinate sc, bool get, bool poppedAfter)
finishBarrier(barrier, REJOIN_FALLTHROUGH, 0);
} else {
#ifdef JSGC_INCREMENTAL_MJ
- if (cx->compartment->compileBarriers()) {
+ if (cx->zone()->compileBarriers()) {
/* Write barrier. */
stubcc.linkExit(masm.testGCThing(addr), Uses(0));
stubcc.leave();
@@ -6261,7 +6261,7 @@ mjit::Compiler::iter(unsigned flags)
* Write barrier for stores to the iterator. We only need to take a write
* barrier if NativeIterator::obj is actually going to change.
*/
- if (cx->compartment->compileBarriers()) {
+ if (cx->zone()->compileBarriers()) {
Jump j = masm.branchPtr(Assembler::NotEqual,
Address(nireg, offsetof(NativeIterator, obj)), reg);
stubcc.linkExit(j, Uses(1));
@@ -6672,7 +6672,7 @@ mjit::Compiler::jsop_setgname(HandlePropertyName name, bool popGuaranteed)
RegisterID reg = frame.allocReg();
#ifdef JSGC_INCREMENTAL_MJ
/* Write barrier. */
- if (cx->compartment->compileBarriers() && types->needsBarrier(cx)) {
+ if (cx->zone()->compileBarriers() && types->needsBarrier(cx)) {
stubcc.linkExit(masm.jump(), Uses(0));
stubcc.leave();
stubcc.masm.move(ImmPtr(value), Registers::ArgReg1);
@@ -6690,7 +6690,7 @@ mjit::Compiler::jsop_setgname(HandlePropertyName name, bool popGuaranteed)
#ifdef JSGC_INCREMENTAL_MJ
/* Write barrier. */
- if (cx->compartment->compileBarriers()) {
+ if (cx->zone()->compileBarriers()) {
jsop_setgname_slow(name);
return true;
}

View file

@@ -495,7 +495,7 @@ private:
}
JITScript *outerJIT() {
- return outerScript->getJIT(isConstructing, cx->compartment->compileBarriers());
+ return outerScript->getJIT(isConstructing, cx->zone()->compileBarriers());
}
ChunkDescriptor &outerChunkRef() {

View file

@@ -478,7 +478,7 @@ mjit::Compiler::compileArrayPopShift(FrameEntry *thisValue, bool isPacked, bool
#ifdef JSGC_INCREMENTAL_MJ
/* Write barrier. */
- if (cx->compartment->compileBarriers())
+ if (cx->zone()->compileBarriers())
return Compile_InlineAbort;
#endif

View file

@@ -974,7 +974,7 @@ mjit::Compiler::jsop_setelem_dense()
* undefined.
*/
types::StackTypeSet *types = frame.extra(obj).types;
- if (cx->compartment->compileBarriers() && (!types || types->propertyNeedsBarrier(cx, JSID_VOID))) {
+ if (cx->zone()->compileBarriers() && (!types || types->propertyNeedsBarrier(cx, JSID_VOID))) {
Label barrierStart = stubcc.masm.label();
stubcc.linkExitDirect(masm.jump(), barrierStart);
@@ -1371,7 +1371,7 @@ mjit::Compiler::jsop_setelem(bool popGuaranteed)
#ifdef JSGC_INCREMENTAL_MJ
// Write barrier.
- if (cx->compartment->compileBarriers()) {
+ if (cx->zone()->compileBarriers()) {
jsop_setelem_slow();
return true;
}
@@ -2482,7 +2482,7 @@ mjit::Compiler::jsop_initprop()
RootedObject baseobj(cx, frame.extra(obj).initObject);
- if (!baseobj || monitored(PC) || cx->compartment->compileBarriers()) {
+ if (!baseobj || monitored(PC) || cx->zone()->compileBarriers()) {
if (monitored(PC) && script_ == outerScript)
monitoredBytecodes.append(PC - script_->code);

View file

@@ -370,7 +370,7 @@ UncachedInlineCall(VMFrame &f, InitialFrameFlags initial,
* will be constructing a new type object for 'this'.
*/
if (!newType) {
- if (JITScript *jit = newscript->getJIT(regs.fp()->isConstructing(), cx->compartment->compileBarriers())) {
+ if (JITScript *jit = newscript->getJIT(regs.fp()->isConstructing(), cx->zone()->compileBarriers())) {
if (jit->invokeEntry) {
*pret = jit->invokeEntry;

View file

@@ -1104,7 +1104,7 @@ JaegerStatus
mjit::JaegerShot(JSContext *cx, bool partial)
{
StackFrame *fp = cx->fp();
- JITScript *jit = fp->script()->getJIT(fp->isConstructing(), cx->compartment->compileBarriers());
+ JITScript *jit = fp->script()->getJIT(fp->isConstructing(), cx->zone()->compileBarriers());
JS_ASSERT(cx->regs().pc == fp->script()->code);
@@ -1334,8 +1334,8 @@ JITScript::destroyChunk(FreeOp *fop, unsigned chunkIndex, bool resetUses)
* Write barrier: Before we destroy the chunk, trace through the objects
* it holds.
*/
- if (script->compartment()->needsBarrier())
- desc.chunk->trace(script->compartment()->barrierTracer());
+ if (script->zone()->needsBarrier())
+ desc.chunk->trace(script->zone()->barrierTracer());
Probes::discardMJITCode(fop, this, desc.chunk, desc.chunk->code.m_code.executableAddress());
fop->delete_(desc.chunk);
@@ -1386,7 +1386,7 @@ JITScript::trace(JSTracer *trc)
static ic::PICInfo *
GetPIC(JSContext *cx, JSScript *script, jsbytecode *pc, bool constructing)
{
- JITScript *jit = script->getJIT(constructing, cx->compartment->needsBarrier());
+ JITScript *jit = script->getJIT(constructing, cx->zone()->needsBarrier());
if (!jit)
return NULL;

View file

@@ -874,7 +874,7 @@ class CallCompiler : public BaseCompiler
masm.loadPtr(Address(t0, JSScript::offsetOfMJITInfo()), t0);
Jump hasNoJitInfo = masm.branchPtr(Assembler::Equal, t0, ImmPtr(NULL));
size_t offset = JSScript::JITScriptSet::jitHandleOffset(callingNew,
- f.cx->compartment->compileBarriers());
+ f.cx->zone()->compileBarriers());
masm.loadPtr(Address(t0, offset), t0);
Jump hasNoJitCode = masm.branchPtr(Assembler::BelowOrEqual, t0,
ImmPtr(JSScript::JITScriptHandle::UNJITTABLE));
@@ -962,7 +962,7 @@ class CallCompiler : public BaseCompiler
bool patchInlinePath(JSScript *script, JSObject *obj)
{
JS_ASSERT(ic.frameSize.isStatic());
- JITScript *jit = script->getJIT(callingNew, f.cx->compartment->compileBarriers());
+ JITScript *jit = script->getJIT(callingNew, f.cx->zone()->compileBarriers());
/* Very fast path. */
Repatcher repatch(f.chunk());

View file

@@ -564,7 +564,7 @@ class SetPropCompiler : public PICStubCompiler
* Since we're changing the object's shape, we need a write
* barrier. Taking the slow path is the easiest way to get one.
*/
- if (cx->compartment->compileBarriers())
+ if (cx->zone()->compileBarriers())
return disable("ADDPROP write barrier required");
#endif
@@ -2846,7 +2846,7 @@ SetElementIC::shouldUpdate(VMFrame &f)
return false;
}
#ifdef JSGC_INCREMENTAL_MJ
- JS_ASSERT(!f.cx->compartment->compileBarriers());
+ JS_ASSERT(!f.cx->zone()->compileBarriers());
#endif
JS_ASSERT(stubsGenerated < MAX_PIC_STUBS);
return true;

View file

@@ -1749,21 +1749,21 @@ void JS_FASTCALL
stubs::WriteBarrier(VMFrame &f, Value *addr)
{
#ifdef JS_GC_ZEAL
- if (!f.cx->compartment->needsBarrier())
+ if (!f.cx->zone()->needsBarrier())
return;
#endif
- gc::MarkValueUnbarriered(f.cx->compartment->barrierTracer(), addr, "write barrier");
+ gc::MarkValueUnbarriered(f.cx->zone()->barrierTracer(), addr, "write barrier");
}
void JS_FASTCALL
stubs::GCThingWriteBarrier(VMFrame &f, Value *addr)
{
#ifdef JS_GC_ZEAL
- if (!f.cx->compartment->needsBarrier())
+ if (!f.cx->zone()->needsBarrier())
return;
#endif
gc::Cell *cell = (gc::Cell *)addr->toGCThing();
if (cell && !cell->isMarked())
- gc::MarkValueUnbarriered(f.cx->compartment->barrierTracer(), addr, "write barrier");
+ gc::MarkValueUnbarriered(f.cx->zone()->barrierTracer(), addr, "write barrier");
}

View file

@@ -159,7 +159,7 @@ NormalArgumentsObject::callee() const
inline void
NormalArgumentsObject::clearCallee()
{
- data()->callee.set(compartment(), MagicValue(JS_OVERWRITTEN_CALLEE));
+ data()->callee.set(zone(), MagicValue(JS_OVERWRITTEN_CALLEE));
}
} /* namespace js */

View file

@@ -698,7 +698,7 @@ Debugger::wrapDebuggeeValue(JSContext *cx, MutableHandleValue vp)
js_ReportOutOfMemory(cx);
return false;
}
- HashTableWriteBarrierPost(cx->compartment, &objects, obj);
+ HashTableWriteBarrierPost(cx->zone(), &objects, obj);
if (obj->compartment() != object->compartment()) {
CrossCompartmentKey key(CrossCompartmentKey::DebuggerObject, object, obj);
@@ -1418,7 +1418,7 @@ Debugger::markCrossCompartmentDebuggerObjectReferents(JSTracer *tracer)
* compartments.
*/
for (Debugger *dbg = rt->debuggerList.getFirst(); dbg; dbg = dbg->getNext()) {
- if (!dbg->object->compartment()->isCollecting())
+ if (!dbg->object->zone()->isCollecting())
dbg->markKeysInCompartment(tracer);
}
}
@@ -1468,7 +1468,7 @@ Debugger::markAllIteratively(GCMarker *trc)
* - it actually has hooks that might be called
*/
HeapPtrObject &dbgobj = dbg->toJSObjectRef();
- if (!dbgobj->compartment()->isGCMarking())
+ if (!dbgobj->zone()->isGCMarking())
continue;
bool dbgMarked = IsObjectMarked(&dbgobj);

View file

@@ -219,25 +219,25 @@ js::ObjectImpl::checkShapeConsistency()
void
js::ObjectImpl::initSlotRange(uint32_t start, const Value *vector, uint32_t length)
{
- JSCompartment *comp = compartment();
+ JS::Zone *zone = this->zone();
HeapSlot *fixedStart, *fixedEnd, *slotsStart, *slotsEnd;
getSlotRange(start, length, &fixedStart, &fixedEnd, &slotsStart, &slotsEnd);
for (HeapSlot *sp = fixedStart; sp < fixedEnd; sp++)
- sp->init(comp, this->asObjectPtr(), HeapSlot::Slot, start++, *vector++);
+ sp->init(zone, this->asObjectPtr(), HeapSlot::Slot, start++, *vector++);
for (HeapSlot *sp = slotsStart; sp < slotsEnd; sp++)
- sp->init(comp, this->asObjectPtr(), HeapSlot::Slot, start++, *vector++);
+ sp->init(zone, this->asObjectPtr(), HeapSlot::Slot, start++, *vector++);
}
void
js::ObjectImpl::copySlotRange(uint32_t start, const Value *vector, uint32_t length)
{
- JSCompartment *comp = compartment();
+ JS::Zone *zone = this->zone();
HeapSlot *fixedStart, *fixedEnd, *slotsStart, *slotsEnd;
getSlotRange(start, length, &fixedStart, &fixedEnd, &slotsStart, &slotsEnd);
for (HeapSlot *sp = fixedStart; sp < fixedEnd; sp++)
- sp->set(comp, this->asObjectPtr(), HeapSlot::Slot, start++, *vector++);
+ sp->set(zone, this->asObjectPtr(), HeapSlot::Slot, start++, *vector++);
for (HeapSlot *sp = slotsStart; sp < slotsEnd; sp++)
- sp->set(comp, this->asObjectPtr(), HeapSlot::Slot, start++, *vector++);
+ sp->set(zone, this->asObjectPtr(), HeapSlot::Slot, start++, *vector++);
}
#ifdef DEBUG

View file

@@ -433,7 +433,7 @@ RegExpStatics::updateLazily(JSContext *cx, JSLinearString *input,
JS_ASSERT(input && shared);
aboutToWrite();
- BarrieredSetPair<JSString, JSLinearString>(cx->compartment,
+ BarrieredSetPair<JSString, JSLinearString>(cx->zone(),
pendingInput, input,
matchesInput, input);
if (regexp.initialized())
@@ -455,7 +455,7 @@ RegExpStatics::updateFromMatchPairs(JSContext *cx, JSLinearString *input, MatchP
this->regexp.release();
this->lastIndex = size_t(-1);
- BarrieredSetPair<JSString, JSLinearString>(cx->compartment,
+ BarrieredSetPair<JSString, JSLinearString>(cx->zone(),
pendingInput, input,
matchesInput, input);

View file

@@ -1687,7 +1687,7 @@ DebugScopes::addDebugScope(JSContext *cx, ScopeObject &scope, DebugScopeObject &
return false;
}
- HashTableWriteBarrierPost(cx->compartment, &scopes->proxiedScopes, &scope);
+ HashTableWriteBarrierPost(cx->zone(), &scopes->proxiedScopes, &scope);
return true;
}

View file

@@ -76,7 +76,7 @@ inline mjit::JITScript *
StackFrame::jit()
{
AutoAssertNoGC nogc;
- return script()->getJIT(isConstructing(), script()->compartment()->compileBarriers());
+ return script()->getJIT(isConstructing(), script()->zone()->compileBarriers());
}
#endif

View file

@@ -192,7 +192,8 @@ StackFrame::prevpcSlow(InlinedSite **pinlined)
JS_ASSERT(!(flags_ & HAS_PREVPC));
#if defined(JS_METHODJIT) && defined(JS_MONOIC)
StackFrame *p = prev();
- mjit::JITScript *jit = p->script()->getJIT(p->isConstructing(), p->compartment()->compileBarriers());
+ mjit::JITScript *jit = p->script()->getJIT(p->isConstructing(),
+ p->compartment()->zone()->compileBarriers());
prevpc_ = jit->nativeToPC(ncode_, &prevInline_);
flags_ |= HAS_PREVPC;
if (pinlined)