Bug 898886 - Improve threadsafe assertions when accessing runtimes and zones, r=billm.

Brian Hackett 2013-08-05 08:48:34 -06:00
Parent fbcf001662
Commit b92045c124
56 changed files with 348 additions and 271 deletions
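In outline, the patch replaces the lone runtime()/rt accessors on gc::Cell, JS::Zone, and JSCompartment with a checked and an unchecked variant, and rewrites the old rt->assertValidThread() calls as CurrentThreadCanAccessRuntime() assertions. The Zone pair from the hunks below shows the whole pattern (a condensed sketch of code in this diff, not new code):

    JSRuntime *runtimeFromMainThread() const {
        // Checked: the calling thread must be permitted to touch the runtime.
        JS_ASSERT(CurrentThreadCanAccessRuntime(runtime_));
        return runtime_;
    }

    // Unchecked: unrestricted access from an arbitrary thread can easily
    // lead to races, so callers must use this very carefully.
    JSRuntime *runtimeFromAnyThread() const {
        return runtime_;
    }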

View file

@ -79,7 +79,7 @@ inline void
EncapsulatedValue::writeBarrierPre(const Value &value)
{
#ifdef JSGC_INCREMENTAL
if (value.isMarkable() && runtime(value)->needsBarrier())
if (value.isMarkable() && runtimeFromAnyThread(value)->needsBarrier())
writeBarrierPre(ZoneOfValue(value), value);
#endif
}
@ -89,7 +89,7 @@ EncapsulatedValue::writeBarrierPre(Zone *zone, const Value &value)
{
#ifdef JSGC_INCREMENTAL
if (zone->needsBarrier()) {
JS_ASSERT_IF(value.isMarkable(), runtime(value)->needsBarrier());
JS_ASSERT_IF(value.isMarkable(), runtimeFromMainThread(value)->needsBarrier());
Value tmp(value);
js::gc::MarkValueUnbarriered(zone->barrierTracer(), &tmp, "write barrier");
JS_ASSERT(tmp == value);
@ -180,14 +180,14 @@ HeapValue::set(Zone *zone, const Value &v)
#ifdef DEBUG
if (value.isMarkable()) {
JS_ASSERT(ZoneOfValue(value) == zone ||
ZoneOfValue(value) == zone->rt->atomsCompartment->zone());
ZoneOfValue(value) == zone->runtimeFromMainThread()->atomsCompartment->zone());
}
#endif
pre(zone);
JS_ASSERT(!IsPoisonedValue(v));
value = v;
post(zone->rt);
post(zone->runtimeFromAnyThread());
}
inline void
@ -195,7 +195,7 @@ HeapValue::writeBarrierPost(const Value &value, Value *addr)
{
#ifdef JSGC_GENERATIONAL
if (value.isMarkable())
runtime(value)->gcStoreBuffer.putValue(addr);
runtimeFromMainThread(value)->gcStoreBuffer.putValue(addr);
#endif
}
@ -248,7 +248,7 @@ inline
RelocatableValue::~RelocatableValue()
{
if (value.isMarkable())
relocate(runtime(value));
relocate(runtimeFromMainThread(value));
}
inline RelocatableValue &
@ -260,7 +260,7 @@ RelocatableValue::operator=(const Value &v)
value = v;
post();
} else if (value.isMarkable()) {
JSRuntime *rt = runtime(value);
JSRuntime *rt = runtimeFromMainThread(value);
value = v;
relocate(rt);
} else {
@ -278,7 +278,7 @@ RelocatableValue::operator=(const RelocatableValue &v)
value = v.value;
post();
} else if (value.isMarkable()) {
JSRuntime *rt = runtime(value);
JSRuntime *rt = runtimeFromMainThread(value);
value = v.value;
relocate(rt);
} else {
@ -292,7 +292,7 @@ RelocatableValue::post()
{
#ifdef JSGC_GENERATIONAL
JS_ASSERT(value.isMarkable());
runtime(value)->gcStoreBuffer.putRelocatableValue(&value);
runtimeFromMainThread(value)->gcStoreBuffer.putRelocatableValue(&value);
#endif
}
@ -362,14 +362,14 @@ HeapSlot::set(Zone *zone, JSObject *obj, Kind kind, uint32_t slot, const Value &
pre(zone);
JS_ASSERT(!IsPoisonedValue(v));
value = v;
post(zone->rt, obj, kind, slot, v);
post(zone->runtimeFromAnyThread(), obj, kind, slot, v);
}
inline void
HeapSlot::writeBarrierPost(JSObject *obj, Kind kind, uint32_t slot, Value target)
{
#ifdef JSGC_GENERATIONAL
writeBarrierPost(obj->runtime(), obj, kind, slot, target);
writeBarrierPost(obj->runtimeFromAnyThread(), obj, kind, slot, target);
#endif
}
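The barriers above pick between the two accessors case by case: the needsBarrier() predicate is read through runtimeFromAnyThread(), while relocation and the HeapValue post-barrier assert main-thread access through runtimeFromMainThread(). The pre-barrier, restated with comments (same code as the first hunk; only the annotations are new):

    inline void
    EncapsulatedValue::writeBarrierPre(const Value &value)
    {
    #ifdef JSGC_INCREMENTAL
        // The barrier flag may be consulted from any thread holding the value...
        if (value.isMarkable() && runtimeFromAnyThread(value)->needsBarrier())
            // ...while the Zone overload, which actually marks, asserts
            // main-thread access on its own paths.
            writeBarrierPre(ZoneOfValue(value), value);
    #endif
    }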

View file

@ -266,7 +266,7 @@ class RelocatablePtr : public EncapsulatedPtr<T>
~RelocatablePtr() {
if (this->value)
relocate(this->value->runtime());
relocate(this->value->runtimeFromMainThread());
}
RelocatablePtr<T> &operator=(T *v) {
@ -276,7 +276,7 @@ class RelocatablePtr : public EncapsulatedPtr<T>
this->value = v;
post();
} else if (this->value) {
JSRuntime *rt = this->value->runtime();
JSRuntime *rt = this->value->runtimeFromMainThread();
this->value = v;
relocate(rt);
}
@ -290,7 +290,7 @@ class RelocatablePtr : public EncapsulatedPtr<T>
this->value = v.value;
post();
} else if (this->value) {
JSRuntime *rt = this->value->runtime();
JSRuntime *rt = this->value->runtimeFromMainThread();
this->value = v;
relocate(rt);
}
@ -419,9 +419,13 @@ class EncapsulatedValue : public ValueOperations<EncapsulatedValue>
inline void pre();
inline void pre(Zone *zone);
static inline JSRuntime *runtime(const Value &v) {
static inline JSRuntime *runtimeFromMainThread(const Value &v) {
JS_ASSERT(v.isMarkable());
return static_cast<js::gc::Cell *>(v.toGCThing())->runtime();
return static_cast<js::gc::Cell *>(v.toGCThing())->runtimeFromMainThread();
}
static inline JSRuntime *runtimeFromAnyThread(const Value &v) {
JS_ASSERT(v.isMarkable());
return static_cast<js::gc::Cell *>(v.toGCThing())->runtimeFromAnyThread();
}
private:

View file

@ -28,8 +28,10 @@ struct JSRuntime;
namespace js {
// Defined in vm/ForkJoin.cpp
extern bool InSequentialOrExclusiveParallelSection();
// Whether the current thread is permitted access to any part of the specified
// runtime or zone.
extern bool CurrentThreadCanAccessRuntime(JSRuntime *rt);
extern bool CurrentThreadCanAccessZone(JS::Zone *zone);
class FreeOp;
@ -97,10 +99,14 @@ struct Cell
MOZ_ALWAYS_INLINE bool markIfUnmarked(uint32_t color = BLACK) const;
MOZ_ALWAYS_INLINE void unmark(uint32_t color) const;
inline JSRuntime *runtime() const;
inline JSRuntime *runtimeFromMainThread() const;
inline JS::Zone *tenuredZone() const;
inline bool tenuredIsInsideZone(JS::Zone *zone) const;
// Note: Unrestricted access to the runtime of a GC thing from an arbitrary
// thread can easily lead to races. Use this method very carefully.
inline JSRuntime *runtimeFromAnyThread() const;
#ifdef DEBUG
inline bool isAligned() const;
inline bool isTenured() const;
@ -950,9 +956,16 @@ Cell::arenaHeader() const
}
inline JSRuntime *
Cell::runtime() const
Cell::runtimeFromMainThread() const
{
JSRuntime *rt = chunk()->info.runtime;
JS_ASSERT(CurrentThreadCanAccessRuntime(rt));
return rt;
}
inline JSRuntime *
Cell::runtimeFromAnyThread() const
{
JS_ASSERT(InSequentialOrExclusiveParallelSection());
return chunk()->info.runtime;
}
@ -990,9 +1003,10 @@ Cell::unmark(uint32_t color) const
JS::Zone *
Cell::tenuredZone() const
{
JS_ASSERT(InSequentialOrExclusiveParallelSection());
JS::Zone *zone = arenaHeader()->zone;
JS_ASSERT(CurrentThreadCanAccessZone(zone));
JS_ASSERT(isTenured());
return arenaHeader()->zone;
return zone;
}
bool
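Note that even the unchecked Cell accessor is not assertion-free: runtimeFromAnyThread() still requires a sequential or exclusive parallel section, and tenuredZone() now checks CurrentThreadCanAccessZone() before returning the zone. A hedged usage sketch (the helper and its setting are hypothetical; only the accessors come from this patch):

    // Hypothetical reader running on an exclusive helper thread.
    static bool CellNeedsBarrier(js::gc::Cell *cell)
    {
        // Permitted off the main thread, at the caller's own risk.
        JSRuntime *rt = cell->runtimeFromAnyThread();
        // cell->runtimeFromMainThread() would trip the
        // CurrentThreadCanAccessRuntime() assertion here instead.
        return rt->needsBarrier();
    }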

View file

@ -86,7 +86,7 @@ js::IterateScripts(JSRuntime *rt, JSCompartment *compartment,
void
js::IterateGrayObjects(Zone *zone, GCThingCallback cellCallback, void *data)
{
AutoPrepareForTracing prep(zone->rt);
AutoPrepareForTracing prep(zone->runtimeFromMainThread());
for (size_t finalizeKind = 0; finalizeKind <= FINALIZE_OBJECT_LAST; finalizeKind++) {
for (CellIterUnderGC i(zone, AllocKind(finalizeKind)); !i.done(); i.next()) {

View file

@ -126,7 +126,7 @@ CheckMarkedThing(JSTracer *trc, T *thing)
return;
JS_ASSERT(thing->zone());
JS_ASSERT(thing->zone()->rt == trc->runtime);
JS_ASSERT(thing->zone()->runtimeFromMainThread() == trc->runtime);
JS_ASSERT(trc->debugPrinter || trc->debugPrintArg);
DebugOnly<JSRuntime *> rt = trc->runtime;
@ -134,7 +134,7 @@ CheckMarkedThing(JSTracer *trc, T *thing)
JS_ASSERT_IF(IS_GC_MARKING_TRACER(trc) && rt->gcManipulatingDeadZones,
!thing->zone()->scheduledForDestruction);
rt->assertValidThread();
JS_ASSERT(CurrentThreadCanAccessRuntime(rt));
JS_ASSERT_IF(thing->zone()->requireGCTracer(),
IS_GC_MARKING_TRACER(trc));
@ -270,7 +270,7 @@ IsMarked(T **thingp)
JS_ASSERT(thingp);
JS_ASSERT(*thingp);
#ifdef JSGC_GENERATIONAL
Nursery &nursery = (*thingp)->runtime()->gcNursery;
Nursery &nursery = (*thingp)->runtimeFromMainThread()->gcNursery;
if (nursery.isInside(*thingp))
return nursery.getForwardedPointer(thingp);
#endif
@ -288,7 +288,7 @@ IsAboutToBeFinalized(T **thingp)
JS_ASSERT(*thingp);
#ifdef JSGC_GENERATIONAL
Nursery &nursery = (*thingp)->runtime()->gcNursery;
Nursery &nursery = (*thingp)->runtimeFromMainThread()->gcNursery;
if (nursery.isInside(*thingp))
return !nursery.getForwardedPointer(thingp);
#endif
@ -775,7 +775,7 @@ static void
PushMarkStack(GCMarker *gcmarker, JSFunction *thing)
{
JS_COMPARTMENT_ASSERT(gcmarker->runtime, thing);
JS_ASSERT(!IsInsideNursery(thing->runtime(), thing));
JS_ASSERT(!IsInsideNursery(gcmarker->runtime, thing));
if (thing->markIfUnmarked(gcmarker->getMarkColor()))
gcmarker->pushObject(thing);
@ -785,7 +785,7 @@ static void
PushMarkStack(GCMarker *gcmarker, types::TypeObject *thing)
{
JS_COMPARTMENT_ASSERT(gcmarker->runtime, thing);
JS_ASSERT(!IsInsideNursery(thing->runtime(), thing));
JS_ASSERT(!IsInsideNursery(gcmarker->runtime, thing));
if (thing->markIfUnmarked(gcmarker->getMarkColor()))
gcmarker->pushType(thing);
@ -795,7 +795,7 @@ static void
PushMarkStack(GCMarker *gcmarker, JSScript *thing)
{
JS_COMPARTMENT_ASSERT(gcmarker->runtime, thing);
JS_ASSERT(!IsInsideNursery(thing->runtime(), thing));
JS_ASSERT(!IsInsideNursery(gcmarker->runtime, thing));
/*
* We mark scripts directly rather than pushing on the stack as they can
@ -810,7 +810,7 @@ static void
PushMarkStack(GCMarker *gcmarker, LazyScript *thing)
{
JS_COMPARTMENT_ASSERT(gcmarker->runtime, thing);
JS_ASSERT(!IsInsideNursery(thing->runtime(), thing));
JS_ASSERT(!IsInsideNursery(gcmarker->runtime, thing));
/*
* We mark lazy scripts directly rather than pushing on the stack as they
@ -827,7 +827,7 @@ static void
PushMarkStack(GCMarker *gcmarker, Shape *thing)
{
JS_COMPARTMENT_ASSERT(gcmarker->runtime, thing);
JS_ASSERT(!IsInsideNursery(thing->runtime(), thing));
JS_ASSERT(!IsInsideNursery(gcmarker->runtime, thing));
/* We mark shapes directly rather than pushing on the stack. */
if (thing->markIfUnmarked(gcmarker->getMarkColor()))
@ -838,7 +838,7 @@ static void
PushMarkStack(GCMarker *gcmarker, ion::IonCode *thing)
{
JS_COMPARTMENT_ASSERT(gcmarker->runtime, thing);
JS_ASSERT(!IsInsideNursery(thing->runtime(), thing));
JS_ASSERT(!IsInsideNursery(gcmarker->runtime, thing));
if (thing->markIfUnmarked(gcmarker->getMarkColor()))
gcmarker->pushIonCode(thing);
@ -851,7 +851,7 @@ static void
PushMarkStack(GCMarker *gcmarker, BaseShape *thing)
{
JS_COMPARTMENT_ASSERT(gcmarker->runtime, thing);
JS_ASSERT(!IsInsideNursery(thing->runtime(), thing));
JS_ASSERT(!IsInsideNursery(gcmarker->runtime, thing));
/* We mark base shapes directly rather than pushing on the stack. */
if (thing->markIfUnmarked(gcmarker->getMarkColor()))
@ -1689,7 +1689,7 @@ JS::UnmarkGrayGCThingRecursively(void *thing, JSGCTraceKind kind)
UnmarkGrayGCThing(thing);
JSRuntime *rt = static_cast<Cell *>(thing)->runtime();
JSRuntime *rt = static_cast<Cell *>(thing)->runtimeFromMainThread();
UnmarkGrayTracer trc(rt);
JS_TraceChildren(&trc, thing, kind);
}

View file

@ -317,7 +317,7 @@ JS_PUBLIC_API(void)
JS::HeapCellPostBarrier(js::gc::Cell **cellp)
{
JS_ASSERT(*cellp);
JSRuntime *runtime = (*cellp)->runtime();
JSRuntime *runtime = (*cellp)->runtimeFromMainThread();
runtime->gcStoreBuffer.putRelocatableCell(cellp);
}
@ -326,7 +326,7 @@ JS::HeapCellRelocate(js::gc::Cell **cellp)
{
/* Called with old contents of *cellp before overwriting. */
JS_ASSERT(*cellp);
JSRuntime *runtime = (*cellp)->runtime();
JSRuntime *runtime = (*cellp)->runtimeFromMainThread();
runtime->gcStoreBuffer.removeRelocatableCell(cellp);
}
@ -334,7 +334,7 @@ JS_PUBLIC_API(void)
JS::HeapValuePostBarrier(JS::Value *valuep)
{
JS_ASSERT(JSVAL_IS_TRACEABLE(*valuep));
JSRuntime *runtime = static_cast<js::gc::Cell *>(valuep->toGCThing())->runtime();
JSRuntime *runtime = static_cast<js::gc::Cell *>(valuep->toGCThing())->runtimeFromMainThread();
runtime->gcStoreBuffer.putRelocatableValue(valuep);
}
@ -343,7 +343,7 @@ JS::HeapValueRelocate(JS::Value *valuep)
{
/* Called with old contents of *valuep before overwriting. */
JS_ASSERT(JSVAL_IS_TRACEABLE(*valuep));
JSRuntime *runtime = static_cast<js::gc::Cell *>(valuep->toGCThing())->runtime();
JSRuntime *runtime = static_cast<js::gc::Cell *>(valuep->toGCThing())->runtimeFromMainThread();
runtime->gcStoreBuffer.removeRelocatableValue(valuep);
}

View file

@ -22,7 +22,7 @@ using namespace js;
using namespace js::gc;
JS::Zone::Zone(JSRuntime *rt)
: rt(rt),
: runtime_(rt),
allocator(this),
hold(false),
ionUsingBarriers_(false),
@ -50,8 +50,8 @@ JS::Zone::Zone(JSRuntime *rt)
Zone::~Zone()
{
if (this == rt->systemZone)
rt->systemZone = NULL;
if (this == runtimeFromMainThread()->systemZone)
runtimeFromMainThread()->systemZone = NULL;
}
bool
@ -93,7 +93,7 @@ Zone::markTypes(JSTracer *trc)
for (size_t thingKind = FINALIZE_OBJECT0; thingKind < FINALIZE_OBJECT_LIMIT; thingKind++) {
ArenaHeader *aheader = allocator.arenas.getFirstArena(static_cast<AllocKind>(thingKind));
if (aheader)
rt->gcMarker.pushArenaList(aheader);
trc->runtime->gcMarker.pushArenaList(aheader);
}
for (CellIterUnderGC i(this, FINALIZE_TYPE_OBJECT); !i.done(); i.next()) {
@ -137,11 +137,11 @@ Zone::sweep(FreeOp *fop, bool releaseTypes)
releaseTypes = false;
if (!isPreservingCode()) {
gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_DISCARD_ANALYSIS);
gcstats::AutoPhase ap(fop->runtime()->gcStats, gcstats::PHASE_DISCARD_ANALYSIS);
types.sweep(fop, releaseTypes);
}
if (!rt->debuggerList.isEmpty())
if (!fop->runtime()->debuggerList.isEmpty())
sweepBreakpoints(fop);
active = false;
@ -155,8 +155,8 @@ Zone::sweepBreakpoints(FreeOp *fop)
* to iterate over the scripts belonging to a single compartment in a zone.
*/
gcstats::AutoPhase ap1(rt->gcStats, gcstats::PHASE_SWEEP_TABLES);
gcstats::AutoPhase ap2(rt->gcStats, gcstats::PHASE_SWEEP_TABLES_BREAKPOINT);
gcstats::AutoPhase ap1(fop->runtime()->gcStats, gcstats::PHASE_SWEEP_TABLES);
gcstats::AutoPhase ap2(fop->runtime()->gcStats, gcstats::PHASE_SWEEP_TABLES_BREAKPOINT);
for (CellIterUnderGC i(this, FINALIZE_SCRIPT); !i.done(); i.next()) {
JSScript *script = i.get<JSScript>();

View file

@ -104,7 +104,12 @@ struct Zone : private JS::shadow::Zone,
public js::gc::GraphNodeBase<JS::Zone>,
public js::MallocProvider<JS::Zone>
{
JSRuntime *rt;
private:
JSRuntime *runtime_;
friend bool js::CurrentThreadCanAccessZone(Zone *zone);
public:
js::Allocator allocator;
js::CompartmentVector compartments;
@ -117,12 +122,23 @@ struct Zone : private JS::shadow::Zone,
public:
bool active; // GC flag, whether there are active frames
JSRuntime *runtimeFromMainThread() const {
JS_ASSERT(CurrentThreadCanAccessRuntime(runtime_));
return runtime_;
}
// Note: Unrestricted access to the zone's runtime from an arbitrary
// thread can easily lead to races. Use this method very carefully.
JSRuntime *runtimeFromAnyThread() const {
return runtime_;
}
bool needsBarrier() const {
return needsBarrier_;
}
bool compileBarriers(bool needsBarrier) const {
return needsBarrier || rt->gcZeal() == js::gc::ZealVerifierPreValue;
return needsBarrier || runtimeFromMainThread()->gcZeal() == js::gc::ZealVerifierPreValue;
}
bool compileBarriers() const {
@ -142,7 +158,7 @@ struct Zone : private JS::shadow::Zone,
js::GCMarker *barrierTracer() {
JS_ASSERT(needsBarrier_);
return &rt->gcMarker;
return &runtimeFromMainThread()->gcMarker;
}
public:
@ -161,7 +177,7 @@ struct Zone : private JS::shadow::Zone,
public:
bool isCollecting() const {
if (rt->isHeapCollecting())
if (runtimeFromMainThread()->isHeapCollecting())
return gcState != NoGC;
else
return needsBarrier();
@ -176,16 +192,16 @@ struct Zone : private JS::shadow::Zone,
* tracer.
*/
bool requireGCTracer() const {
return rt->isHeapMajorCollecting() && gcState != NoGC;
return runtimeFromMainThread()->isHeapMajorCollecting() && gcState != NoGC;
}
void setGCState(CompartmentGCState state) {
JS_ASSERT(rt->isHeapBusy());
JS_ASSERT(runtimeFromMainThread()->isHeapBusy());
gcState = state;
}
void scheduleGC() {
JS_ASSERT(!rt->isHeapBusy());
JS_ASSERT(!runtimeFromMainThread()->isHeapBusy());
/* Note: zones cannot be collected while in use by other threads. */
if (!usedByExclusiveThread)
@ -209,7 +225,7 @@ struct Zone : private JS::shadow::Zone,
}
bool isGCMarking() {
if (rt->isHeapCollecting())
if (runtimeFromMainThread()->isHeapCollecting())
return gcState == Mark || gcState == MarkGray;
else
return needsBarrier();
@ -292,7 +308,7 @@ struct Zone : private JS::shadow::Zone,
void onTooMuchMalloc();
void *onOutOfMemory(void *p, size_t nbytes) {
return rt->onOutOfMemory(p, nbytes);
return runtimeFromMainThread()->onOutOfMemory(p, nbytes);
}
void reportAllocationOverflow() {
js_ReportAllocationOverflow(NULL);
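With runtime_ now private, the friend declaration above is what lets the access check itself read the field without going through the checked accessor. CurrentThreadCanAccessZone() is defined in vm/ForkJoin.cpp and its body is not part of this diff; a plausible shape, assuming it honors the usedByExclusiveThread flag referenced in scheduleGC(), might be:

    // Assumption: not the actual definition, just one consistent with the
    // declarations and the usedByExclusiveThread flag seen in this patch.
    bool
    js::CurrentThreadCanAccessZone(JS::Zone *zone)
    {
        return CurrentThreadCanAccessRuntime(zone->runtime_) ||
               zone->usedByExclusiveThread;
    }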

View file

@ -1763,7 +1763,7 @@ class FunctionCompiler
JS_ASSERT(locals_.count() == argTypes.length() + varInitializers_.length());
alloc_ = lifo_.new_<TempAllocator>(&lifo_);
ionContext_.construct(m_.cx()->compartment(), alloc_);
ionContext_.construct(m_.cx()->runtime(), m_.cx()->compartment(), alloc_);
graph_ = lifo_.new_<MIRGraph>(alloc_);
info_ = lifo_.new_<CompileInfo>(locals_.count(), SequentialExecution);
@ -4843,7 +4843,7 @@ GenerateCodeForFinishedJob(ModuleCompiler &m, ParallelGroupState &group, AsmJSPa
{
// Perform code generation on the main thread.
IonContext ionContext(m.cx()->compartment(), &task->mir->temp());
IonContext ionContext(m.cx()->runtime(), m.cx()->compartment(), &task->mir->temp());
if (!GenerateCode(m, func, *task->mir, *task->lir))
return false;
}
@ -5152,7 +5152,7 @@ static const RegisterSet NonVolatileRegs =
static void
LoadAsmJSActivationIntoRegister(MacroAssembler &masm, Register reg)
{
masm.movePtr(ImmWord(GetIonContext()->compartment->rt), reg);
masm.movePtr(ImmWord(GetIonContext()->runtime), reg);
size_t offset = offsetof(JSRuntime, mainThread) +
PerThreadData::offsetOfAsmJSActivationStackReadOnly();
masm.loadPtr(Address(reg, offset), reg);
@ -6204,7 +6204,7 @@ FinishModule(ModuleCompiler &m,
ScopedJSFreePtr<char> *compilationTimeReport)
{
TempAllocator alloc(&m.cx()->tempLifoAlloc());
IonContext ionContext(m.cx()->compartment(), &alloc);
IonContext ionContext(m.cx()->runtime(), m.cx()->compartment(), &alloc);
if (!GenerateStubs(m))
return false;

View file

@ -436,7 +436,7 @@ BaselineCompiler::emitInterruptCheck()
frame.syncStack(0);
Label done;
void *interrupt = (void *)&cx->compartment()->rt->interrupt;
void *interrupt = (void *)&cx->runtime()->interrupt;
masm.branch32(Assembler::Equal, AbsoluteAddress(interrupt), Imm32(0), &done);
prepareVMCall();

View file

@ -3778,7 +3778,7 @@ ICGetElem_String::Compiler::generateStubCode(MacroAssembler &masm)
&failure);
// Load static string.
masm.movePtr(ImmWord(&cx->compartment()->rt->staticStrings.unitStaticTable), str);
masm.movePtr(ImmWord(&cx->runtime()->staticStrings.unitStaticTable), str);
masm.loadPtr(BaseIndex(str, scratchReg, ScalePointer), str);
// Return.

View file

@ -709,7 +709,7 @@ BaselineScript::toggleDebugTraps(JSScript *script, jsbytecode *pc)
SrcNoteLineScanner scanner(script->notes(), script->lineno);
IonContext ictx(script->compartment(), NULL);
IonContext ictx(script->runtimeFromMainThread(), script->compartment(), NULL);
AutoFlushCache afc("DebugTraps");
for (uint32_t i = 0; i < numPCMappingIndexEntries(); i++) {
@ -902,7 +902,7 @@ ion::MarkActiveBaselineScripts(Zone *zone)
{
// First check if there is a JitActivation on the stack, so that there
// must be a valid IonContext.
JitActivationIterator iter(zone->rt);
JitActivationIterator iter(zone->runtimeFromMainThread());
if (iter.done())
return;

View file

@ -624,7 +624,7 @@ CodeGenerator::visitIntToString(LIntToString *lir)
masm.branch32(Assembler::AboveOrEqual, input, Imm32(StaticStrings::INT_STATIC_LIMIT),
ool->entry());
masm.movePtr(ImmWord(&gen->compartment->rt->staticStrings.intStaticTable), output);
masm.movePtr(ImmWord(&GetIonContext()->runtime->staticStrings.intStaticTable), output);
masm.loadPtr(BaseIndex(output, input, ScalePointer), output);
masm.bind(ool->rejoin());
@ -653,7 +653,7 @@ CodeGenerator::visitDoubleToString(LDoubleToString *lir)
masm.branch32(Assembler::AboveOrEqual, temp, Imm32(StaticStrings::INT_STATIC_LIMIT),
ool->entry());
masm.movePtr(ImmWord(&gen->compartment->rt->staticStrings.intStaticTable), output);
masm.movePtr(ImmWord(&GetIonContext()->runtime->staticStrings.intStaticTable), output);
masm.loadPtr(BaseIndex(output, temp, ScalePointer), output);
masm.bind(ool->rejoin());
@ -1341,7 +1341,7 @@ CodeGenerator::visitOutOfLineCallPostWriteBarrier(OutOfLineCallPostWriteBarrier
}
Register runtimereg = regs.takeAny();
masm.mov(ImmWord(gen->compartment->rt), runtimereg);
masm.mov(ImmWord(GetIonContext()->runtime), runtimereg);
masm.setupUnalignedABICall(2, regs.takeAny());
masm.passABIArg(runtimereg);
@ -1363,7 +1363,7 @@ CodeGenerator::visitPostWriteBarrierO(LPostWriteBarrierO *lir)
if (!addOutOfLineCode(ool))
return false;
Nursery &nursery = gen->compartment->rt->gcNursery;
Nursery &nursery = GetIonContext()->runtime->gcNursery;
if (lir->object()->isConstant()) {
JS_ASSERT(!nursery.isInside(&lir->object()->toConstant()->toObject()));
@ -1395,7 +1395,7 @@ CodeGenerator::visitPostWriteBarrierV(LPostWriteBarrierV *lir)
ValueOperand value = ToValue(lir, LPostWriteBarrierV::Input);
masm.branchTestObject(Assembler::NotEqual, value, ool->rejoin());
Nursery &nursery = gen->compartment->rt->gcNursery;
Nursery &nursery = GetIonContext()->runtime->gcNursery;
if (lir->object()->isConstant()) {
JS_ASSERT(!nursery.isInside(&lir->object()->toConstant()->toObject()));
@ -2274,7 +2274,7 @@ CodeGenerator::visitCheckOverRecursed(LCheckOverRecursed *lir)
// Ion may legally place frames very close to the limit. Calling additional
// C functions may then violate the limit without any checking.
JSRuntime *rt = gen->compartment->rt;
JSRuntime *rt = GetIonContext()->runtime;
// Since Ion frames exist on the C stack, the stack limit may be
// dynamically set by JS_SetThreadStackLimit() and JS_SetNativeStackQuota().
@ -2772,7 +2772,7 @@ CodeGenerator::visitNewSlots(LNewSlots *lir)
Register temp3 = ToRegister(lir->temp3());
Register output = ToRegister(lir->output());
masm.mov(ImmWord(gen->compartment->rt), temp1);
masm.mov(ImmWord(GetIonContext()->runtime), temp1);
masm.mov(Imm32(lir->mir()->nslots()), temp2);
masm.setupUnalignedABICall(2, temp3);
@ -4336,7 +4336,7 @@ CodeGenerator::visitFromCharCode(LFromCharCode *lir)
masm.branch32(Assembler::AboveOrEqual, code, Imm32(StaticStrings::UNIT_STATIC_LIMIT),
ool->entry());
masm.movePtr(ImmWord(&gen->compartment->rt->staticStrings.unitStaticTable), output);
masm.movePtr(ImmWord(&GetIonContext()->runtime->staticStrings.unitStaticTable), output);
masm.loadPtr(BaseIndex(output, code, ScalePointer), output);
masm.bind(ool->rejoin());
@ -5043,7 +5043,7 @@ CodeGenerator::visitIteratorStart(LIteratorStart *lir)
JS_ASSERT(flags == JSITER_ENUMERATE);
// Fetch the most recent iterator and ensure it's not NULL.
masm.loadPtr(AbsoluteAddress(&gen->compartment->rt->nativeIterCache.last), output);
masm.loadPtr(AbsoluteAddress(&GetIonContext()->runtime->nativeIterCache.last), output);
masm.branchTestPtr(Assembler::Zero, output, output, ool->entry());
// Load NativeIterator.
@ -6246,7 +6246,7 @@ CodeGenerator::visitTypeOfV(LTypeOfV *lir)
if (!addOutOfLineCode(ool))
return false;
JSRuntime *rt = gen->compartment->rt;
JSRuntime *rt = GetIonContext()->runtime;
// Jump to the OOL path if the value is an object. Objects are complicated
// since they may have a typeof hook.
@ -7201,7 +7201,7 @@ CodeGenerator::visitAsmJSVoidReturn(LAsmJSVoidReturn *lir)
bool
CodeGenerator::visitAsmJSCheckOverRecursed(LAsmJSCheckOverRecursed *lir)
{
uintptr_t *limitAddr = &gen->compartment->rt->mainThread.nativeStackLimit;
uintptr_t *limitAddr = &GetIonContext()->runtime->mainThread.nativeStackLimit;
masm.branchPtr(Assembler::AboveOrEqual,
AbsoluteAddress(limitAddr),
StackPointer,

View file

@ -29,7 +29,7 @@ class CompilerRoot : public CompilerRootNode
: CompilerRootNode(NULL)
{
if (ptr) {
JS_ASSERT(!UninlinedIsInsideNursery(GetIonContext()->compartment->rt, ptr));
JS_ASSERT(!UninlinedIsInsideNursery(GetIonContext()->runtime, ptr));
setRoot(ptr);
}
}

View file

@ -123,8 +123,8 @@ IonContext::IonContext(JSContext *cx, TempAllocator *temp)
SetIonContext(this);
}
IonContext::IonContext(JSCompartment *comp, TempAllocator *temp)
: runtime(comp->rt),
IonContext::IonContext(JSRuntime *rt, JSCompartment *comp, TempAllocator *temp)
: runtime(rt),
cx(NULL),
compartment(comp),
temp(temp),
@ -550,7 +550,7 @@ void
IonCode::writeBarrierPre(IonCode *code)
{
#ifdef JSGC_INCREMENTAL
if (!code || !code->runtime()->needsBarrier())
if (!code || !code->runtimeFromMainThread()->needsBarrier())
return;
Zone *zone = code->zone();
@ -898,8 +898,9 @@ IonScript::purgeCaches(Zone *zone)
if (invalidated())
return;
IonContext ictx(zone->rt);
AutoFlushCache afc("purgeCaches", zone->rt->ionRuntime());
JSRuntime *rt = zone->runtimeFromMainThread();
IonContext ictx(rt);
AutoFlushCache afc("purgeCaches", rt->ionRuntime());
for (size_t i = 0; i < numCaches(); i++)
getCache(i).reset();
}
@ -937,11 +938,12 @@ IonScript::detachDependentAsmJSModules(FreeOp *fop) {
void
ion::ToggleBarriers(JS::Zone *zone, bool needs)
{
IonContext ictx(zone->rt);
if (!zone->rt->hasIonRuntime())
JSRuntime *rt = zone->runtimeFromMainThread();
IonContext ictx(rt);
if (!rt->hasIonRuntime())
return;
AutoFlushCache afc("ToggleBarriers", zone->rt->ionRuntime());
AutoFlushCache afc("ToggleBarriers", rt->ionRuntime());
for (gc::CellIterUnderGC i(zone, gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
JSScript *script = i.get<JSScript>();
if (script->hasIonScript())
@ -2115,8 +2117,8 @@ ion::InvalidateAll(FreeOp *fop, Zone *zone)
for (JitActivationIterator iter(fop->runtime()); !iter.done(); ++iter) {
if (iter.activation()->compartment()->zone() == zone) {
IonContext ictx(zone->rt);
AutoFlushCache afc("InvalidateAll", zone->rt->ionRuntime());
IonContext ictx(fop->runtime());
AutoFlushCache afc("InvalidateAll", fop->runtime()->ionRuntime());
IonSpew(IonSpew_Invalidate, "Invalidating all frames for GC");
InvalidateActivation(fop, iter.jitTop(), true);
}

View file

@ -246,7 +246,7 @@ class IonContext
{
public:
IonContext(JSContext *cx, TempAllocator *temp);
IonContext(JSCompartment *comp, TempAllocator *temp);
IonContext(JSRuntime *rt, JSCompartment *comp, TempAllocator *temp);
IonContext(JSRuntime *rt);
~IonContext();
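IonContext construction now takes the runtime explicitly rather than deriving it from comp->rt, so compilation contexts no longer reach through a compartment to the runtime. Call sites change mechanically, as in the worker-thread hunks later in this patch:

    // Before:
    ion::IonContext ictx(ionBuilder->script()->compartment(), &ionBuilder->temp());
    // After: the compiling thread supplies the runtime it already holds.
    ion::IonContext ictx(runtime, ionBuilder->script()->compartment(),
                         &ionBuilder->temp());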

View file

@ -478,7 +478,7 @@ MacroAssembler::newGCThing(const Register &result, gc::AllocKind allocKind, Labe
#ifdef JS_GC_ZEAL
// Don't execute the inline path if gcZeal is active.
movePtr(ImmWord(zone->rt), result);
movePtr(ImmWord(GetIonContext()->runtime), result);
loadPtr(Address(result, offsetof(JSRuntime, gcZeal_)), result);
branch32(Assembler::NotEqual, result, Imm32(0), fail);
#endif
@ -489,7 +489,7 @@ MacroAssembler::newGCThing(const Register &result, gc::AllocKind allocKind, Labe
jump(fail);
#ifdef JSGC_GENERATIONAL
Nursery &nursery = zone->rt->gcNursery;
Nursery &nursery = GetIonContext()->runtime->gcNursery;
if (nursery.isEnabled() && allocKind <= gc::FINALIZE_OBJECT_LAST) {
// Inline Nursery::allocate. No explicit check for nursery.isEnabled()
// is needed, as the comparison with the nursery's end will always fail
@ -706,7 +706,7 @@ MacroAssembler::checkInterruptFlagsPar(const Register &tempReg,
{
JSCompartment *compartment = GetIonContext()->compartment;
void *interrupt = (void*)&compartment->rt->interrupt;
void *interrupt = (void*)&GetIonContext()->runtime->interrupt;
movePtr(ImmWord(interrupt), tempReg);
load32(Address(tempReg, 0), tempReg);
branchTest32(Assembler::NonZero, tempReg, tempReg, fail);

View file

@ -2722,7 +2722,7 @@ LIRGenerator::visitFunctionBoundary(MFunctionBoundary *ins)
return false;
// If slow assertions are enabled, then this node will result in a callVM
// out to a C++ function for the assertions, so we will need a safepoint.
return !gen->compartment->rt->spsProfiler.slowAssertionsEnabled() ||
return !GetIonContext()->runtime->spsProfiler.slowAssertionsEnabled() ||
assignSafepoint(lir, ins);
}

View file

@ -60,7 +60,7 @@ class MIRGenerator
return compartment->ionCompartment();
}
IonRuntime *ionRuntime() const {
return compartment->rt->ionRuntime();
return GetIonContext()->runtime->ionRuntime();
}
CompileInfo &info() {
return *info_;
@ -81,7 +81,7 @@ class MIRGenerator
}
bool instrumentedProfiling() {
return compartment->rt->spsProfiler.enabled();
return GetIonContext()->runtime->spsProfiler.enabled();
}
// Whether the main thread is trying to cancel this build.

View file

@ -132,7 +132,7 @@ IonBailoutIterator::IonBailoutIterator(const JitActivationIterator &activations,
// Compute the snapshot offset from the bailout ID.
JitActivation *activation = activations.activation()->asJit();
JSRuntime *rt = activation->compartment()->rt;
JSRuntime *rt = activation->compartment()->runtimeFromMainThread();
IonCode *code = rt->ionRuntime()->getBailoutTable(bailout->frameClass());
uintptr_t tableOffset = bailout->tableOffset();
uintptr_t tableStart = reinterpret_cast<uintptr_t>(code->raw());

View file

@ -1722,7 +1722,7 @@ CodeGeneratorARM::visitInterruptCheck(LInterruptCheck *lir)
if (!ool)
return false;
void *interrupt = (void*)&gen->compartment->rt->interrupt;
void *interrupt = (void*)&GetIonContext()->runtime->interrupt;
masm.load32(AbsoluteAddress(interrupt), lr);
masm.ma_cmp(lr, Imm32(0));
masm.ma_b(ool->entry(), Assembler::NonZero);

View file

@ -2965,7 +2965,7 @@ MacroAssemblerARMCompat::storeTypeTag(ImmTag tag, Register base, Register index,
void
MacroAssemblerARMCompat::linkExitFrame() {
uint8_t *dest = ((uint8_t*)GetIonContext()->compartment->rt) + offsetof(JSRuntime, mainThread.ionTop);
uint8_t *dest = ((uint8_t*)GetIonContext()->runtime) + offsetof(JSRuntime, mainThread.ionTop);
movePtr(ImmWord(dest), ScratchRegister);
ma_str(StackPointer, Operand(ScratchRegister, 0));
}

View file

@ -46,7 +46,7 @@ CodeGeneratorShared::CodeGeneratorShared(MIRGenerator *gen, LIRGraph *graph, Mac
pushedArgs_(0),
#endif
lastOsiPointOffset_(0),
sps_(&gen->compartment->rt->spsProfiler, &lastPC_),
sps_(&GetIonContext()->runtime->spsProfiler, &lastPC_),
osrEntryOffset_(0),
skipArgCheckEntryOffset_(0),
frameDepth_(graph->localSlotCount() * sizeof(STACK_SLOT_SIZE) +

View file

@ -293,7 +293,7 @@ CodeGeneratorX64::visitInterruptCheck(LInterruptCheck *lir)
if (!ool)
return false;
void *interrupt = (void*)&gen->compartment->rt->interrupt;
void *interrupt = (void*)&GetIonContext()->runtime->interrupt;
masm.movq(ImmWord(interrupt), ScratchReg);
masm.cmpl(Operand(ScratchReg, 0), Imm32(0));
masm.j(Assembler::NonZero, ool->entry());

View file

@ -85,7 +85,7 @@ IonBailoutIterator::IonBailoutIterator(const JitActivationIterator &activations,
// Compute the snapshot offset from the bailout ID.
JitActivation *activation = activations.activation()->asJit();
JSRuntime *rt = activation->compartment()->rt;
JSRuntime *rt = activation->compartment()->runtimeFromMainThread();
IonCode *code = rt->ionRuntime()->getBailoutTable(bailout->frameClass());
uintptr_t tableOffset = bailout->tableOffset();
uintptr_t tableStart = reinterpret_cast<uintptr_t>(code->raw());

View file

@ -275,7 +275,7 @@ CodeGeneratorX86::visitInterruptCheck(LInterruptCheck *lir)
if (!ool)
return false;
void *interrupt = (void*)&gen->compartment->rt->interrupt;
void *interrupt = (void*)&GetIonContext()->runtime->interrupt;
masm.cmpl(Operand(interrupt), Imm32(0));
masm.j(Assembler::NonZero, ool->entry());
masm.bind(ool->rejoin());

View file

@ -950,8 +950,8 @@ class MacroAssemblerX86 : public MacroAssemblerX86Shared
// Save an exit frame (which must be aligned to the stack pointer) to
// ThreadData::ionTop of the main thread.
void linkExitFrame() {
JSCompartment *compartment = GetIonContext()->compartment;
movl(StackPointer, Operand(&compartment->rt->mainThread.ionTop));
JSRuntime *runtime = GetIonContext()->runtime;
movl(StackPointer, Operand(&runtime->mainThread.ionTop));
}
void callWithExitFrame(IonCode *target, Register dynStack) {

View file

@ -793,7 +793,7 @@ static void
StartRequest(JSContext *cx)
{
JSRuntime *rt = cx->runtime();
rt->assertValidThread();
JS_ASSERT(CurrentThreadCanAccessRuntime(rt));
if (rt->requestDepth) {
rt->requestDepth++;
@ -810,7 +810,8 @@ static void
StopRequest(JSContext *cx)
{
JSRuntime *rt = cx->runtime();
rt->assertValidThread();
JS_ASSERT(CurrentThreadCanAccessRuntime(rt));
JS_ASSERT(rt->requestDepth != 0);
if (rt->requestDepth != 1) {
rt->requestDepth--;
@ -847,7 +848,7 @@ JS_PUBLIC_API(JSBool)
JS_IsInRequest(JSRuntime *rt)
{
#ifdef JS_THREADSAFE
rt->assertValidThread();
JS_ASSERT(CurrentThreadCanAccessRuntime(rt));
return rt->requestDepth != 0;
#else
return false;
@ -3049,7 +3050,7 @@ JS_IsNative(JSObject *obj)
JS_PUBLIC_API(JSRuntime *)
JS_GetObjectRuntime(JSObject *obj)
{
return obj->compartment()->rt;
return obj->compartment()->runtimeFromMainThread();
}
JS_PUBLIC_API(JSBool)
@ -6599,7 +6600,10 @@ JS_SetRuntimeThread(JSRuntime *rt)
extern JS_NEVER_INLINE JS_PUBLIC_API(void)
JS_AbortIfWrongThread(JSRuntime *rt)
{
rt->abortIfWrongThread();
if (!CurrentThreadCanAccessRuntime(rt))
MOZ_CRASH();
if (!js::TlsPerThreadData.get()->associatedWith(rt))
MOZ_CRASH();
}
#ifdef JS_GC_ZEAL

View file

@ -1304,7 +1304,7 @@ JS::AutoCheckRequestDepth::AutoCheckRequestDepth(JSContext *cx)
: cx(cx)
{
JS_ASSERT(cx->runtime()->requestDepth || cx->runtime()->isHeapBusy());
cx->runtime()->assertValidThread();
JS_ASSERT(CurrentThreadCanAccessRuntime(cx->runtime()));
cx->runtime()->checkRequestDepth++;
}
@ -1313,7 +1313,7 @@ JS::AutoCheckRequestDepth::AutoCheckRequestDepth(ContextFriendFields *cxArg)
{
if (cx) {
JS_ASSERT(cx->runtime()->requestDepth || cx->runtime()->isHeapBusy());
cx->runtime()->assertValidThread();
JS_ASSERT(CurrentThreadCanAccessRuntime(cx->runtime()));
cx->runtime()->checkRequestDepth++;
}
}

View file

@ -54,14 +54,14 @@ class CompartmentChecker
/* Note: should only be used when neither c1 nor c2 may be the default compartment. */
static void check(JSCompartment *c1, JSCompartment *c2) {
JS_ASSERT(c1 != c1->rt->atomsCompartment);
JS_ASSERT(c2 != c2->rt->atomsCompartment);
JS_ASSERT(c1 != c1->runtimeFromMainThread()->atomsCompartment);
JS_ASSERT(c2 != c2->runtimeFromMainThread()->atomsCompartment);
if (c1 != c2)
fail(c1, c2);
}
void check(JSCompartment *c) {
if (c && c != compartment->rt->atomsCompartment) {
if (c && c != compartment->runtimeFromMainThread()->atomsCompartment) {
if (!compartment)
compartment = c;
else if (c != compartment)

View file

@ -36,9 +36,9 @@ using namespace js::gc;
using mozilla::DebugOnly;
JSCompartment::JSCompartment(Zone *zone, const JS::CompartmentOptions &options = JS::CompartmentOptions())
: zone_(zone),
options_(options),
rt(zone->rt),
: options_(options),
zone_(zone),
runtime_(zone->runtimeFromMainThread()),
principals(NULL),
isSystem(false),
marked(true),
@ -52,12 +52,12 @@ JSCompartment::JSCompartment(Zone *zone, const JS::CompartmentOptions &options =
data(NULL),
objectMetadataCallback(NULL),
lastAnimationTime(0),
regExps(rt),
regExps(runtime_),
propertyTree(thisForCtor()),
gcIncomingGrayPointers(NULL),
gcLiveArrayBuffers(NULL),
gcWeakMapList(NULL),
debugModeBits(rt->debugMode ? DebugFromC : 0),
debugModeBits(runtime_->debugMode ? DebugFromC : 0),
rngState(0),
watchpointMap(NULL),
scriptCountsMap(NULL),
@ -69,7 +69,7 @@ JSCompartment::JSCompartment(Zone *zone, const JS::CompartmentOptions &options =
, ionCompartment_(NULL)
#endif
{
rt->numCompartments++;
runtime_->numCompartments++;
}
JSCompartment::~JSCompartment()
@ -84,7 +84,7 @@ JSCompartment::~JSCompartment()
js_delete(debugScopes);
js_free(enumerators);
rt->numCompartments--;
runtime_->numCompartments--;
}
bool
@ -196,6 +196,8 @@ JSCompartment::putWrapper(const CrossCompartmentKey &wrapped, const js::Value &w
bool
JSCompartment::wrap(JSContext *cx, MutableHandleValue vp, HandleObject existingArg)
{
JSRuntime *rt = runtimeFromMainThread();
JS_ASSERT(cx->compartment() == this);
JS_ASSERT(this != rt->atomsCompartment);
JS_ASSERT_IF(existingArg, existingArg->compartment() == cx->compartment());
@ -521,6 +523,8 @@ JSCompartment::sweep(FreeOp *fop, bool releaseTypes)
/* This function includes itself in PHASE_SWEEP_TABLES. */
sweepCrossCompartmentWrappers();
JSRuntime *rt = runtimeFromMainThread();
{
gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_TABLES);
@ -582,6 +586,8 @@ JSCompartment::sweep(FreeOp *fop, bool releaseTypes)
void
JSCompartment::sweepCrossCompartmentWrappers()
{
JSRuntime *rt = runtimeFromMainThread();
gcstats::AutoPhase ap1(rt->gcStats, gcstats::PHASE_SWEEP_TABLES);
gcstats::AutoPhase ap2(rt->gcStats, gcstats::PHASE_SWEEP_TABLES_WRAPPER);
@ -609,7 +615,7 @@ JSCompartment::purge()
bool
JSCompartment::hasScriptsOnStack()
{
for (ActivationIterator iter(rt); !iter.done(); ++iter) {
for (ActivationIterator iter(runtimeFromMainThread()); !iter.done(); ++iter) {
if (iter.activation()->compartment() == this)
return true;
}
@ -729,6 +735,8 @@ JSCompartment::setDebugModeFromC(JSContext *cx, bool b, AutoDebugModeGC &dmgc)
void
JSCompartment::updateForDebugMode(FreeOp *fop, AutoDebugModeGC &dmgc)
{
JSRuntime *rt = runtimeFromMainThread();
for (ContextIter acx(rt); !acx.done(); acx.next()) {
if (acx->compartment() == this)
acx->updateJITEnabled();
@ -789,7 +797,7 @@ JSCompartment::removeDebuggee(FreeOp *fop,
js::GlobalObject *global,
js::GlobalObjectSet::Enum *debuggeesEnum)
{
AutoDebugModeGC dmgc(rt);
AutoDebugModeGC dmgc(fop->runtime());
return removeDebuggee(fop, global, dmgc, debuggeesEnum);
}
@ -828,7 +836,7 @@ JSCompartment::clearBreakpointsIn(FreeOp *fop, js::Debugger *dbg, JSObject *hand
void
JSCompartment::clearTraps(FreeOp *fop)
{
MinorGC(rt, JS::gcreason::EVICT_NURSERY);
MinorGC(fop->runtime(), JS::gcreason::EVICT_NURSERY);
for (gc::CellIter i(zone(), gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
JSScript *script = i.get<JSScript>();
if (script->compartment() == this && script->hasAnyBreakpointsOrStepMode())
@ -863,5 +871,5 @@ JSCompartment::sizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf, size_t *c
void
JSCompartment::adoptWorkerAllocator(Allocator *workerAllocator)
{
zone()->allocator.arenas.adoptArenas(rt, &workerAllocator->arenas);
zone()->allocator.arenas.adoptArenas(runtimeFromMainThread(), &workerAllocator->arenas);
}

View file

@ -122,10 +122,13 @@ class WeakMapBase;
struct JSCompartment
{
JS::Zone *zone_;
JS::CompartmentOptions options_;
JSRuntime *rt;
private:
JS::Zone *zone_;
JSRuntime *runtime_;
public:
JSPrincipals *principals;
bool isSystem;
bool marked;
@ -154,6 +157,17 @@ struct JSCompartment
JS::CompartmentOptions &options() { return options_; }
const JS::CompartmentOptions &options() const { return options_; }
JSRuntime *runtimeFromMainThread() {
JS_ASSERT(CurrentThreadCanAccessRuntime(runtime_));
return runtime_;
}
// Note: Unrestricted access to the compartment's runtime from an arbitrary
// thread can easily lead to races. Use this method very carefully.
JSRuntime *runtimeFromAnyThread() const {
return runtime_;
}
/*
* Nb: global_ might be NULL, if (a) it's the atoms compartment, or (b) the
* compartment's global has been collected. The latter can happen if e.g.

View file

@ -218,12 +218,12 @@ JS_SetCompartmentPrincipals(JSCompartment *compartment, JSPrincipals *principals
// Any compartment with the trusted principals -- and there can be
// multiple -- is a system compartment.
JSPrincipals *trusted = compartment->rt->trustedPrincipals();
JSPrincipals *trusted = compartment->runtimeFromMainThread()->trustedPrincipals();
bool isSystem = principals && principals == trusted;
// Clear out the old principals, if any.
if (compartment->principals) {
JS_DropPrincipals(compartment->rt, compartment->principals);
JS_DropPrincipals(compartment->runtimeFromMainThread(), compartment->principals);
compartment->principals = NULL;
// We'd like to assert that our new principals is always same-origin
// with the old one, but JSPrincipals doesn't give us a way to do that.
@ -337,7 +337,7 @@ js::IsSystemZone(Zone *zone)
JS_FRIEND_API(bool)
js::IsAtomsCompartment(JSCompartment *comp)
{
return comp == comp->rt->atomsCompartment;
return comp == comp->runtimeFromAnyThread()->atomsCompartment;
}
JS_FRIEND_API(bool)
@ -920,7 +920,7 @@ JS::IncrementalObjectBarrier(JSObject *obj)
if (!obj)
return;
JS_ASSERT(!obj->zone()->rt->isHeapMajorCollecting());
JS_ASSERT(!obj->zone()->runtimeFromMainThread()->isHeapMajorCollecting());
AutoMarkInDeadZone amn(obj->zone());
@ -938,7 +938,7 @@ JS::IncrementalReferenceBarrier(void *ptr, JSGCTraceKind kind)
? static_cast<JSObject *>(cell)->zone()
: cell->tenuredZone();
JS_ASSERT(!zone->rt->isHeapMajorCollecting());
JS_ASSERT(!zone->runtimeFromMainThread()->isHeapMajorCollecting());
AutoMarkInDeadZone amn(zone);

View file

@ -259,7 +259,7 @@ ArenaHeader::checkSynchronizedWithFreeList() const
* list in the zone can mutate at any moment. We cannot do any
* checks in this case.
*/
if (IsBackgroundFinalized(getAllocKind()) && zone->rt->gcHelperThread.onBackgroundThread())
if (IsBackgroundFinalized(getAllocKind()) && zone->runtimeFromAnyThread()->gcHelperThread.onBackgroundThread())
return;
FreeSpan firstSpan = FreeSpan::decodeOffsets(arenaAddress(), firstFreeSpanOffsets);
@ -669,7 +669,7 @@ Chunk::init(JSRuntime *rt)
static inline Chunk **
GetAvailableChunkList(Zone *zone)
{
JSRuntime *rt = zone->rt;
JSRuntime *rt = zone->runtimeFromAnyThread();
return zone->isSystem
? &rt->gcSystemAvailableChunkListHead
: &rt->gcUserAvailableChunkListHead;
@ -768,7 +768,7 @@ Chunk::allocateArena(Zone *zone, AllocKind thingKind)
{
JS_ASSERT(hasAvailableArenas());
JSRuntime *rt = zone->rt;
JSRuntime *rt = zone->runtimeFromAnyThread();
if (!rt->isHeapMinorCollecting() && rt->gcBytes >= rt->gcMaxBytes)
return NULL;
@ -804,7 +804,7 @@ Chunk::releaseArena(ArenaHeader *aheader)
JS_ASSERT(aheader->allocated());
JS_ASSERT(!aheader->hasDelayedMarking);
Zone *zone = aheader->zone;
JSRuntime *rt = zone->rt;
JSRuntime *rt = zone->runtimeFromAnyThread();
AutoLockGC maybeLock;
if (rt->gcHelperThread.sweeping())
maybeLock.lock(rt);
@ -836,7 +836,7 @@ Chunk::releaseArena(ArenaHeader *aheader)
static Chunk *
PickChunk(Zone *zone)
{
JSRuntime *rt = zone->rt;
JSRuntime *rt = zone->runtimeFromAnyThread();
Chunk **listHeadp = GetAvailableChunkList(zone);
Chunk *chunk = *listHeadp;
if (chunk)
@ -1110,7 +1110,7 @@ typedef RootedValueMap::Enum RootEnum;
static size_t
ComputeTriggerBytes(Zone *zone, size_t lastBytes, size_t maxBytes, JSGCInvocationKind gckind)
{
size_t base = gckind == GC_SHRINK ? lastBytes : Max(lastBytes, zone->rt->gcAllocationThreshold);
size_t base = gckind == GC_SHRINK ? lastBytes : Max(lastBytes, zone->runtimeFromMainThread()->gcAllocationThreshold);
float trigger = float(base) * zone->gcHeapGrowthFactor;
return size_t(Min(float(maxBytes), trigger));
}
@ -1126,6 +1126,7 @@ Zone::setGCLastBytes(size_t lastBytes, JSGCInvocationKind gckind)
* lastBytes > highFrequencyHighLimit: 150%
* otherwise: linear interpolation between 150% and 300% based on lastBytes
*/
JSRuntime *rt = runtimeFromMainThread();
if (!rt->gcDynamicHeapGrowth) {
gcHeapGrowthFactor = 3.0;
@ -1161,7 +1162,7 @@ Zone::reduceGCTriggerBytes(size_t amount)
{
JS_ASSERT(amount > 0);
JS_ASSERT(gcTriggerBytes >= amount);
if (gcTriggerBytes - amount < rt->gcAllocationThreshold * gcHeapGrowthFactor)
if (gcTriggerBytes - amount < runtimeFromAnyThread()->gcAllocationThreshold * gcHeapGrowthFactor)
return;
gcTriggerBytes -= amount;
}
@ -1216,7 +1217,7 @@ ArenaLists::allocateFromArenaInline(Zone *zone, AllocKind thingKind)
* background finalization runs and can modify head or cursor at any
* moment. So we always allocate a new arena in that case.
*/
maybeLock.lock(zone->rt);
maybeLock.lock(zone->runtimeFromMainThread());
if (*bfs == BFS_RUN) {
JS_ASSERT(!*al->cursor);
chunk = PickChunk(zone);
@ -1256,9 +1257,9 @@ ArenaLists::allocateFromArenaInline(Zone *zone, AllocKind thingKind)
if (JS_UNLIKELY(zone->wasGCStarted())) {
if (zone->needsBarrier()) {
aheader->allocatedDuringIncremental = true;
zone->rt->gcMarker.delayMarkingArena(aheader);
zone->runtimeFromMainThread()->gcMarker.delayMarkingArena(aheader);
} else if (zone->isGCSweeping()) {
PushArenaAllocatedDuringSweep(zone->rt, aheader);
PushArenaAllocatedDuringSweep(zone->runtimeFromMainThread(), aheader);
}
}
return freeLists[thingKind].infallibleAllocate(Arena::thingSize(thingKind));
@ -1266,7 +1267,7 @@ ArenaLists::allocateFromArenaInline(Zone *zone, AllocKind thingKind)
/* Make sure we hold the GC lock before we call PickChunk. */
if (!maybeLock.locked())
maybeLock.lock(zone->rt);
maybeLock.lock(zone->runtimeFromAnyThread());
chunk = PickChunk(zone);
if (!chunk)
return NULL;
@ -1289,9 +1290,9 @@ ArenaLists::allocateFromArenaInline(Zone *zone, AllocKind thingKind)
if (JS_UNLIKELY(zone->wasGCStarted())) {
if (zone->needsBarrier()) {
aheader->allocatedDuringIncremental = true;
zone->rt->gcMarker.delayMarkingArena(aheader);
zone->runtimeFromMainThread()->gcMarker.delayMarkingArena(aheader);
} else if (zone->isGCSweeping()) {
PushArenaAllocatedDuringSweep(zone->rt, aheader);
PushArenaAllocatedDuringSweep(zone->runtimeFromMainThread(), aheader);
}
}
aheader->next = al->head;
@ -1506,14 +1507,13 @@ template <AllowGC allowGC>
ArenaLists::refillFreeList(ThreadSafeContext *cx, AllocKind thingKind)
{
JS_ASSERT(cx->allocator()->arenas.freeLists[thingKind].isEmpty());
JS_ASSERT(!cx->isHeapBusy());
Zone *zone = cx->allocator()->zone_;
JSRuntime *rt = zone->rt;
JS_ASSERT(!rt->isHeapBusy());
bool runGC = rt->gcIncrementalState != NO_INCREMENTAL &&
zone->gcBytes > zone->gcTriggerBytes &&
cx->allowGC() && allowGC;
bool runGC = cx->allowGC() && allowGC &&
cx->asJSContext()->runtime()->gcIncrementalState != NO_INCREMENTAL &&
zone->gcBytes > zone->gcTriggerBytes;
for (;;) {
if (JS_UNLIKELY(runGC)) {
@ -1534,15 +1534,15 @@ ArenaLists::refillFreeList(ThreadSafeContext *cx, AllocKind thingKind)
*/
for (bool secondAttempt = false; ; secondAttempt = true) {
void *thing = cx->allocator()->arenas.allocateFromArenaInline(zone, thingKind);
if (JS_LIKELY(!!thing) || cx->isForkJoinSlice())
if (JS_LIKELY(!!thing) || !cx->isJSContext())
return thing;
if (secondAttempt)
break;
rt->gcHelperThread.waitBackgroundSweepEnd();
cx->asJSContext()->runtime()->gcHelperThread.waitBackgroundSweepEnd();
}
if (!allowGC)
if (!cx->allowGC() || !allowGC)
return NULL;
/*
@ -1929,7 +1929,7 @@ js::TriggerGC(JSRuntime *rt, JS::gcreason::Reason reason)
return;
}
rt->assertValidThread();
JS_ASSERT(CurrentThreadCanAccessRuntime(rt));
if (rt->isHeapBusy())
return;
@ -1950,8 +1950,7 @@ js::TriggerZoneGC(Zone *zone, JS::gcreason::Reason reason)
return;
}
JSRuntime *rt = zone->rt;
rt->assertValidThread();
JSRuntime *rt = zone->runtimeFromMainThread();
if (rt->isHeapBusy())
return;
@ -1975,7 +1974,7 @@ void
js::MaybeGC(JSContext *cx)
{
JSRuntime *rt = cx->runtime();
rt->assertValidThread();
JS_ASSERT(CurrentThreadCanAccessRuntime(rt));
if (rt->gcZeal() == ZealAllocValue || rt->gcZeal() == ZealPokeValue) {
JS::PrepareForFullGC(rt);
@ -2516,7 +2515,7 @@ ReleaseObservedTypes(JSRuntime *rt)
static void
SweepCompartments(FreeOp *fop, Zone *zone, bool keepAtleastOne, bool lastGC)
{
JSRuntime *rt = zone->rt;
JSRuntime *rt = zone->runtimeFromMainThread();
JSDestroyCompartmentCallback callback = rt->destroyCompartmentCallback;
JSCompartment **read = zone->compartments.begin();
@ -2603,10 +2602,11 @@ PurgeRuntime(JSRuntime *rt)
static bool
ShouldPreserveJITCode(JSCompartment *comp, int64_t currentTime)
{
if (comp->rt->gcShouldCleanUpEverything || !comp->zone()->types.inferenceEnabled)
JSRuntime *rt = comp->runtimeFromMainThread();
if (rt->gcShouldCleanUpEverything || !comp->zone()->types.inferenceEnabled)
return false;
if (comp->rt->alwaysPreserveCode)
if (rt->alwaysPreserveCode)
return true;
if (comp->lastAnimationTime + PRMJ_USEC_PER_SEC >= currentTime &&
comp->lastCodeRelease + (PRMJ_USEC_PER_SEC * 300) >= currentTime)
@ -3300,6 +3300,7 @@ Zone::findOutgoingEdges(ComponentFinder<JS::Zone> &finder)
* Any compartment may have a pointer to an atom in the atoms
* compartment, and these aren't in the cross compartment map.
*/
JSRuntime *rt = runtimeFromMainThread();
if (rt->atomsCompartment->zone()->isGCMarking())
finder.addEdgeTo(rt->atomsCompartment->zone());
@ -5071,7 +5072,7 @@ AutoMaybeTouchDeadZones::AutoMaybeTouchDeadZones(JSContext *cx)
}
AutoMaybeTouchDeadZones::AutoMaybeTouchDeadZones(JSObject *obj)
: runtime(obj->compartment()->rt),
: runtime(obj->compartment()->runtimeFromMainThread()),
markCount(runtime->gcObjectsMarkedInDeadZones),
inIncremental(JS::IsIncrementalGCInProgress(runtime)),
manipulatingDeadZones(runtime->gcManipulatingDeadZones)
@ -5096,7 +5097,7 @@ AutoSuppressGC::AutoSuppressGC(JSContext *cx)
}
AutoSuppressGC::AutoSuppressGC(JSCompartment *comp)
: suppressGC_(comp->rt->mainThread.suppressGC)
: suppressGC_(comp->runtimeFromMainThread()->mainThread.suppressGC)
{
suppressGC_++;
}

View file

@ -32,8 +32,9 @@ struct AutoMarkInDeadZone
: zone(zone),
scheduled(zone->scheduledForDestruction)
{
if (zone->rt->gcManipulatingDeadZones && zone->scheduledForDestruction) {
zone->rt->gcObjectsMarkedInDeadZones++;
JSRuntime *rt = zone->runtimeFromMainThread();
if (rt->gcManipulatingDeadZones && zone->scheduledForDestruction) {
rt->gcObjectsMarkedInDeadZones++;
zone->scheduledForDestruction = false;
}
}
@ -90,7 +91,7 @@ GetGCThingTraceKind(const void *thing)
JS_ASSERT(thing);
const Cell *cell = static_cast<const Cell *>(thing);
#ifdef JSGC_GENERATIONAL
if (IsInsideNursery(cell->runtime(), cell))
if (IsInsideNursery(cell->runtimeFromMainThread(), cell))
return JSTRACE_OBJECT;
#endif
return MapAllocToTraceKind(cell->tenuredGetAllocKind());
@ -237,12 +238,12 @@ class CellIterUnderGC : public CellIterImpl
{
public:
CellIterUnderGC(JS::Zone *zone, AllocKind kind) {
JS_ASSERT(zone->rt->isHeapBusy());
JS_ASSERT(zone->runtimeFromAnyThread()->isHeapBusy());
init(zone, kind);
}
CellIterUnderGC(ArenaHeader *aheader) {
JS_ASSERT(aheader->zone->rt->isHeapBusy());
JS_ASSERT(aheader->zone->runtimeFromAnyThread()->isHeapBusy());
init(aheader);
}
};
@ -268,16 +269,16 @@ class CellIter : public CellIterImpl
if (IsBackgroundFinalized(kind) &&
zone->allocator.arenas.needBackgroundFinalizeWait(kind))
{
gc::FinishBackgroundFinalize(zone->rt);
gc::FinishBackgroundFinalize(zone->runtimeFromMainThread());
}
if (lists->isSynchronizedFreeList(kind)) {
lists = NULL;
} else {
JS_ASSERT(!zone->rt->isHeapBusy());
JS_ASSERT(!zone->runtimeFromMainThread()->isHeapBusy());
lists->copyFreeListToArena(kind);
}
#ifdef DEBUG
counter = &zone->rt->noGCOrAllocationCheck;
counter = &zone->runtimeFromAnyThread()->noGCOrAllocationCheck;
++*counter;
#endif
init(zone, kind);

View file

@ -6545,7 +6545,8 @@ TypeCompartment::sweepCompilerOutputs(FreeOp *fop, bool discardConstraints)
void
JSCompartment::sweepNewTypeObjectTable(TypeObjectSet &table)
{
gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_TABLES_TYPE_OBJECT);
gcstats::AutoPhase ap(runtimeFromMainThread()->gcStats,
gcstats::PHASE_SWEEP_TABLES_TYPE_OBJECT);
JS_ASSERT(zone()->isGCSweeping());
if (table.initialized()) {
@ -6856,7 +6857,7 @@ TypeZone::sweep(FreeOp *fop, bool releaseTypes)
{
JS_ASSERT(zone()->isGCSweeping());
JSRuntime *rt = zone()->rt;
JSRuntime *rt = fop->runtime();
/*
* Clear the analysis pool, but don't release its data yet. While

View file

@ -1570,7 +1570,7 @@ inline void
TypeObject::writeBarrierPre(TypeObject *type)
{
#ifdef JSGC_INCREMENTAL
if (!type || !type->runtime()->needsBarrier())
if (!type || !type->runtimeFromAnyThread()->needsBarrier())
return;
JS::Zone *zone = type->zone();
@ -1599,7 +1599,7 @@ inline void
TypeNewScript::writeBarrierPre(TypeNewScript *newScript)
{
#ifdef JSGC_INCREMENTAL
if (!newScript || !newScript->fun->runtime()->needsBarrier())
if (!newScript || !newScript->fun->runtimeFromAnyThread()->needsBarrier())
return;
JS::Zone *zone = newScript->fun->zone();

View file

@ -829,10 +829,8 @@ PropertyIteratorObject::trace(JSTracer *trc, JSObject *obj)
void
PropertyIteratorObject::finalize(FreeOp *fop, JSObject *obj)
{
if (NativeIterator *ni = obj->as<PropertyIteratorObject>().getNativeIterator()) {
obj->as<PropertyIteratorObject>().setNativeIterator(NULL);
if (NativeIterator *ni = obj->as<PropertyIteratorObject>().getNativeIterator())
fop->free_(ni);
}
}
Class PropertyIteratorObject::class_ = {

View file

@ -231,11 +231,11 @@ StatsCellCallback(JSRuntime *rt, void *data, void *thing, JSGCTraceKind traceKin
cStats->shapesExtraDictTables += propTableSize;
JS_ASSERT(kidsSize == 0);
} else {
if (shape->base()->getObjectParent() == shape->compartment()->maybeGlobal()) {
JSObject *parent = shape->base()->getObjectParent();
if (parent && parent->is<GlobalObject>())
cStats->gcHeapShapesTreeGlobalParented += thingSize;
} else {
else
cStats->gcHeapShapesTreeNonGlobalParented += thingSize;
}
cStats->shapesExtraTreeTables += propTableSize;
cStats->shapesExtraTreeShapeKids += kidsSize;
}

View file

@ -106,7 +106,7 @@ JSObject::finalize(js::FreeOp *fop)
JS_ASSERT(isTenured());
if (!IsBackgroundFinalized(tenuredGetAllocKind())) {
/* Assert we're on the main thread. */
fop->runtime()->assertValidThread();
JS_ASSERT(CurrentThreadCanAccessRuntime(fop->runtime()));
}
#endif
js::Class *clasp = getClass();
@ -278,7 +278,7 @@ inline void
JSObject::initDenseElements(uint32_t dstStart, const js::Value *src, uint32_t count)
{
JS_ASSERT(dstStart + count <= getDenseCapacity());
JSRuntime *rt = runtime();
JSRuntime *rt = runtimeFromMainThread();
for (uint32_t i = 0; i < count; ++i)
elements[dstStart + i].init(rt, this, js::HeapSlot::Element, dstStart + i, src[i]);
}
@ -316,7 +316,7 @@ JSObject::moveDenseElements(uint32_t dstStart, uint32_t srcStart, uint32_t count
}
} else {
memmove(elements + dstStart, elements + srcStart, count * sizeof(js::HeapSlot));
DenseRangeWriteBarrierPost(runtime(), this, dstStart, count);
DenseRangeWriteBarrierPost(runtimeFromMainThread(), this, dstStart, count);
}
}
@ -352,7 +352,7 @@ JSObject::ensureDenseInitializedLength(js::ExclusiveContext *cx, uint32_t index,
markDenseElementsNotPacked(cx);
if (initlen < index + extra) {
JSRuntime *rt = runtime();
JSRuntime *rt = runtimeFromAnyThread();
size_t offset = initlen;
for (js::HeapSlot *sp = elements + initlen;
sp != elements + (index + extra);

View file

@ -111,12 +111,12 @@ inline void
JSScript::writeBarrierPre(JSScript *script)
{
#ifdef JSGC_INCREMENTAL
if (!script || !script->runtime()->needsBarrier())
if (!script || !script->runtimeFromAnyThread()->needsBarrier())
return;
JS::Zone *zone = script->zone();
if (zone->needsBarrier()) {
JS_ASSERT(!zone->rt->isHeapMajorCollecting());
JS_ASSERT(!zone->runtimeFromMainThread()->isHeapMajorCollecting());
JSScript *tmp = script;
MarkScriptUnbarriered(zone->barrierTracer(), &tmp, "write barrier");
JS_ASSERT(tmp == script);
@ -128,12 +128,12 @@ JSScript::writeBarrierPre(JSScript *script)
js::LazyScript::writeBarrierPre(js::LazyScript *lazy)
{
#ifdef JSGC_INCREMENTAL
if (!lazy)
if (!lazy || !lazy->runtimeFromAnyThread()->needsBarrier())
return;
JS::Zone *zone = lazy->zone();
if (zone->needsBarrier()) {
JS_ASSERT(!zone->rt->isHeapMajorCollecting());
JS_ASSERT(!zone->runtimeFromMainThread()->isHeapMajorCollecting());
js::LazyScript *tmp = lazy;
MarkLazyScriptUnbarriered(zone->barrierTracer(), &tmp, "write barrier");
JS_ASSERT(tmp == lazy);

View file

@ -99,8 +99,8 @@ static void
FinishOffThreadIonCompile(ion::IonBuilder *builder)
{
JSCompartment *compartment = builder->script()->compartment();
JS_ASSERT(compartment->rt->workerThreadState);
JS_ASSERT(compartment->rt->workerThreadState->isLocked());
JS_ASSERT(compartment->runtimeFromAnyThread()->workerThreadState);
JS_ASSERT(compartment->runtimeFromAnyThread()->workerThreadState->isLocked());
compartment->ionCompartment()->finishedOffThreadCompilations().append(builder);
}
@ -116,16 +116,18 @@ CompiledScriptMatches(JSCompartment *compartment, JSScript *script, JSScript *ta
void
js::CancelOffThreadIonCompile(JSCompartment *compartment, JSScript *script)
{
if (!compartment->rt->workerThreadState)
JSRuntime *rt = compartment->runtimeFromMainThread();
if (!rt->workerThreadState)
return;
WorkerThreadState &state = *compartment->rt->workerThreadState;
WorkerThreadState &state = *rt->workerThreadState;
ion::IonCompartment *ion = compartment->ionCompartment();
if (!ion)
return;
AutoLockWorkerThreadState lock(compartment->rt);
AutoLockWorkerThreadState lock(rt);
/* Cancel any pending entries for which processing hasn't started. */
for (size_t i = 0; i < state.ionWorklist.length(); i++) {
@ -475,7 +477,7 @@ WorkerThread::handleAsmJSWorkload(WorkerThreadState &state)
state.unlock();
do {
ion::IonContext icx(asmData->mir->compartment, &asmData->mir->temp());
ion::IonContext icx(runtime, asmData->mir->compartment, &asmData->mir->temp());
int64_t before = PRMJ_Now();
@ -523,7 +525,7 @@ WorkerThread::handleIonWorkload(WorkerThreadState &state)
state.unlock();
{
ion::IonContext ictx(ionBuilder->script()->compartment(), &ionBuilder->temp());
ion::IonContext ictx(runtime, ionBuilder->script()->compartment(), &ionBuilder->temp());
ionBuilder->setBackgroundCodegen(ion::CompileBackEnd(ionBuilder));
}
state.lock();
@ -612,7 +614,7 @@ AutoPauseWorkersForGC::AutoPauseWorkersForGC(JSRuntime *rt MOZ_GUARD_OBJECT_NOTI
if (!runtime->workerThreadState)
return;
runtime->assertValidThread();
JS_ASSERT(CurrentThreadCanAccessRuntime(runtime));
WorkerThreadState &state = *runtime->workerThreadState;
if (!state.numThreads)
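
Worker threads cannot reach the runtime through the main-thread accessors, so both Ion workload handlers now pass it to IonContext explicitly; the constructor grew a leading JSRuntime* parameter, as the pairs above show. A hedged sketch of the call-site shape, where "runtime" is assumed to be the WorkerThread's runtime pointer:

    // Hedged sketch of the worker-side call shape; not verbatim.
    state.unlock();
    {
        // Supply the runtime up front; deriving it from the compartment
        // would go through a main-thread-only accessor.
        ion::IonContext ictx(runtime, ionBuilder->script()->compartment(),
                             &ionBuilder->temp());
        ionBuilder->setBackgroundCodegen(ion::CompileBackEnd(ionBuilder));
    }
    state.lock();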

View file

@@ -166,7 +166,7 @@ bool CrossCompartmentWrapper::finalizeInBackground(Value priv)
* Make the 'background-finalized-ness' of the wrapper the same as the
* wrapped object, to allow transplanting between them.
*/
if (IsInsideNursery(priv.toObject().runtime(), &priv.toObject()))
if (IsInsideNursery(priv.toObject().runtimeFromMainThread(), &priv.toObject()))
return false;
return IsBackgroundFinalized(priv.toObject().tenuredGetAllocKind());
}

View file

@@ -378,7 +378,7 @@ Debugger::~Debugger()
JS_ASSERT(debuggees.empty());
/* This always happens in the GC thread, so no locking is required. */
JS_ASSERT(object->compartment()->rt->isHeapBusy());
JS_ASSERT(object->runtimeFromMainThread()->isHeapBusy());
/*
* Since the inactive state for this link is a singleton cycle, it's always
@@ -1643,7 +1643,10 @@ Debugger::findCompartmentEdges(Zone *zone, js::gc::ComponentFinder<Zone> &finder
* This ensures that debuggers and their debuggees are finalized in the same
* group.
*/
for (Debugger *dbg = zone->rt->debuggerList.getFirst(); dbg; dbg = dbg->getNext()) {
for (Debugger *dbg = zone->runtimeFromMainThread()->debuggerList.getFirst();
dbg;
dbg = dbg->getNext())
{
Zone *w = dbg->object->zone();
if (w == zone || !w->isGCMarking())
continue;
@@ -2214,7 +2217,7 @@ Debugger::removeDebuggeeGlobal(FreeOp *fop, GlobalObject *global,
GlobalObjectSet::Enum *compartmentEnum,
GlobalObjectSet::Enum *debugEnum)
{
AutoDebugModeGC dmgc(global->compartment()->rt);
AutoDebugModeGC dmgc(fop->runtime());
return removeDebuggeeGlobal(fop, global, dmgc, compartmentEnum, debugEnum);
}

View file

@@ -117,9 +117,9 @@ ParallelBailoutRecord::addTrace(JSScript *script,
}
bool
js::InSequentialOrExclusiveParallelSection()
js::InExclusiveParallelSection()
{
return true;
return false;
}
bool
@@ -1453,7 +1453,7 @@ ForkJoinShared::executePortion(PerThreadData *perThread,
// Make a new IonContext for the slice, which is needed if we need to
// re-enter the VM.
IonContext icx(cx_->compartment(), NULL);
IonContext icx(cx_->runtime(), cx_->compartment(), NULL);
JS_ASSERT(slice.bailoutRecord->topScript == NULL);
@@ -2141,9 +2141,9 @@ parallel::SpewBailoutIR(uint32_t bblockId, uint32_t lirId,
#endif // DEBUG
bool
js::InSequentialOrExclusiveParallelSection()
js::InExclusiveParallelSection()
{
return !InParallelSection() || ForkJoinSlice::Current()->hasAcquiredContext();
return InParallelSection() && ForkJoinSlice::Current()->hasAcquiredContext();
}
bool
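
Note that the predicate flips polarity along with its name: InSequentialOrExclusiveParallelSection() answered "is it safe to act as if we own the zone?" and was trivially true in sequential code, whereas InExclusiveParallelSection() is true only inside a parallel section whose slice has acquired the context. Callers that want the old meaning now spell it out, as CurrentThreadCanAccessZone does later in this patch:

    // The old check, rewritten under the new names (sketch):
    bool canAccessZone = !InParallelSection() || InExclusiveParallelSection();
    JS_ASSERT(canAccessZone);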

View file

@@ -410,7 +410,7 @@ InParallelSection()
#endif
}
bool InSequentialOrExclusiveParallelSection();
bool InExclusiveParallelSection();
bool ParallelTestsShouldPass(JSContext *cx);

View file

@@ -161,7 +161,7 @@ js::ObjectImpl::inDictionaryMode() const
JS_ALWAYS_INLINE JS::Zone *
js::ObjectImpl::zone() const
{
JS_ASSERT(InSequentialOrExclusiveParallelSection());
JS_ASSERT(CurrentThreadCanAccessZone(shape_->zone()));
return shape_->zone();
}
@@ -171,7 +171,7 @@ js::ObjectImpl::readBarrier(ObjectImpl *obj)
#ifdef JSGC_INCREMENTAL
Zone *zone = obj->zone();
if (zone->needsBarrier()) {
MOZ_ASSERT(!zone->rt->isHeapMajorCollecting());
MOZ_ASSERT(!zone->runtimeFromMainThread()->isHeapMajorCollecting());
JSObject *tmp = obj->asObjectPtr();
MarkObjectUnbarriered(zone->barrierTracer(), &tmp, "read barrier");
MOZ_ASSERT(tmp == obj->asObjectPtr());
@@ -195,7 +195,7 @@ inline void
js::ObjectImpl::privateWriteBarrierPost(void **pprivate)
{
#ifdef JSGC_GENERATIONAL
runtime()->gcStoreBuffer.putCell(reinterpret_cast<js::gc::Cell **>(pprivate));
runtimeFromAnyThread()->gcStoreBuffer.putCell(reinterpret_cast<js::gc::Cell **>(pprivate));
#endif
}
@@ -207,12 +207,12 @@ js::ObjectImpl::writeBarrierPre(ObjectImpl *obj)
* This would normally be a null test, but TypeScript::global uses 0x1 as a
* special value.
*/
if (IsNullTaggedPointer(obj) || !obj->runtime()->needsBarrier())
if (IsNullTaggedPointer(obj) || !obj->runtimeFromMainThread()->needsBarrier())
return;
Zone *zone = obj->zone();
if (zone->needsBarrier()) {
MOZ_ASSERT(!zone->rt->isHeapMajorCollecting());
MOZ_ASSERT(!zone->runtimeFromMainThread()->isHeapMajorCollecting());
JSObject *tmp = obj->asObjectPtr();
MarkObjectUnbarriered(zone->barrierTracer(), &tmp, "write barrier");
MOZ_ASSERT(tmp == obj->asObjectPtr());
@@ -226,7 +226,7 @@ js::ObjectImpl::writeBarrierPost(ObjectImpl *obj, void *addr)
#ifdef JSGC_GENERATIONAL
if (IsNullTaggedPointer(obj))
return;
obj->runtime()->gcStoreBuffer.putCell((Cell **)addr);
obj->runtimeFromAnyThread()->gcStoreBuffer.putCell((Cell **)addr);
#endif
}
@@ -234,7 +234,7 @@ js::ObjectImpl::writeBarrierPost(ObjectImpl *obj, void *addr)
js::ObjectImpl::writeBarrierPostRelocate(ObjectImpl *obj, void *addr)
{
#ifdef JSGC_GENERATIONAL
obj->runtime()->gcStoreBuffer.putRelocatableCell((Cell **)addr);
obj->runtimeFromAnyThread()->gcStoreBuffer.putRelocatableCell((Cell **)addr);
#endif
}
@@ -242,7 +242,7 @@ js::ObjectImpl::writeBarrierPostRelocate(ObjectImpl *obj, void *addr)
js::ObjectImpl::writeBarrierPostRemove(ObjectImpl *obj, void *addr)
{
#ifdef JSGC_GENERATIONAL
obj->runtime()->gcStoreBuffer.removeRelocatableCell((Cell **)addr);
obj->runtimeFromAnyThread()->gcStoreBuffer.removeRelocatableCell((Cell **)addr);
#endif
}

View file

@@ -264,7 +264,7 @@ js::ObjectImpl::initializeSlotRange(uint32_t start, uint32_t length)
HeapSlot *fixedStart, *fixedEnd, *slotsStart, *slotsEnd;
getSlotRangeUnchecked(start, length, &fixedStart, &fixedEnd, &slotsStart, &slotsEnd);
JSRuntime *rt = runtime();
JSRuntime *rt = runtimeFromAnyThread();
uint32_t offset = start;
for (HeapSlot *sp = fixedStart; sp < fixedEnd; sp++)
sp->init(rt, this->asObjectPtr(), HeapSlot::Slot, offset++, UndefinedValue());
@@ -275,7 +275,7 @@ js::ObjectImpl::initializeSlotRange(uint32_t start, uint32_t length)
void
js::ObjectImpl::initSlotRange(uint32_t start, const Value *vector, uint32_t length)
{
JSRuntime *rt = runtime();
JSRuntime *rt = runtimeFromAnyThread();
HeapSlot *fixedStart, *fixedEnd, *slotsStart, *slotsEnd;
getSlotRange(start, length, &fixedStart, &fixedEnd, &slotsStart, &slotsEnd);
for (HeapSlot *sp = fixedStart; sp < fixedEnd; sp++)

View file

@@ -85,14 +85,14 @@ PerThreadData::addToThreadList()
{
// PerThreadData which are created/destroyed off the main thread do not
// show up in the runtime's thread list.
runtime_->assertValidThread();
JS_ASSERT(CurrentThreadCanAccessRuntime(runtime_));
runtime_->threadList.insertBack(this);
}
void
PerThreadData::removeFromThreadList()
{
runtime_->assertValidThread();
JS_ASSERT(CurrentThreadCanAccessRuntime(runtime_));
removeFrom(runtime_->threadList);
}
@@ -393,8 +393,6 @@ JSRuntime::~JSRuntime()
# endif
sourceCompressorThread.finish();
clearOwnerThread();
JS_ASSERT(!operationCallbackOwner);
if (operationCallbackLock)
PR_DestroyLock(operationCallbackLock);
@@ -454,6 +452,10 @@ JSRuntime::~JSRuntime()
DebugOnly<size_t> oldCount = liveRuntimesCount--;
JS_ASSERT(oldCount > 0);
#ifdef JS_THREADSAFE
clearOwnerThread();
#endif
}
#ifdef JS_THREADSAFE
@@ -476,7 +478,7 @@ JSRuntime::setOwnerThread()
void
JSRuntime::clearOwnerThread()
{
assertValidThread();
JS_ASSERT(CurrentThreadCanAccessRuntime(this));
JS_ASSERT(requestDepth == 0);
ownerThread_ = (void *)0xc1ea12; /* "clear" */
js::TlsPerThreadData.set(NULL);
@@ -490,25 +492,7 @@ JSRuntime::clearOwnerThread()
asmJSMachExceptionHandler.clearCurrentThread();
#endif
}
JS_FRIEND_API(void)
JSRuntime::abortIfWrongThread() const
{
if (ownerThread_ != PR_GetCurrentThread())
MOZ_CRASH();
if (!js::TlsPerThreadData.get()->associatedWith(this))
MOZ_CRASH();
}
#ifdef DEBUG
JS_FRIEND_API(void)
JSRuntime::assertValidThread() const
{
JS_ASSERT(ownerThread_ == PR_GetCurrentThread());
JS_ASSERT(js::TlsPerThreadData.get()->associatedWith(this));
}
#endif /* DEBUG */
#endif /* JS_THREADSAFE */
void
NewObjectCache::clearNurseryObjects(JSRuntime *rt)
@@ -748,3 +732,36 @@ JSRuntime::onOutOfMemory(void *p, size_t nbytes, JSContext *cx)
return NULL;
}
#ifdef JS_THREADSAFE
bool
js::CurrentThreadCanAccessRuntime(JSRuntime *rt)
{
PerThreadData *pt = js::TlsPerThreadData.get();
JS_ASSERT(pt && pt->associatedWith(rt));
return rt->ownerThread_ == PR_GetCurrentThread() || InExclusiveParallelSection();
}
bool
js::CurrentThreadCanAccessZone(Zone *zone)
{
PerThreadData *pt = js::TlsPerThreadData.get();
JS_ASSERT(pt && pt->associatedWith(zone->runtime_));
return !InParallelSection() || InExclusiveParallelSection();
}
#else
bool
js::CurrentThreadCanAccessRuntime(JSRuntime *rt)
{
return true;
}
bool
js::CurrentThreadCanAccessZone(Zone *zone)
{
return true;
}
#endif
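
With these predicates in place, call sites that previously used assertValidThread() or abortIfWrongThread() collapse into a single assertable expression, and zone accesses gain an analogous check. The migrations elsewhere in this patch all take the same shape:

    // Runtime access, as in the ThreadPool and PerThreadData hunks:
    JS_ASSERT(js::CurrentThreadCanAccessRuntime(runtime_));

    // Zone access, as in js::ObjectImpl::zone():
    JS_ASSERT(js::CurrentThreadCanAccessZone(shape_->zone()));

In non-threadsafe builds both predicates simply return true, so these assertions become trivially satisfied without any per-call-site ifdef.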

View file

@@ -800,19 +800,10 @@ struct JSRuntime : public JS::shadow::Runtime,
void *ownerThread() const { return ownerThread_; }
void clearOwnerThread();
void setOwnerThread();
JS_FRIEND_API(void) abortIfWrongThread() const;
#ifdef DEBUG
JS_FRIEND_API(void) assertValidThread() const;
#else
void assertValidThread() const {}
#endif
private:
void *ownerThread_;
friend bool js::CurrentThreadCanAccessRuntime(JSRuntime *rt);
public:
#else
public:
void abortIfWrongThread() const {}
void assertValidThread() const {}
#endif
/* Temporary arena pool used while compiling and decompiling. */
@@ -1696,7 +1687,7 @@ PerThreadData::setIonStackLimit(uintptr_t limit)
inline JSRuntime *
PerThreadData::runtimeFromMainThread()
{
runtime_->assertValidThread();
JS_ASSERT(js::CurrentThreadCanAccessRuntime(runtime_));
return runtime_;
}

View file

@@ -1690,7 +1690,7 @@ DebugScopes::ensureCompartmentData(JSContext *cx)
if (c->debugScopes)
return c->debugScopes;
c->debugScopes = c->rt->new_<DebugScopes>(cx);
c->debugScopes = cx->runtime()->new_<DebugScopes>(cx);
if (c->debugScopes && c->debugScopes->init())
return c->debugScopes;
@@ -2026,7 +2026,7 @@ DebugScopes::hasLiveFrame(ScopeObject &scope)
* 4. GC completes, live objects may now point to values that weren't
* marked and thus may point to swept GC things
*/
if (JSGenerator *gen = frame.maybeSuspendedGenerator(scope.compartment()->rt))
if (JSGenerator *gen = frame.maybeSuspendedGenerator(scope.compartment()->runtimeFromMainThread()))
JSObject::readBarrier(gen->obj);
return frame;

View file

@@ -74,11 +74,11 @@ BaseShape::BaseShape(JSCompartment *comp, Class *clasp, JSObject *parent, JSObje
this->rawSetter = rawSetter;
if ((attrs & JSPROP_GETTER) && rawGetter) {
this->flags |= HAS_GETTER_OBJECT;
GetterSetterWriteBarrierPost(runtime(), &this->getterObj);
GetterSetterWriteBarrierPost(runtimeFromMainThread(), &this->getterObj);
}
if ((attrs & JSPROP_SETTER) && rawSetter) {
this->flags |= HAS_SETTER_OBJECT;
GetterSetterWriteBarrierPost(runtime(), &this->setterObj);
GetterSetterWriteBarrierPost(runtimeFromMainThread(), &this->setterObj);
}
this->compartment_ = comp;
}
@@ -94,9 +94,9 @@ BaseShape::BaseShape(const StackBaseShape &base)
this->rawGetter = base.rawGetter;
this->rawSetter = base.rawSetter;
if ((base.flags & HAS_GETTER_OBJECT) && base.rawGetter)
GetterSetterWriteBarrierPost(runtime(), &this->getterObj);
GetterSetterWriteBarrierPost(runtimeFromMainThread(), &this->getterObj);
if ((base.flags & HAS_SETTER_OBJECT) && base.rawSetter)
GetterSetterWriteBarrierPost(runtime(), &this->setterObj);
GetterSetterWriteBarrierPost(runtimeFromMainThread(), &this->setterObj);
this->compartment_ = base.compartment;
}
@@ -110,18 +110,18 @@ BaseShape::operator=(const BaseShape &other)
slotSpan_ = other.slotSpan_;
if (flags & HAS_GETTER_OBJECT) {
getterObj = other.getterObj;
GetterSetterWriteBarrierPost(runtime(), &getterObj);
GetterSetterWriteBarrierPost(runtimeFromMainThread(), &getterObj);
} else {
if (rawGetter)
GetterSetterWriteBarrierPostRemove(runtime(), &getterObj);
GetterSetterWriteBarrierPostRemove(runtimeFromMainThread(), &getterObj);
rawGetter = other.rawGetter;
}
if (flags & HAS_SETTER_OBJECT) {
setterObj = other.setterObj;
GetterSetterWriteBarrierPost(runtime(), &setterObj);
GetterSetterWriteBarrierPost(runtimeFromMainThread(), &setterObj);
} else {
if (rawSetter)
GetterSetterWriteBarrierPostRemove(runtime(), &setterObj);
GetterSetterWriteBarrierPostRemove(runtimeFromMainThread(), &setterObj);
rawSetter = other.rawSetter;
}
compartment_ = other.compartment_;
@@ -372,7 +372,7 @@ inline void
Shape::writeBarrierPre(Shape *shape)
{
#ifdef JSGC_INCREMENTAL
if (!shape || !shape->runtime()->needsBarrier())
if (!shape || !shape->runtimeFromAnyThread()->needsBarrier())
return;
JS::Zone *zone = shape->zone();
@@ -410,7 +410,7 @@ inline void
BaseShape::writeBarrierPre(BaseShape *base)
{
#ifdef JSGC_INCREMENTAL
if (!base || !base->runtime()->needsBarrier())
if (!base || !base->runtimeFromAnyThread()->needsBarrier())
return;
JS::Zone *zone = base->zone();

View file

@@ -1269,7 +1269,8 @@ BaseShape::getUnowned(ExclusiveContext *cx, const StackBaseShape &base)
void
JSCompartment::sweepBaseShapeTable()
{
gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_TABLES_BASE_SHAPE);
gcstats::AutoPhase ap(runtimeFromMainThread()->gcStats,
gcstats::PHASE_SWEEP_TABLES_BASE_SHAPE);
if (baseShapes.initialized()) {
for (BaseShapeSet::Enum e(baseShapes); !e.empty(); e.popFront()) {
@@ -1466,7 +1467,8 @@ JSCompartment::markAllInitialShapeTableEntries(JSTracer *trc)
void
JSCompartment::sweepInitialShapeTable()
{
gcstats::AutoPhase ap(rt->gcStats, gcstats::PHASE_SWEEP_TABLES_INITIAL_SHAPE);
gcstats::AutoPhase ap(runtimeFromMainThread()->gcStats,
gcstats::PHASE_SWEEP_TABLES_INITIAL_SHAPE);
if (initialShapes.initialized()) {
for (InitialShapeSet::Enum e(initialShapes); !e.empty(); e.popFront()) {

View file

@@ -105,7 +105,7 @@ inline void
JSString::writeBarrierPre(JSString *str)
{
#ifdef JSGC_INCREMENTAL
if (!str || !str->runtime()->needsBarrier())
if (!str || !str->runtimeFromAnyThread()->needsBarrier())
return;
JS::Zone *zone = str->zone();

View file

@@ -287,8 +287,7 @@ bool
ThreadPool::submitOne(JSContext *cx, TaskExecutor *executor)
{
JS_ASSERT(numWorkers() > 0);
runtime_->assertValidThread();
JS_ASSERT(CurrentThreadCanAccessRuntime(runtime_));
if (!lazyStartWorkers(cx))
return false;
@@ -301,7 +300,7 @@ ThreadPool::submitOne(JSContext *cx, TaskExecutor *executor)
bool
ThreadPool::submitAll(JSContext *cx, TaskExecutor *executor)
{
runtime_->assertValidThread();
JS_ASSERT(CurrentThreadCanAccessRuntime(runtime_));
if (!lazyStartWorkers(cx))
return false;

View file

@@ -284,7 +284,7 @@ PostBarrierTypedArrayObject(JSObject *obj)
{
#ifdef JSGC_GENERATIONAL
JS_ASSERT(obj);
JSRuntime *rt = obj->runtime();
JSRuntime *rt = obj->runtimeFromMainThread();
if (!rt->isHeapBusy() && !IsInsideNursery(rt, obj))
rt->gcStoreBuffer.putWholeCell(obj);
#endif
@@ -325,7 +325,7 @@ InitViewList(ArrayBufferObject *obj, ArrayBufferViewObject *viewsHead)
static EncapsulatedPtr<ArrayBufferViewObject> &
GetViewListRef(ArrayBufferObject *obj)
{
JS_ASSERT(obj->runtime()->isHeapBusy());
JS_ASSERT(obj->runtimeFromMainThread()->isHeapBusy());
return reinterpret_cast<OldObjectRepresentationHack*>(obj->getElementsHeader())->views;
}