Mirror of https://github.com/mozilla/gecko-dev.git
Bug 1283855 part 28 - Make more GC APIs take JSContext instead of JSRuntime. r=terrence
--HG-- extra : rebase_source : c2d35b5d45cad074b9a9a144bc25ea4a32b8b246
Parent: 7ec38687c8
Commit: 900af968f3
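
For reference, a minimal sketch (not part of the patch) of what this migration looks like at an embedder call site: the GC query entry points below now take the JSContext* directly instead of a JSRuntime* obtained via JS_GetRuntime(). The helper function name is hypothetical; the JS:: calls are the ones this patch converts.

    #include "jsapi.h"
    #include "js/GCAPI.h"

    // Hypothetical embedder helper illustrating the call-site change.
    static bool
    CanRelyOnIncrementalGC(JSContext* cx)
    {
        // Before this patch: JS::IsIncrementalGCEnabled(JS_GetRuntime(cx))
        // Afterwards the context is passed straight through:
        return JS::IsIncrementalGCEnabled(cx) && !JS::IsIncrementalGCInProgress(cx);
    }
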
@@ -3075,7 +3075,7 @@ nsDOMWindowUtils::FlushPendingFileDeletions()
 NS_IMETHODIMP
 nsDOMWindowUtils::IsIncrementalGCEnabled(JSContext* cx, bool* aResult)
 {
-  *aResult = JS::IsIncrementalGCEnabled(JS_GetRuntime(cx));
+  *aResult = JS::IsIncrementalGCEnabled(cx);
   return NS_OK;
 }

@@ -4942,7 +4942,7 @@ WorkerPrivate::BlockAndCollectRuntimeStats(JS::RuntimeStats* aRtStats,
   if (mMemoryReporter) {
     // Don't hold the lock while doing the actual report.
     MutexAutoUnlock unlock(mMutex);
-    succeeded = JS::CollectRuntimeStats(rt, aRtStats, nullptr, aAnonymize);
+    succeeded = JS::CollectRuntimeStats(mJSContext, aRtStats, nullptr, aAnonymize);
   }

   NS_ASSERTION(mMemoryReporterRunning, "This isn't possible!");

@@ -393,7 +393,7 @@ SetGCNurseryCollectionCallback(JSContext* cx, GCNurseryCollectionCallback callba
  * disabled on the runtime.
  */
 extern JS_PUBLIC_API(void)
-DisableIncrementalGC(JSRuntime* rt);
+DisableIncrementalGC(JSContext* cx);

 /**
  * Returns true if incremental GC is enabled. Simply having incremental GC

@@ -404,14 +404,14 @@ DisableIncrementalGC(JSRuntime* rt);
  * collections are not happening incrementally when expected.
  */
 extern JS_PUBLIC_API(bool)
-IsIncrementalGCEnabled(JSRuntime* rt);
+IsIncrementalGCEnabled(JSContext* cx);

 /**
  * Returns true while an incremental GC is ongoing, both when actively
  * collecting and between slices.
  */
 extern JS_PUBLIC_API(bool)
-IsIncrementalGCInProgress(JSRuntime* rt);
+IsIncrementalGCInProgress(JSContext* cx);

 /*
  * Returns true when writes to GC things must call an incremental (pre) barrier.

@@ -419,7 +419,7 @@ IsIncrementalGCInProgress(JSRuntime* rt);
  * At other times, the barrier may be elided for performance.
  */
 extern JS_PUBLIC_API(bool)
-IsIncrementalBarrierNeeded(JSRuntime* rt);
+IsIncrementalBarrierNeeded(JSContext* cx);

 /*
  * Notify the GC that a reference to a GC thing is about to be overwritten.

@@ -438,7 +438,7 @@ IncrementalObjectBarrier(JSObject* obj);
  * Returns true if the most recent GC ran incrementally.
  */
 extern JS_PUBLIC_API(bool)
-WasIncrementalGC(JSRuntime* rt);
+WasIncrementalGC(JSContext* cx);

 /*
  * Generational GC:

@@ -883,7 +883,7 @@ class ObjectPrivateVisitor
 };

 extern JS_PUBLIC_API(bool)
-CollectRuntimeStats(JSRuntime* rt, RuntimeStats* rtStats, ObjectPrivateVisitor* opv, bool anonymize);
+CollectRuntimeStats(JSContext* cx, RuntimeStats* rtStats, ObjectPrivateVisitor* opv, bool anonymize);

 extern JS_PUBLIC_API(size_t)
 SystemCompartmentCount(JSRuntime* rt);

@@ -895,11 +895,11 @@ extern JS_PUBLIC_API(size_t)
 PeakSizeOfTemporary(const JSRuntime* rt);

 extern JS_PUBLIC_API(bool)
-AddSizeOfTab(JSRuntime* rt, JS::HandleObject obj, mozilla::MallocSizeOf mallocSizeOf,
+AddSizeOfTab(JSContext* cx, JS::HandleObject obj, mozilla::MallocSizeOf mallocSizeOf,
              ObjectPrivateVisitor* opv, TabSizes* sizes);

 extern JS_PUBLIC_API(bool)
-AddServoSizeOf(JSRuntime *rt, mozilla::MallocSizeOf mallocSizeOf,
+AddServoSizeOf(JSContext* cx, mozilla::MallocSizeOf mallocSizeOf,
                ObjectPrivateVisitor *opv, ServoSizes *sizes);

 } // namespace JS

@@ -1089,17 +1089,13 @@ class JS_PUBLIC_API(ObjectPtr)
     /* Always call finalize before the destructor. */
     ~ObjectPtr() { MOZ_ASSERT(!value); }

-    void finalize(JSRuntime* rt) {
-        if (IsIncrementalBarrierNeeded(rt))
-            IncrementalObjectBarrier(value);
-        value = nullptr;
-    }
+    void finalize(JSRuntime* rt);

     void init(JSObject* obj) { value = obj; }

     JSObject* get() const { return value; }

-    void writeBarrierPre(JSRuntime* rt) {
+    void writeBarrierPre(JSContext* cx) {
         IncrementalObjectBarrier(value);
     }

@@ -431,7 +431,7 @@ GCParameter(JSContext* cx, unsigned argc, Value* vp)
     }

     uint32_t value = floor(d);
-    if (param == JSGC_MARK_STACK_LIMIT && JS::IsIncrementalGCInProgress(cx->runtime())) {
+    if (param == JSGC_MARK_STACK_LIMIT && JS::IsIncrementalGCInProgress(cx)) {
         JS_ReportError(cx, "attempt to set markStackLimit while a GC is in progress");
         return false;
     }

@@ -20,7 +20,7 @@
 namespace js {
 namespace gc {

-void FinishGC(JSRuntime* rt);
+void FinishGC(JSContext* cx);

 /*
  * This class should be used by any code that needs to exclusive access to the

@@ -50,7 +50,7 @@ class MOZ_RAII AutoPrepareForTracing
     mozilla::Maybe<AutoTraceSession> session_;

   public:
-    AutoPrepareForTracing(JSRuntime* rt, ZoneSelector selector);
+    AutoPrepareForTracing(JSContext* cx, ZoneSelector selector);
     AutoTraceSession& session() { return session_.ref(); }
 };

@@ -24,7 +24,7 @@ js::TraceRuntime(JSTracer* trc)

     JSRuntime* rt = trc->runtime();
     rt->gc.evictNursery();
-    AutoPrepareForTracing prep(rt, WithAtoms);
+    AutoPrepareForTracing prep(rt->contextFromMainThread(), WithAtoms);
     gcstats::AutoPhase ap(rt->gc.stats, gcstats::PHASE_TRACE_HEAP);
     rt->gc.markRuntime(trc, GCRuntime::TraceRuntime, prep.session().lock);
 }

@@ -52,62 +52,62 @@ IterateCompartmentsArenasCells(JSRuntime* rt, Zone* zone, void* data,
 }

 void
-js::IterateZonesCompartmentsArenasCells(JSRuntime* rt, void* data,
+js::IterateZonesCompartmentsArenasCells(JSContext* cx, void* data,
                                         IterateZoneCallback zoneCallback,
                                         JSIterateCompartmentCallback compartmentCallback,
                                         IterateArenaCallback arenaCallback,
                                         IterateCellCallback cellCallback)
 {
-    AutoPrepareForTracing prop(rt, WithAtoms);
+    AutoPrepareForTracing prop(cx, WithAtoms);

-    for (ZonesIter zone(rt, WithAtoms); !zone.done(); zone.next()) {
-        (*zoneCallback)(rt, data, zone);
-        IterateCompartmentsArenasCells(rt, zone, data,
+    for (ZonesIter zone(cx, WithAtoms); !zone.done(); zone.next()) {
+        (*zoneCallback)(cx, data, zone);
+        IterateCompartmentsArenasCells(cx, zone, data,
                                        compartmentCallback, arenaCallback, cellCallback);
     }
 }

 void
-js::IterateZoneCompartmentsArenasCells(JSRuntime* rt, Zone* zone, void* data,
+js::IterateZoneCompartmentsArenasCells(JSContext* cx, Zone* zone, void* data,
                                        IterateZoneCallback zoneCallback,
                                        JSIterateCompartmentCallback compartmentCallback,
                                        IterateArenaCallback arenaCallback,
                                        IterateCellCallback cellCallback)
 {
-    AutoPrepareForTracing prop(rt, WithAtoms);
+    AutoPrepareForTracing prop(cx, WithAtoms);

-    (*zoneCallback)(rt, data, zone);
-    IterateCompartmentsArenasCells(rt, zone, data,
+    (*zoneCallback)(cx, data, zone);
+    IterateCompartmentsArenasCells(cx, zone, data,
                                    compartmentCallback, arenaCallback, cellCallback);
 }

 void
-js::IterateChunks(JSRuntime* rt, void* data, IterateChunkCallback chunkCallback)
+js::IterateChunks(JSContext* cx, void* data, IterateChunkCallback chunkCallback)
 {
-    AutoPrepareForTracing prep(rt, SkipAtoms);
+    AutoPrepareForTracing prep(cx, SkipAtoms);

-    for (auto chunk = rt->gc.allNonEmptyChunks(); !chunk.done(); chunk.next())
-        chunkCallback(rt, data, chunk);
+    for (auto chunk = cx->gc.allNonEmptyChunks(); !chunk.done(); chunk.next())
+        chunkCallback(cx, data, chunk);
 }

 void
-js::IterateScripts(JSRuntime* rt, JSCompartment* compartment,
+js::IterateScripts(JSContext* cx, JSCompartment* compartment,
                    void* data, IterateScriptCallback scriptCallback)
 {
-    MOZ_ASSERT(!rt->mainThread.suppressGC);
-    AutoEmptyNursery empty(rt);
-    AutoPrepareForTracing prep(rt, SkipAtoms);
+    MOZ_ASSERT(!cx->mainThread().suppressGC);
+    AutoEmptyNursery empty(cx);
+    AutoPrepareForTracing prep(cx, SkipAtoms);

     if (compartment) {
         Zone* zone = compartment->zone();
         for (auto script = zone->cellIter<JSScript>(empty); !script.done(); script.next()) {
             if (script->compartment() == compartment)
-                scriptCallback(rt, data, script);
+                scriptCallback(cx, data, script);
         }
     } else {
-        for (ZonesIter zone(rt, SkipAtoms); !zone.done(); zone.next()) {
+        for (ZonesIter zone(cx, SkipAtoms); !zone.done(); zone.next()) {
             for (auto script = zone->cellIter<JSScript>(empty); !script.done(); script.next())
-                scriptCallback(rt, data, script);
+                scriptCallback(cx, data, script);
         }
     }
 }

@@ -117,7 +117,7 @@ js::IterateGrayObjects(Zone* zone, GCThingCallback cellCallback, void* data)
 {
     JSRuntime* rt = zone->runtimeFromMainThread();
     AutoEmptyNursery empty(rt);
-    AutoPrepareForTracing prep(rt, SkipAtoms);
+    AutoPrepareForTracing prep(rt->contextFromMainThread(), SkipAtoms);

     for (auto thingKind : ObjectAllocKinds()) {
         for (auto obj = zone->cellIter<JSObject>(thingKind, empty); !obj.done(); obj.next()) {

@@ -183,7 +183,7 @@ gc::GCRuntime::startVerifyPreBarriers()
     if (!trc)
         return;

-    AutoPrepareForTracing prep(rt, WithAtoms);
+    AutoPrepareForTracing prep(rt->contextFromMainThread(), WithAtoms);

     for (auto chunk = allNonEmptyChunks(); !chunk.done(); chunk.next())
         chunk->bitmap.clear();

@@ -312,7 +312,7 @@ gc::GCRuntime::endVerifyPreBarriers()

     MOZ_ASSERT(!JS::IsGenerationalGCEnabled(rt));

-    AutoPrepareForTracing prep(rt, SkipAtoms);
+    AutoPrepareForTracing prep(rt->contextFromMainThread(), SkipAtoms);

     bool compartmentCreated = false;

@@ -48,7 +48,7 @@ BEGIN_TEST(testGCAllocator)
 #endif

     /* Finish any ongoing background free activity. */
-    js::gc::FinishGC(rt);
+    js::gc::FinishGC(cx);

     bool growUp;
     CHECK(addressesGrowUp(&growUp));

@@ -168,7 +168,7 @@ BEGIN_TEST(testIncrementalRoots)
     // We'd better be between iGC slices now. There's always a risk that
     // something will decide that we need to do a full GC (such as gczeal, but
     // that is turned off.)
-    MOZ_ASSERT(JS::IsIncrementalGCInProgress(rt));
+    MOZ_ASSERT(JS::IsIncrementalGCInProgress(cx));

     // And assert that the mark bits are as we expect them to be.
     MOZ_ASSERT(vec[0]->asTenured().isMarked());

@@ -12,13 +12,13 @@
 #include "jsapi-tests/tests.h"

 static void
-MinimizeHeap(JSRuntime* rt)
+MinimizeHeap(JSContext* cx)
 {
     // The second collection is to force us to wait for the background
     // sweeping that the first GC started to finish.
-    JS_GC(JS_GetContext(rt));
-    JS_GC(JS_GetContext(rt));
-    js::gc::FinishGC(rt);
+    JS_GC(cx);
+    JS_GC(cx);
+    js::gc::FinishGC(cx);
 }

 BEGIN_TEST(testGCUID)

@@ -31,7 +31,7 @@ BEGIN_TEST(testGCUID)
     uint64_t tmp = 0;

     // Ensure the heap is as minimal as it can get.
-    MinimizeHeap(rt);
+    MinimizeHeap(cx);

     JS::RootedObject obj(cx, JS_NewPlainObject(cx));
     uintptr_t nurseryAddr = uintptr_t(obj.get());

@@ -53,7 +53,7 @@ BEGIN_TEST(testGCUID)
     CHECK(uid == tmp);

     // Tenure the thing and check that the UID moved with it.
-    MinimizeHeap(rt);
+    MinimizeHeap(cx);
     uintptr_t tenuredAddr = uintptr_t(obj.get());
     CHECK(tenuredAddr != nurseryAddr);
     CHECK(!js::gc::IsInsideNursery(obj));

@@ -71,9 +71,9 @@ BEGIN_TEST(testGCUID)
     // Try to get another tenured object in the same location and check that
     // the uid was removed correctly.
     obj = nullptr;
-    MinimizeHeap(rt);
+    MinimizeHeap(cx);
     obj = JS_NewPlainObject(cx);
-    MinimizeHeap(rt);
+    MinimizeHeap(cx);
     CHECK(uintptr_t(obj.get()) == tenuredAddr);
     CHECK(!obj->zone()->hasUniqueId(obj));
     CHECK(obj->zone()->getUniqueId(obj, &tmp));

@@ -91,7 +91,7 @@ BEGIN_TEST(testGCUID)
     }

     // Transfer our vector to tenured if it isn't there already.
-    MinimizeHeap(rt);
+    MinimizeHeap(cx);

     // Tear holes in the heap by unrooting the even objects and collecting.
     JS::Rooted<ObjectVector> vec2(cx, ObjectVector(cx));

@@ -100,7 +100,7 @@ BEGIN_TEST(testGCUID)
         vec2.append(vec[i]);
     }
     vec.clear();
-    MinimizeHeap(rt);
+    MinimizeHeap(cx);

     // Grab the last object in the vector as our object of interest.
     obj = vec2.back();

@@ -112,7 +112,7 @@ BEGIN_TEST(testGCUID)
     // the new tenured heap location.
     JS::PrepareForFullGC(cx);
     JS::GCForReason(cx, GC_SHRINK, JS::gcreason::API);
-    MinimizeHeap(rt);
+    MinimizeHeap(cx);
     CHECK(uintptr_t(obj.get()) != tenuredAddr);
     CHECK(obj->zone()->hasUniqueId(obj));
     CHECK(obj->zone()->getUniqueId(obj, &tmp));

@@ -28,7 +28,7 @@ unsigned
 countIonScripts(JSObject* global)
 {
     unsigned count = 0;
-    js::IterateScripts(rt, global->compartment(), &count, ScriptCallback);
+    js::IterateScripts(cx, global->compartment(), &count, ScriptCallback);
     return count;
 }

@@ -99,7 +99,7 @@ BEGIN_TEST(testWeakMap_keyDelegates)
     CHECK(newCCW(map, delegateRoot));
     js::SliceBudget budget(js::WorkBudget(1000000));
     rt->gc.startDebugGC(GC_NORMAL, budget);
-    while (JS::IsIncrementalGCInProgress(rt))
+    while (JS::IsIncrementalGCInProgress(cx))
         rt->gc.debugGCSlice(budget);
 #ifdef DEBUG
     CHECK(map->zone()->lastZoneGroupIndex() < delegateRoot->zone()->lastZoneGroupIndex());

@@ -115,7 +115,7 @@ BEGIN_TEST(testWeakMap_keyDelegates)
     CHECK(newCCW(map, delegateRoot));
     budget = js::SliceBudget(js::WorkBudget(100000));
     rt->gc.startDebugGC(GC_NORMAL, budget);
-    while (JS::IsIncrementalGCInProgress(rt))
+    while (JS::IsIncrementalGCInProgress(cx))
         rt->gc.debugGCSlice(budget);
     CHECK(checkSize(map, 1));

@@ -856,7 +856,7 @@ JS_TransplantObject(JSContext* cx, HandleObject origobj, HandleObject target)
     RootedObject newIdentity(cx);

     // Don't allow a compacting GC to observe any intermediate state.
-    AutoDisableCompactingGC nocgc(cx->runtime());
+    AutoDisableCompactingGC nocgc(cx);

     AutoDisableProxyCheck adpc(cx->runtime());


@@ -1160,6 +1160,14 @@ js::GetAnyCompartmentInZone(JS::Zone* zone)
     return comp.get();
 }

+void
+JS::ObjectPtr::finalize(JSRuntime* rt)
+{
+    if (IsIncrementalBarrierNeeded(rt->contextFromMainThread()))
+        IncrementalObjectBarrier(value);
+    value = nullptr;
+}
+
 void
 JS::ObjectPtr::updateWeakPointerAfterGC()
 {

@@ -1478,7 +1478,7 @@ JSFunction::createScriptForLazilyInterpretedFunction(JSContext* cx, HandleFuncti
     // Additionally, the lazy script cache is not used during incremental
     // GCs, to avoid resurrecting dead scripts after incremental sweeping
     // has started.
-    if (canRelazify && !JS::IsIncrementalGCInProgress(cx->runtime())) {
+    if (canRelazify && !JS::IsIncrementalGCInProgress(cx)) {
         LazyScriptCache::Lookup lookup(cx, lazy);
         cx->caches.lazyScriptCache.lookup(lookup, script.address());
     }

@@ -1742,12 +1742,12 @@ GCRuntime::isCompactingGCEnabled() const
     return compactingEnabled && compactingDisabledCount == 0;
 }

-AutoDisableCompactingGC::AutoDisableCompactingGC(JSRuntime* rt)
-  : gc(rt->gc)
+AutoDisableCompactingGC::AutoDisableCompactingGC(JSContext* cx)
+  : gc(cx->gc)
 {
     gc.disableCompactingGC();
     if (gc.isIncrementalGCInProgress() && gc.isCompactingGc())
-        FinishGC(rt);
+        FinishGC(cx);
 }

 AutoDisableCompactingGC::~AutoDisableCompactingGC()

|
|||
return false;
|
||||
}
|
||||
|
||||
void js::gc::FinishGC(JSRuntime* rt)
|
||||
void
|
||||
js::gc::FinishGC(JSContext* cx)
|
||||
{
|
||||
if (JS::IsIncrementalGCInProgress(rt)) {
|
||||
JS::PrepareForIncrementalGC(rt->contextFromMainThread());
|
||||
JS::FinishIncrementalGC(rt->contextFromMainThread(), JS::gcreason::API);
|
||||
if (JS::IsIncrementalGCInProgress(cx)) {
|
||||
JS::PrepareForIncrementalGC(cx);
|
||||
JS::FinishIncrementalGC(cx, JS::gcreason::API);
|
||||
}
|
||||
|
||||
rt->gc.nursery.waitBackgroundFreeEnd();
|
||||
cx->gc.nursery.waitBackgroundFreeEnd();
|
||||
}
|
||||
|
||||
AutoPrepareForTracing::AutoPrepareForTracing(JSRuntime* rt, ZoneSelector selector)
|
||||
AutoPrepareForTracing::AutoPrepareForTracing(JSContext* cx, ZoneSelector selector)
|
||||
{
|
||||
js::gc::FinishGC(rt);
|
||||
session_.emplace(rt);
|
||||
js::gc::FinishGC(cx);
|
||||
session_.emplace(cx);
|
||||
}
|
||||
|
||||
JSCompartment*
|
||||
|
@@ -6677,9 +6678,9 @@ gc::MergeCompartments(JSCompartment* source, JSCompartment* target)
     MOZ_ASSERT(source->creationOptions().addonIdOrNull() ==
                target->creationOptions().addonIdOrNull());

-    JSRuntime* rt = source->runtimeFromMainThread();
+    JSContext* cx = source->contextFromMainThread();

-    AutoPrepareForTracing prepare(rt, SkipAtoms);
+    AutoPrepareForTracing prepare(cx, SkipAtoms);

     // Cleanup tables and other state in the source compartment that will be
     // meaningless after merging into the target compartment.

@@ -6695,14 +6696,14 @@ gc::MergeCompartments(JSCompartment* source, JSCompartment* target)

     // Release any relocated arenas which we may be holding on to as they might
     // be in the source zone
-    rt->gc.releaseHeldRelocatedArenas();
+    cx->gc.releaseHeldRelocatedArenas();

     // Fixup compartment pointers in source to refer to target, and make sure
     // type information generations are in sync.

     // Get the static global lexical scope of the target compartment. Static
     // scopes need to be fixed up below.
-    RootedObject targetStaticGlobalLexicalScope(rt);
+    RootedObject targetStaticGlobalLexicalScope(cx);
     targetStaticGlobalLexicalScope = &target->maybeGlobal()->lexicalScope().staticBlock();

     for (auto script = source->zone()->cellIter<JSScript>(); !script.done(); script.next()) {

@@ -6781,7 +6782,7 @@ gc::MergeCompartments(JSCompartment* source, JSCompartment* target)
         MOZ_ASSERT(c.get() == source);

     // Merge the allocator, stats and UIDs in source's zone into target's zone.
-    target->zone()->arenas.adoptArenas(rt, &source->zone()->arenas);
+    target->zone()->arenas.adoptArenas(cx, &source->zone()->arenas);
     target->zone()->usage.adopt(source->zone()->usage);
     target->zone()->adoptUniqueIds(source->zone());

@@ -7326,27 +7327,27 @@ JS::SetGCNurseryCollectionCallback(JSContext* cx, GCNurseryCollectionCallback ca
 }

 JS_PUBLIC_API(void)
-JS::DisableIncrementalGC(JSRuntime* rt)
+JS::DisableIncrementalGC(JSContext* cx)
 {
-    rt->gc.disallowIncrementalGC();
+    cx->gc.disallowIncrementalGC();
 }

 JS_PUBLIC_API(bool)
-JS::IsIncrementalGCEnabled(JSRuntime* rt)
+JS::IsIncrementalGCEnabled(JSContext* cx)
 {
-    return rt->gc.isIncrementalGCEnabled();
+    return cx->gc.isIncrementalGCEnabled();
 }

 JS_PUBLIC_API(bool)
-JS::IsIncrementalGCInProgress(JSRuntime* rt)
+JS::IsIncrementalGCInProgress(JSContext* cx)
 {
-    return rt->gc.isIncrementalGCInProgress() && !rt->gc.isVerifyPreBarriersEnabled();
+    return cx->gc.isIncrementalGCInProgress() && !cx->gc.isVerifyPreBarriersEnabled();
 }

 JS_PUBLIC_API(bool)
-JS::IsIncrementalBarrierNeeded(JSRuntime* rt)
+JS::IsIncrementalBarrierNeeded(JSContext* cx)
 {
-    return rt->gc.state() == gc::MARK && !rt->isHeapBusy();
+    return cx->gc.state() == gc::MARK && !cx->isHeapBusy();
 }

 struct IncrementalReferenceBarrierFunctor {

@@ -7380,9 +7381,9 @@ JS::IncrementalObjectBarrier(JSObject* obj)
 }

 JS_PUBLIC_API(bool)
-JS::WasIncrementalGC(JSRuntime* rt)
+JS::WasIncrementalGC(JSContext* cx)
 {
-    return rt->gc.isIncrementalGc();
+    return cx->gc.isIncrementalGc();
 }

 JS::AutoDisableGenerationalGC::AutoDisableGenerationalGC(JSRuntime* rt)

@@ -992,7 +992,7 @@ typedef void (*IterateCellCallback)(JSRuntime* rt, void* data, void* thing,
  * on every in-use cell in the GC heap.
  */
 extern void
-IterateZonesCompartmentsArenasCells(JSRuntime* rt, void* data,
+IterateZonesCompartmentsArenasCells(JSContext* cx, void* data,
                                     IterateZoneCallback zoneCallback,
                                     JSIterateCompartmentCallback compartmentCallback,
                                     IterateArenaCallback arenaCallback,

@@ -1003,7 +1003,7 @@ IterateZonesCompartmentsArenasCells(JSRuntime* rt, void* data,
  * single zone.
  */
 extern void
-IterateZoneCompartmentsArenasCells(JSRuntime* rt, Zone* zone, void* data,
+IterateZoneCompartmentsArenasCells(JSContext* cx, Zone* zone, void* data,
                                    IterateZoneCallback zoneCallback,
                                    JSIterateCompartmentCallback compartmentCallback,
                                    IterateArenaCallback arenaCallback,

@@ -1013,7 +1013,7 @@ IterateZoneCompartmentsArenasCells(JSRuntime* rt, Zone* zone, void* data,
  * Invoke chunkCallback on every in-use chunk.
  */
 extern void
-IterateChunks(JSRuntime* rt, void* data, IterateChunkCallback chunkCallback);
+IterateChunks(JSContext* cx, void* data, IterateChunkCallback chunkCallback);

 typedef void (*IterateScriptCallback)(JSRuntime* rt, void* data, JSScript* script);

@@ -1022,7 +1022,7 @@ typedef void (*IterateScriptCallback)(JSRuntime* rt, void* data, JSScript* scrip
  * the given compartment or for all compartments if it is null.
  */
 extern void
-IterateScripts(JSRuntime* rt, JSCompartment* compartment,
+IterateScripts(JSContext* cx, JSCompartment* compartment,
                void* data, IterateScriptCallback scriptCallback);

 extern void

@@ -1443,7 +1443,7 @@ struct MOZ_RAII AutoDisableProxyCheck

 struct MOZ_RAII AutoDisableCompactingGC
 {
-    explicit AutoDisableCompactingGC(JSRuntime* rt);
+    explicit AutoDisableCompactingGC(JSContext* cx);
     ~AutoDisableCompactingGC();

   private:

@@ -2021,7 +2021,7 @@ GenerateLcovInfo(JSContext* cx, JSCompartment* comp, GenericPrinter& out)

     // Collect the list of scripts which are part of the current compartment.
     {
-        js::gc::AutoPrepareForTracing apft(rt, SkipAtoms);
+        js::gc::AutoPrepareForTracing apft(cx, SkipAtoms);
     }
     Rooted<ScriptVector> topScripts(cx, ScriptVector(cx));
     for (ZonesIter zone(rt, SkipAtoms); !zone.done(); zone.next()) {

@@ -2495,8 +2495,8 @@ SaveSharedScriptData(ExclusiveContext* cx, Handle<JSScript*> script, SharedScrip
      * reachable. This is effectively a read barrier.
      */
     if (cx->isJSContext()) {
-        JSRuntime* rt = cx->asJSContext()->runtime();
-        if (JS::IsIncrementalGCInProgress(rt) && rt->gc.isFullGc())
+        JSContext* ncx = cx->asJSContext();
+        if (JS::IsIncrementalGCInProgress(ncx) && ncx->gc.isFullGc())
             ssd->marked = true;
     }

@@ -136,7 +136,8 @@ class WeakMap : public HashMap<Key, Value, HashPolicy, RuntimeAllocPolicy>,
         if (!Base::init(len))
             return false;
         zone->gcWeakMapList.insertFront(this);
-        marked = JS::IsIncrementalGCInProgress(zone->runtimeFromMainThread());
+        JSRuntime* rt = zone->runtimeFromMainThread();
+        marked = JS::IsIncrementalGCInProgress(rt->contextFromMainThread());
         return true;
     }

@@ -3917,7 +3917,7 @@ runOffThreadScript(JSContext* cx, unsigned argc, Value* vp)

     JSRuntime* rt = cx->runtime();
     if (OffThreadParsingMustWaitForGC(rt))
-        gc::FinishGC(rt);
+        gc::FinishGC(cx);

     void* token = offThreadState.waitUntilDone(cx, ScriptKind::Script);
     if (!token) {

@@ -4003,7 +4003,7 @@ FinishOffThreadModule(JSContext* cx, unsigned argc, Value* vp)

     JSRuntime* rt = cx->runtime();
     if (OffThreadParsingMustWaitForGC(rt))
-        gc::FinishGC(rt);
+        gc::FinishGC(cx);

     void* token = offThreadState.waitUntilDone(cx, ScriptKind::Module);
     if (!token) {

@@ -7030,7 +7030,7 @@ Shell(JSContext* cx, OptionParser* op, char** envp)

     Maybe<AutoDisableCompactingGC> nocgc;
     if (op->getBoolOption("no-cgc"))
-        nocgc.emplace(cx->runtime());
+        nocgc.emplace(cx);

     JSAutoRequest ar(cx);

@@ -4128,7 +4128,7 @@ class MOZ_STACK_CLASS Debugger::ScriptQuery
         /* Search each compartment for debuggee scripts. */
         MOZ_ASSERT(vector.empty());
         oom = false;
-        IterateScripts(cx->runtime(), singletonComp, this, considerScript);
+        IterateScripts(cx, singletonComp, this, considerScript);
         if (oom) {
             ReportOutOfMemory(cx);
             return false;

@@ -1157,7 +1157,7 @@ GlobalHelperThreadState::finishParseTask(JSContext* maybecx, JSRuntime* rt, Pars
         return nullptr;
     }

-    mergeParseTaskCompartment(rt, parseTask, global, cx->compartment());
+    mergeParseTaskCompartment(cx, parseTask, global, cx->compartment());

     if (!parseTask->finish(cx))
         return nullptr;

@@ -1229,7 +1229,7 @@ GlobalObject::getStarGeneratorFunctionPrototype()
 }

 void
-GlobalHelperThreadState::mergeParseTaskCompartment(JSRuntime* rt, ParseTask* parseTask,
+GlobalHelperThreadState::mergeParseTaskCompartment(JSContext* cx, ParseTask* parseTask,
                                                    Handle<GlobalObject*> global,
                                                    JSCompartment* dest)
 {

@@ -1237,10 +1237,10 @@ GlobalHelperThreadState::mergeParseTaskCompartment(JSRuntime* rt, ParseTask* par
     // finished merging the contents of the parse task's compartment into the
     // destination compartment. Finish any ongoing incremental GC first and
     // assert that no allocation can occur.
-    gc::FinishGC(rt);
-    JS::AutoAssertNoAlloc noAlloc(rt);
+    gc::FinishGC(cx);
+    JS::AutoAssertNoAlloc noAlloc(cx);

-    LeaveParseTaskZone(rt, parseTask);
+    LeaveParseTaskZone(cx, parseTask);

     {
         // Generator functions don't have Function.prototype as prototype but a

@@ -226,7 +226,7 @@ class GlobalHelperThreadState
     }

     JSScript* finishParseTask(JSContext* maybecx, JSRuntime* rt, ParseTaskKind kind, void* token);
-    void mergeParseTaskCompartment(JSRuntime* rt, ParseTask* parseTask,
+    void mergeParseTaskCompartment(JSContext* cx, ParseTask* parseTask,
                                    Handle<GlobalObject*> global,
                                    JSCompartment* dest);

@@ -768,9 +768,10 @@ FindNotableScriptSources(JS::RuntimeSizes& runtime)
 }

 static bool
-CollectRuntimeStatsHelper(JSRuntime* rt, RuntimeStats* rtStats, ObjectPrivateVisitor* opv,
+CollectRuntimeStatsHelper(JSContext* cx, RuntimeStats* rtStats, ObjectPrivateVisitor* opv,
                           bool anonymize, IterateCellCallback statsCellCallback)
 {
+    JSRuntime* rt = cx;
     if (!rtStats->compartmentStatsVector.reserve(rt->numCompartments))
         return false;

@@ -783,14 +784,14 @@ CollectRuntimeStatsHelper(JSRuntime* rt, RuntimeStats* rtStats, ObjectPrivateVis
     rtStats->gcHeapUnusedChunks =
         size_t(JS_GetGCParameter(rt, JSGC_UNUSED_CHUNKS)) * gc::ChunkSize;

-    IterateChunks(rt, &rtStats->gcHeapDecommittedArenas,
+    IterateChunks(cx, &rtStats->gcHeapDecommittedArenas,
                   DecommittedArenasChunkCallback);

     // Take the per-compartment measurements.
     StatsClosure closure(rtStats, opv, anonymize);
     if (!closure.init())
         return false;
-    IterateZonesCompartmentsArenasCells(rt, &closure,
+    IterateZonesCompartmentsArenasCells(cx, &closure,
                                         StatsZoneCallback,
                                         StatsCompartmentCallback,
                                         StatsArenaCallback,

@@ -865,10 +866,10 @@ CollectRuntimeStatsHelper(JSRuntime* rt, RuntimeStats* rtStats, ObjectPrivateVis
 }

 JS_PUBLIC_API(bool)
-JS::CollectRuntimeStats(JSRuntime *rt, RuntimeStats *rtStats, ObjectPrivateVisitor *opv,
+JS::CollectRuntimeStats(JSContext* cx, RuntimeStats *rtStats, ObjectPrivateVisitor *opv,
                         bool anonymize)
 {
-    return CollectRuntimeStatsHelper(rt, rtStats, opv, anonymize, StatsCellCallback<FineGrained>);
+    return CollectRuntimeStatsHelper(cx, rtStats, opv, anonymize, StatsCellCallback<FineGrained>);
 }

 JS_PUBLIC_API(size_t)

@@ -918,7 +919,7 @@ class SimpleJSRuntimeStats : public JS::RuntimeStats
 };

 JS_PUBLIC_API(bool)
-AddSizeOfTab(JSRuntime* rt, HandleObject obj, MallocSizeOf mallocSizeOf, ObjectPrivateVisitor* opv,
+AddSizeOfTab(JSContext* cx, HandleObject obj, MallocSizeOf mallocSizeOf, ObjectPrivateVisitor* opv,
              TabSizes* sizes)
 {
     SimpleJSRuntimeStats rtStats(mallocSizeOf);

@@ -936,7 +937,7 @@ AddSizeOfTab(JSRuntime* rt, HandleObject obj, MallocSizeOf mallocSizeOf, ObjectP
     StatsClosure closure(&rtStats, opv, /* anonymize = */ false);
     if (!closure.init())
         return false;
-    IterateZoneCompartmentsArenasCells(rt, zone, &closure,
+    IterateZoneCompartmentsArenasCells(cx, zone, &closure,
                                        StatsZoneCallback,
                                        StatsCompartmentCallback,
                                        StatsArenaCallback,

@@ -958,13 +959,13 @@ AddSizeOfTab(JSRuntime* rt, HandleObject obj, MallocSizeOf mallocSizeOf, ObjectP
 }

 JS_PUBLIC_API(bool)
-AddServoSizeOf(JSRuntime *rt, MallocSizeOf mallocSizeOf, ObjectPrivateVisitor *opv,
+AddServoSizeOf(JSContext* cx, MallocSizeOf mallocSizeOf, ObjectPrivateVisitor *opv,
                ServoSizes *sizes)
 {
     SimpleJSRuntimeStats rtStats(mallocSizeOf);

     // No need to anonymize because the results will be aggregated.
-    if (!CollectRuntimeStatsHelper(rt, &rtStats, opv, /* anonymize = */ false,
+    if (!CollectRuntimeStatsHelper(cx, &rtStats, opv, /* anonymize = */ false,
                                    StatsCellCallback<CoarseGrained>))
         return false;

@@ -385,8 +385,9 @@ JSRuntime::destroyRuntime()
      * Finish any in-progress GCs first. This ensures the parseWaitingOnGC
      * list is empty in CancelOffThreadParses.
      */
-    if (JS::IsIncrementalGCInProgress(this))
-        FinishGC(this);
+    JSContext* cx = contextFromMainThread();
+    if (JS::IsIncrementalGCInProgress(cx))
+        FinishGC(cx);

     /* Free source hook early, as its destructor may want to delete roots. */
     sourceHook = nullptr;

@@ -2952,7 +2952,7 @@ JSReporter::CollectReports(WindowPaths* windowPaths,
     XPCJSRuntimeStats rtStats(windowPaths, topWindowPaths, getLocations,
                               anonymize);
     OrphanReporter orphanReporter(XPCConvert::GetISupportsFromJSObject);
-    if (!JS::CollectRuntimeStats(xpcrt->Runtime(), &rtStats, &orphanReporter,
+    if (!JS::CollectRuntimeStats(xpcrt->Context(), &rtStats, &orphanReporter,
                                  anonymize))
     {
         return NS_ERROR_FAILURE;

@@ -3135,12 +3135,12 @@ static nsresult
 JSSizeOfTab(JSObject* objArg, size_t* jsObjectsSize, size_t* jsStringsSize,
             size_t* jsPrivateSize, size_t* jsOtherSize)
 {
-    JSRuntime* rt = nsXPConnect::GetRuntimeInstance()->Runtime();
-    JS::RootedObject obj(rt, objArg);
+    JSContext* cx = nsXPConnect::GetRuntimeInstance()->Context();
+    JS::RootedObject obj(cx, objArg);

     TabSizes sizes;
     OrphanReporter orphanReporter(XPCConvert::GetISupportsFromJSObject);
-    NS_ENSURE_TRUE(JS::AddSizeOfTab(rt, obj, moz_malloc_size_of,
+    NS_ENSURE_TRUE(JS::AddSizeOfTab(cx, obj, moz_malloc_size_of,
                                     &orphanReporter, &sizes),
                    NS_ERROR_OUT_OF_MEMORY);

@@ -625,8 +625,8 @@ XPCWrappedNative::UpdateScriptableInfo(XPCNativeScriptableInfo* si)
     MOZ_ASSERT(mScriptableInfo, "UpdateScriptableInfo expects an existing scriptable info");

     // Write barrier for incremental GC.
-    JSRuntime* rt = GetRuntime()->Runtime();
-    if (IsIncrementalBarrierNeeded(rt))
+    JSContext* cx = GetRuntime()->Context();
+    if (IsIncrementalBarrierNeeded(cx))
         mScriptableInfo->Mark();

     mScriptableInfo = si;

@@ -640,8 +640,8 @@ XPCWrappedNative::SetProto(XPCWrappedNativeProto* p)
     MOZ_ASSERT(HasProto());

     // Write barrier for incremental GC.
-    JSRuntime* rt = GetRuntime()->Runtime();
-    GetProto()->WriteBarrierPre(rt);
+    JSContext* cx = GetRuntime()->Context();
+    GetProto()->WriteBarrierPre(cx);

     mMaybeProto = p;
 }

@@ -1735,10 +1735,10 @@ public:
         TraceInside(trc);
     }

-    void WriteBarrierPre(JSRuntime* rt)
+    void WriteBarrierPre(JSContext* cx)
     {
-        if (JS::IsIncrementalBarrierNeeded(rt) && mJSProtoObject)
-            mJSProtoObject.writeBarrierPre(rt);
+        if (JS::IsIncrementalBarrierNeeded(cx) && mJSProtoObject)
+            mJSProtoObject.writeBarrierPre(cx);
     }

     // NOP. This is just here to make the AutoMarkingPtr code compile.

@@ -1226,7 +1226,7 @@ void
 CycleCollectedJSRuntime::FixWeakMappingGrayBits() const
 {
   MOZ_ASSERT(mJSRuntime);
-  MOZ_ASSERT(!JS::IsIncrementalGCInProgress(mJSRuntime),
+  MOZ_ASSERT(!JS::IsIncrementalGCInProgress(mJSContext),
              "Don't call FixWeakMappingGrayBits during a GC.");
   FixWeakMappingGrayBitsTracer fixer(mJSRuntime);
   fixer.FixAll();

@@ -1634,7 +1634,7 @@ CycleCollectedJSRuntime::OnGC(JSGCStatus aStatus)
 #endif

       // Do any deferred finalization of native objects.
-      FinalizeDeferredThings(JS::WasIncrementalGC(mJSRuntime) ? FinalizeIncrementally :
+      FinalizeDeferredThings(JS::WasIncrementalGC(mJSContext) ? FinalizeIncrementally :
                              FinalizeNow);
       break;
     }

@@ -3543,7 +3543,7 @@ nsCycleCollector::FixGrayBits(bool aForceGC, TimeLog& aTimeLog)
 bool
 nsCycleCollector::IsIncrementalGCInProgress()
 {
-  return mJSRuntime && JS::IsIncrementalGCInProgress(mJSRuntime->Runtime());
+  return mJSRuntime && JS::IsIncrementalGCInProgress(mJSRuntime->Context());
 }

 void