Mirror of https://github.com/mozilla/gecko-dev.git
Bug 650161 - Enable compacting GC on GC_SHRINK collections r=terrence r=glandium
This commit is contained in:
Parent
90c9dbbc91
Commit
8b0024a51a
@@ -3052,18 +3052,6 @@ MOZ_ARG_WITH_STRING(wrap-malloc,
[  --with-wrap-malloc=DIR  Location of malloc wrapper library],
    WRAP_LDFLAGS="${WRAP_LDFLAGS} $withval")

dnl ========================================================
dnl = Use compacting GC
dnl ========================================================
dnl Compact the heap by moving GC things when doing a shrinking colletion.
MOZ_ARG_ENABLE_BOOL(gccompacting,
[  --enable-gccompacting  Compact the heap by moving GC things],
    JSGC_COMPACTING=1,
    JSGC_COMPACTING= )
if test -n "$JSGC_COMPACTING"; then
    AC_DEFINE(JSGC_COMPACTING)
fi

dnl ========================================================
dnl = Use a smaller chunk size for GC chunks
dnl ========================================================
@@ -143,7 +143,6 @@ void
CheckHashTablesAfterMovingGC(JSRuntime *rt);
#endif

#ifdef JSGC_COMPACTING
struct MovingTracer : JSTracer {
    explicit MovingTracer(JSRuntime *rt) : JSTracer(rt, Visit, TraceWeakMapKeysValues) {}
@@ -152,7 +151,6 @@ struct MovingTracer : JSTracer {
        return trc->callback == Visit;
    }
};
#endif

} /* namespace gc */
} /* namespace js */
@@ -34,11 +34,8 @@ struct FinalizePhase;
class MarkingValidator;
struct AutoPrepareForTracing;
class AutoTraceSession;

#ifdef JSGC_COMPACTING
struct ArenasToUpdate;
struct MovingTracer;
#endif

class ChunkPool
{
@@ -297,11 +294,7 @@ class GCRuntime
    bool isHeapMajorCollecting() { return heapState == js::MajorCollecting; }
    bool isHeapMinorCollecting() { return heapState == js::MinorCollecting; }
    bool isHeapCollecting() { return isHeapMajorCollecting() || isHeapMinorCollecting(); }
#ifdef JSGC_COMPACTING
    bool isHeapCompacting() { return isHeapMajorCollecting() && state() == COMPACT; }
#else
    bool isHeapCompacting() { return false; }
#endif

    bool triggerGC(JS::gcreason::Reason reason);
    void maybeAllocTriggerZoneGC(Zone *zone, const AutoLockGC &lock);
@@ -437,11 +430,9 @@ class GCRuntime
    void disableGenerationalGC();
    void enableGenerationalGC();

#ifdef JSGC_COMPACTING
    void disableCompactingGC();
    void enableCompactingGC();
    bool isCompactingGCEnabled();
#endif

    void setGrayRootsTracer(JSTraceDataOp traceOp, void *data);
    bool addBlackRootsTracer(JSTraceDataOp traceOp, void *data);
@@ -604,7 +595,6 @@ class GCRuntime
    void assertBackgroundSweepingFinished();
    bool shouldCompact();
    bool compactPhase(bool lastGC);
#ifdef JSGC_COMPACTING
    void sweepTypesAfterCompacting(Zone *zone);
    void sweepZoneAfterCompacting(Zone *zone);
    ArenaHeader *relocateArenas();
@@ -616,7 +606,6 @@ class GCRuntime
#ifdef DEBUG
    void protectRelocatedArenas(ArenaHeader *relocatedList);
    void unprotectRelocatedArenas(ArenaHeader *relocatedList);
#endif
#endif
    void finishCollection();

@@ -811,13 +800,11 @@ class GCRuntime
     */
    unsigned generationalDisabled;

#ifdef JSGC_COMPACTING
    /*
     * Some code cannot tolerate compacting GC so it can be disabled with this
     * counter.
     */
    unsigned compactingDisabled;
#endif

    /*
     * This is true if we are in the middle of a brain transplant (e.g.,
@@ -918,9 +905,7 @@ class GCRuntime

    size_t noGCOrAllocationCheck;

#ifdef JSGC_COMPACTING
    ArenaHeader* relocatedArenasToRelease;
#endif

#endif
@@ -642,10 +642,8 @@ struct ArenaHeader

    void unmarkAll();

#ifdef JSGC_COMPACTING
    size_t countUsedCells();
    size_t countFreeCells();
#endif
};
static_assert(ArenaZoneOffset == offsetof(ArenaHeader, zone),
              "The hardcoded API zone offset must match the actual offset.");
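Note: the per-arena cell counts above feed the relocation heuristic (see pickArenasToRelocate in the ArenaList hunk further down). As a rough illustration only — not SpiderMonkey's actual policy — an arena chooser might prefer mostly-empty arenas, so that moving their few live cells frees whole blocks. The names and the 50% cutoff below are illustrative assumptions.

```cpp
// Toy model only: pick "arenas" whose live-cell ratio is below a threshold,
// so that evacuating them frees the whole block.
#include <cstddef>
#include <vector>

struct ToyArena {
    size_t usedCells;   // analogous to ArenaHeader::countUsedCells()
    size_t freeCells;   // analogous to ArenaHeader::countFreeCells()
};

static std::vector<ToyArena*>
pickRelocationCandidates(std::vector<ToyArena> &arenas)
{
    std::vector<ToyArena*> candidates;
    for (ToyArena &a : arenas) {
        size_t capacity = a.usedCells + a.freeCells;
        // Evacuating a mostly-empty arena frees more memory than it costs to move.
        if (capacity != 0 && a.usedCells * 2 < capacity)
            candidates.push_back(&a);
    }
    return candidates;
}
```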
@@ -161,18 +161,14 @@ CheckMarkedThing(JSTracer *trc, T **thingp)
    T *thing = *thingp;
    MOZ_ASSERT(*thingp);

#ifdef JSGC_COMPACTING
    thing = MaybeForwarded(thing);
#endif

    /* This function uses data that's not available in the nursery. */
    if (IsInsideNursery(thing))
        return;

#ifdef JSGC_COMPACTING
    MOZ_ASSERT_IF(!MovingTracer::IsMovingTracer(trc) && !Nursery::IsMinorCollectionTracer(trc),
                  !IsForwarded(*thingp));
#endif

    /*
     * Permanent atoms are not associated with this runtime, but will be ignored
@@ -184,13 +180,8 @@ CheckMarkedThing(JSTracer *trc, T **thingp)
    Zone *zone = thing->zoneFromAnyThread();
    JSRuntime *rt = trc->runtime();

#ifdef JSGC_COMPACTING
    MOZ_ASSERT_IF(!MovingTracer::IsMovingTracer(trc), CurrentThreadCanAccessZone(zone));
    MOZ_ASSERT_IF(!MovingTracer::IsMovingTracer(trc), CurrentThreadCanAccessRuntime(rt));
#else
    MOZ_ASSERT(CurrentThreadCanAccessZone(zone));
    MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt));
#endif

    MOZ_ASSERT(zone->runtimeFromAnyThread() == trc->runtime());
    MOZ_ASSERT(trc->hasTracingDetails());
@@ -437,10 +428,8 @@ IsMarkedFromAnyThread(T **thingp)
    Zone *zone = (*thingp)->asTenured().zoneFromAnyThread();
    if (!zone->isCollectingFromAnyThread() || zone->isGCFinished())
        return true;
#ifdef JSGC_COMPACTING
    if (zone->isGCCompacting() && IsForwarded(*thingp))
        *thingp = Forwarded(*thingp);
#endif
    return (*thingp)->asTenured().isMarked();
}
@@ -481,12 +470,10 @@ IsAboutToBeFinalizedFromAnyThread(T **thingp)
            return false;
        return !thing->asTenured().isMarked();
    }
#ifdef JSGC_COMPACTING
    else if (zone->isGCCompacting() && IsForwarded(thing)) {
        *thingp = Forwarded(thing);
        return false;
    }
#endif

    return false;
}
@@ -504,11 +491,10 @@ UpdateIfRelocated(JSRuntime *rt, T **thingp)
        return *thingp;
    }

#ifdef JSGC_COMPACTING
    Zone *zone = (*thingp)->zone();
    if (zone->isGCCompacting() && IsForwarded(*thingp))
        *thingp = Forwarded(*thingp);
#endif

    return *thingp;
}
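Note: the IsForwarded/Forwarded/MaybeForwarded calls in these marking hunks follow the usual moving-GC scheme: when a cell is relocated, its old slot is overwritten with a forwarding pointer to the new location, and readers consult it before dereferencing. A minimal, self-contained sketch of the idea; the Cell layout, low-bit tag, and helper names below are stand-ins, not SpiderMonkey's actual representation.

```cpp
// Toy illustration of forwarding pointers.
#include <cassert>
#include <cstdint>

struct Cell {
    // Low bit of the first word doubles as the "forwarded" flag, a common trick.
    uintptr_t firstWord;

    bool isForwarded() const { return firstWord & 1; }
    Cell *forwardingAddress() const {
        assert(isForwarded());
        return reinterpret_cast<Cell *>(firstWord & ~uintptr_t(1));
    }
    void forwardTo(Cell *newLocation) {
        firstWord = reinterpret_cast<uintptr_t>(newLocation) | 1;
    }
};

// Analogue of MaybeForwarded(): follow the forwarding pointer if one was installed.
inline Cell *MaybeForwardedToy(Cell *cell) {
    return cell->isForwarded() ? cell->forwardingAddress() : cell;
}
```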
@@ -31,9 +31,6 @@
/* Define to 1 if SpiderMonkey should use small chunks. */
#undef JS_GC_SMALL_CHUNK_SIZE

/* Define to 1 if SpiderMonkey should use Compacting GC. */
#undef JSGC_COMPACTING

/* Define to 1 if the <endian.h> header is present and
   useable. See jscpucfg.h. */
#undef JS_HAVE_ENDIAN_H
@@ -66,10 +66,6 @@ checkSize(JS::HandleObject map, uint32_t expected)
}
END_TEST(testWeakMap_basicOperations)

// TODO: this test stores object pointers in a private slot which is not marked
// and so doesn't work with compacting GC.
#ifndef JSGC_COMPACTING

BEGIN_TEST(testWeakMap_keyDelegates)
{
    JS_SetGCParameter(rt, JSGC_MODE, JSGC_MODE_INCREMENTAL);
@@ -253,5 +249,3 @@ checkSize(JS::HandleObject map, uint32_t expected)
    return true;
}
END_TEST(testWeakMap_keyDelegates)

#endif
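Note: the TODO above describes a common hazard with a moving collector: a raw object pointer stashed via an untraced private slot is invisible to the tracer, so nothing updates it when the object is relocated. A hedged sketch of the problematic pattern, illustrative only (the helper functions are invented; JS_SetPrivate/JS_GetPrivate are the era's JSAPI calls for opaque private data).

```cpp
#include "jsapi.h"

// Illustrative only: storing a GC thing as untraced private data. If a
// compacting GC moves 'delegate', this raw pointer is never updated and
// now dangles, which is why the test above is disabled under JSGC_COMPACTING.
static void
stashDelegate(JSObject *key, JSObject *delegate)
{
    JS_SetPrivate(key, delegate);          // hidden, untraced edge
}

static JSObject *
readDelegate(JSObject *key)
{
    return static_cast<JSObject *>(JS_GetPrivate(key));  // may be stale after a moving GC
}
```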
@@ -649,8 +649,6 @@ JSCompartment::sweepCrossCompartmentWrappers()
    }
}

#ifdef JSGC_COMPACTING

void JSCompartment::fixupAfterMovingGC()
{
    fixupGlobal();
@@ -667,8 +665,6 @@ JSCompartment::fixupGlobal()
        global_.set(MaybeForwarded(global));
}

#endif // JSGC_COMPACTING

void
JSCompartment::purge()
{
@@ -398,12 +398,10 @@ struct JSCompartment
    void purge();
    void clearTables();

#ifdef JSGC_COMPACTING
    void fixupInitialShapeTable();
    void fixupNewTypeObjectTable(js::types::NewTypeObjectTable &table);
    void fixupAfterMovingGC();
    void fixupGlobal();
#endif

    bool hasObjectMetadataCallback() const { return objectMetadataCallback; }
    void setObjectMetadataCallback(js::ObjectMetadataCallback callback);
@@ -1112,9 +1112,7 @@ GCRuntime::GCRuntime(JSRuntime *rt) :
    sliceBudget(SliceBudget::Unlimited),
    incrementalAllowed(true),
    generationalDisabled(0),
#ifdef JSGC_COMPACTING
    compactingDisabled(0),
#endif
    manipulatingDeadZones(false),
    objectsMarkedInDeadZones(0),
    poked(false),
@@ -1134,9 +1132,7 @@ GCRuntime::GCRuntime(JSRuntime *rt) :
#ifdef DEBUG
    inUnsafeRegion(0),
    noGCOrAllocationCheck(0),
#ifdef JSGC_COMPACTING
    relocatedArenasToRelease(nullptr),
#endif
#endif
    lock(nullptr),
    lockOwner(nullptr),
@@ -1930,15 +1926,9 @@ ArenaLists::allocateFromArenaInner(JS::Zone *zone, ArenaHeader *aheader, AllocKi
bool
GCRuntime::shouldCompact()
{
#ifdef JSGC_COMPACTING
    return invocationKind == GC_SHRINK && isCompactingGCEnabled();
#else
    return false;
#endif
}

#ifdef JSGC_COMPACTING

void
GCRuntime::disableCompactingGC()
{
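Note: shouldCompact() is the runtime gate this patch relies on: compaction only runs on GC_SHRINK collections and only while no caller has disabled it. The compactingDisabled counter declared earlier suggests the usual nestable enable/disable scheme; the sketch below is a toy under that assumption, not the GCRuntime implementation.

```cpp
// Toy sketch of a nestable disable counter, mirroring how a field like
// compactingDisabled is typically used; the method bodies are assumptions.
#include <cassert>

class ToyCompactingGate {
    unsigned compactingDisabled = 0;

  public:
    void disableCompactingGC() { ++compactingDisabled; }
    void enableCompactingGC() {
        assert(compactingDisabled > 0);
        --compactingDisabled;
    }
    bool isCompactingGCEnabled() const { return compactingDisabled == 0; }

    // Analogue of GCRuntime::shouldCompact(): compact only on shrinking GCs
    // while nothing has disabled compaction.
    bool shouldCompact(bool isShrinkingGC) const {
        return isShrinkingGC && isCompactingGCEnabled();
    }
};
```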
@@ -2715,12 +2705,10 @@ GCRuntime::releaseRelocatedArenasWithoutUnlocking(ArenaHeader *relocatedList, co
    }
}

#endif // JSGC_COMPACTING

void
GCRuntime::releaseHeldRelocatedArenas()
{
#if defined(JSGC_COMPACTING) && defined(DEBUG)
#ifdef DEBUG
    // In debug mode we don't release relocated arenas straight away. Instead
    // we protect them and hold onto them until the next GC sweep phase to catch
    // any pointers to them that didn't get forwarded.
@@ -5480,9 +5468,6 @@ GCRuntime::endSweepPhase(bool lastGC)
bool
GCRuntime::compactPhase(bool lastGC)
{
#ifndef JSGC_COMPACTING
    MOZ_CRASH();
#else
    gcstats::AutoPhase ap(stats, gcstats::PHASE_COMPACT);

    if (isIncremental) {
@@ -5547,8 +5532,6 @@ GCRuntime::compactPhase(bool lastGC)
        }
    }
#endif

#endif // JSGC_COMPACTING
    return true;
}
@@ -5708,7 +5691,6 @@ GCRuntime::resetIncrementalGC(const char *reason)
        break;
      }

#ifdef JSGC_COMPACTING
      case COMPACT: {
        {
            gcstats::AutoPhase ap(stats, gcstats::PHASE_WAIT_BACKGROUND_THREAD);
@@ -5724,7 +5706,6 @@ GCRuntime::resetIncrementalGC(const char *reason)
        invocationKind = oldInvocationKind;
        break;
      }
#endif

      default:
        MOZ_CRASH("Invalid incremental GC state");
@@ -6412,7 +6393,7 @@ GCRuntime::onOutOfMallocMemory(const AutoLockGC &lock)
{
    // Release any relocated arenas we may be holding on to, without releasing
    // the GC lock.
#if defined(JSGC_COMPACTING) && defined(DEBUG)
#ifdef DEBUG
    unprotectRelocatedArenas(relocatedArenasToRelease);
    releaseRelocatedArenasWithoutUnlocking(relocatedArenasToRelease, lock);
    relocatedArenasToRelease = nullptr;
@@ -7133,19 +7114,13 @@ JS::IsIncrementalGCEnabled(JSRuntime *rt)
JS_PUBLIC_API(void)
JS::DisableCompactingGC(JSRuntime *rt)
{
#ifdef JSGC_COMPACTING
    rt->gc.disableCompactingGC();
#endif
}

JS_PUBLIC_API(bool)
JS::IsCompactingGCEnabled(JSRuntime *rt)
{
#ifdef JSGC_COMPACTING
    return rt->gc.isCompactingGCEnabled();
#else
    return false;
#endif
}

JS_PUBLIC_API(bool)
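Note: with the build-time flag gone, these two JSAPI entry points are how an embedder opts out of, or checks for, compaction at runtime. A hedged usage sketch; the surrounding function, its name, and the header choice are assumed boilerplate, not part of this patch.

```cpp
// Illustrative embedder snippet for the JSAPI of this era.
#include "jsapi.h"

static void
configureGC(JSRuntime *rt, bool allowCompaction)
{
    if (!allowCompaction)
        JS::DisableCompactingGC(rt);   // permanent opt-out for this runtime

    if (JS::IsCompactingGCEnabled(rt)) {
        // Shrinking (GC_SHRINK) collections may now move objects, so the
        // embedder must not hold raw, untraced pointers to GC things.
    }
}
```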
@@ -454,11 +454,9 @@ class ArenaList {
        return *this;
    }

#ifdef JSGC_COMPACTING
    ArenaHeader *removeRemainingArenas(ArenaHeader **arenap, const AutoLockGC &lock);
    ArenaHeader *pickArenasToRelocate(JSRuntime *runtime);
    ArenaHeader *relocateArenas(ArenaHeader *toRelocate, ArenaHeader *relocated);
#endif
};

/*
@@ -785,9 +783,7 @@ class ArenaLists
        MOZ_ASSERT(freeLists[kind].isEmpty());
    }

#ifdef JSGC_COMPACTING
    ArenaHeader *relocateArenas(ArenaHeader *relocatedList);
#endif

    void queueForegroundObjectsForSweep(FreeOp *fop);
    void queueForegroundThingsForSweep(FreeOp *fop);
@@ -1270,9 +1266,7 @@ inline void
CheckGCThingAfterMovingGC(T *t)
{
    MOZ_ASSERT_IF(t, !IsInsideNursery(t));
#ifdef JSGC_COMPACTING
    MOZ_ASSERT_IF(t, !IsForwarded(t));
#endif
}

inline void
@@ -1429,16 +1423,11 @@ struct AutoDisableProxyCheck

struct AutoDisableCompactingGC
{
#ifdef JSGC_COMPACTING
    explicit AutoDisableCompactingGC(JSRuntime *rt);
    ~AutoDisableCompactingGC();

  private:
    gc::GCRuntime &gc;
#else
    explicit AutoDisableCompactingGC(JSRuntime *rt) {}
    ~AutoDisableCompactingGC() {}
#endif
};

void
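Note: AutoDisableCompactingGC is the RAII counterpart of the disable/enable counter: compaction is disabled for the lifetime of the guard and re-enabled when it goes out of scope. A hedged usage sketch; the caller, its name, and the js:: namespace qualification are assumptions, not code from this patch.

```cpp
// Hypothetical caller that hands out a raw pointer to a GC thing and
// therefore cannot tolerate objects moving underneath it.
void
withStablePointers(JSRuntime *rt)
{
    js::AutoDisableCompactingGC noCompact(rt);   // disable in the constructor

    // ... code that relies on GC things not being relocated ...

}   // destructor re-enables compacting GC here
```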
@@ -4891,8 +4891,6 @@ JSCompartment::sweepNewTypeObjectTable(NewTypeObjectTable &table)
    }
}

#ifdef JSGC_COMPACTING

void
JSCompartment::fixupNewTypeObjectTable(NewTypeObjectTable &table)
{
@@ -4966,8 +4964,6 @@ TypeObject::fixupAfterMovingGC()
    }
}

#endif // JSGC_COMPACTING

#ifdef JSGC_HASH_TABLE_CHECKS

void
@@ -925,10 +925,7 @@ class TypeNewScript

    void trace(JSTracer *trc);
    void sweep();

#ifdef JSGC_COMPACTING
    void fixupAfterMovingGC();
#endif

    void registerNewObject(PlainObject *res);
    void unregisterNewObject(PlainObject *res);
@@ -1241,9 +1238,7 @@ struct TypeObject : public gc::TenuredCell
        flags_ |= generation << OBJECT_FLAG_GENERATION_SHIFT;
    }

#ifdef JSGC_COMPACTING
    void fixupAfterMovingGC();
#endif

    size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const;
@@ -235,8 +235,6 @@ Shape::finalize(FreeOp *fop)
        fop->delete_(kids.toHash());
}

#ifdef JSGC_COMPACTING

void
Shape::fixupDictionaryShapeAfterMovingGC()
{
@@ -322,8 +320,6 @@ Shape::fixupAfterMovingGC()
    fixupShapeTreeAfterMovingGC();
}

#endif // JSGC_COMPACTING

void
ShapeGetterSetterRef::mark(JSTracer *trc)
{
@@ -20,8 +20,7 @@

#include "js/TypeDecls.h"

#if (defined(JS_GC_ZEAL)) || \
    (defined(JSGC_COMPACTING) && defined(DEBUG))
#if (defined(JS_GC_ZEAL)) || defined(DEBUG)
# define JSGC_HASH_TABLE_CHECKS
#endif
@@ -224,14 +224,12 @@ GetShapeAttributes(JSObject *obj, Shape *shape)
    return shape->attributes();
}

#ifdef JSGC_COMPACTING
inline void
BaseShape::fixupAfterMovingGC()
{
    if (hasTable())
        table().fixupAfterMovingGC();
}
#endif

} /* namespace js */
@@ -256,7 +256,6 @@ ShapeTable::search(jsid id, bool adding)
    MOZ_CRASH("Shape::search failed to find an expected entry.");
}

#ifdef JSGC_COMPACTING
void
ShapeTable::fixupAfterMovingGC()
{
@@ -268,7 +267,6 @@ ShapeTable::fixupAfterMovingGC()
            entry.setPreservingCollision(Forwarded(shape));
    }
}
#endif

bool
ShapeTable::change(int log2Delta, ExclusiveContext *cx)
@@ -1693,7 +1691,6 @@ JSCompartment::sweepInitialShapeTable()
    }
}

#ifdef JSGC_COMPACTING
void
JSCompartment::fixupInitialShapeTable()
{
@@ -1732,7 +1729,6 @@ JSCompartment::fixupInitialShapeTable()
        }
    }
}
#endif // JSGC_COMPACTING

void
AutoRooterGetterSetter::Inner::trace(JSTracer *trc)
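Note: the various fixupAfterMovingGC() methods in this patch all follow the same post-compaction pass: walk a structure that holds raw pointers into the GC heap and rewrite any entry whose target was forwarded, as ShapeTable does with setPreservingCollision(Forwarded(shape)) above. A toy sketch of that pattern; the container, cell type, and helpers are stand-ins, not the real ShapeTable code.

```cpp
// Toy fixup pass over a table of raw pointers, mirroring the shape of
// ShapeTable::fixupAfterMovingGC().
#include <vector>

struct ToyCell {
    ToyCell *forwarded = nullptr;               // set when the cell was moved
};

inline bool IsForwardedToy(const ToyCell *c) { return c->forwarded != nullptr; }
inline ToyCell *ForwardedToy(ToyCell *c)     { return c->forwarded; }

static void
fixupTableAfterMovingGC(std::vector<ToyCell*> &table)
{
    for (ToyCell *&entry : table) {
        if (entry && IsForwardedToy(entry))
            entry = ForwardedToy(entry);        // rewrite the stale pointer in place
    }
}
```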
@@ -227,10 +227,8 @@ class ShapeTable {
    bool change(int log2Delta, ExclusiveContext *cx);
    Entry &search(jsid id, bool adding);

#ifdef JSGC_COMPACTING
    /* Update entries whose shapes have been moved */
    void fixupAfterMovingGC();
#endif

  private:
    Entry &getEntry(uint32_t i) const {
@@ -530,9 +528,7 @@ class BaseShape : public gc::TenuredCell
        gc::MarkObject(trc, &metadata, "metadata");
    }

#ifdef JSGC_COMPACTING
    void fixupAfterMovingGC();
#endif

  private:
    static void staticAsserts() {
@@ -1061,9 +1057,7 @@ class Shape : public gc::TenuredCell
    inline Shape *search(ExclusiveContext *cx, jsid id);
    inline Shape *searchLinear(jsid id);

#ifdef JSGC_COMPACTING
    void fixupAfterMovingGC();
#endif

    /* For JIT usage */
    static inline size_t offsetOfBase() { return offsetof(Shape, base_); }
@@ -1071,10 +1065,8 @@ class Shape : public gc::TenuredCell
    static inline uint32_t fixedSlotsMask() { return FIXED_SLOTS_MASK; }

  private:
#ifdef JSGC_COMPACTING
    void fixupDictionaryShapeAfterMovingGC();
    void fixupShapeTreeAfterMovingGC();
#endif

    static void staticAsserts() {
        JS_STATIC_ASSERT(offsetof(Shape, base_) == offsetof(js::shadow::Shape, base));