Bug 1010655 - Always use the faster version of IsInsideNursery when possible; r=jonco

--HG--
extra : rebase_source : 18c195a3a5897529275deb210bfa7f57f8e29bd8
Terrence Cole 2014-05-14 19:48:09 -07:00
Parent 7c3190bd62
Commit 9900a0509a
41 changed files with 223 additions and 177 deletions
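In outline, this patch deletes the two-argument js::gc::IsInsideNursery(JS::shadow::Runtime *, const void *) overload and splits its callers: anything already typed as a GC cell moves to the faster js::gc::IsInsideNursery(const js::gc::Cell *), which needs no runtime pointer, while arbitrary, possibly-non-cell pointers (slot arrays, elements, stack addresses) go through Nursery::isInside(const void *), whose address-range check now reads the bounds from the Nursery itself rather than the shadow runtime. The standalone sketch below models the difference; the chunk geometry, names, and the placement of the location word are simplified assumptions, not the real SpiderMonkey definitions.

// nursery_check_model.cpp -- illustrative model only, not SpiderMonkey code.
#include <cstdint>

namespace model {

// Assumed chunk geometry; the real constants live in js/public/HeapAPI.h.
constexpr uintptr_t ChunkShift = 20;
constexpr uintptr_t ChunkMask  = (uintptr_t(1) << ChunkShift) - 1;
constexpr uint32_t  ChunkLocationNursery     = 0;   // mirrors the HeapAPI.h hunk below
constexpr uint32_t  ChunkLocationTenuredHeap = 1;

struct ChunkHeader { uint32_t location; };  // assumption: location word at the chunk start
struct Cell {};                             // stand-in for js::gc::Cell

// Fast check: a GC cell always lives inside an aligned chunk that records
// whether it belongs to the nursery, so one masked load answers the question
// without ever touching the JSRuntime.
inline bool IsInsideNursery(const Cell *cell) {
    uintptr_t chunk = uintptr_t(cell) & ~ChunkMask;
    return reinterpret_cast<const ChunkHeader *>(chunk)->location == ChunkLocationNursery;
}

// Slow check: an arbitrary pointer may not be a cell at all, so the only safe
// test is a range comparison against the nursery's reserved address span --
// exactly what Nursery::isInside() keeps doing after this patch, now using
// its own heapStart_/heapEnd_ fields instead of the shadow runtime's.
struct Nursery {
    uintptr_t heapStart_ = 0, heapEnd_ = 0;
    bool isInside(const void *p) const {
        return uintptr_t(p) >= heapStart_ && uintptr_t(p) < heapEnd_;
    }
};

} // namespace model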

View file

@ -465,7 +465,7 @@ ExposeGCThingToActiveJS(void *thing, JSGCTraceKind kind)
* All live objects in the nursery are moved to tenured at the beginning of
* each GC slice, so the gray marker never sees nursery things.
*/
if (js::gc::IsInsideNursery(rt, thing))
if (js::gc::IsInsideNursery((js::gc::Cell *)thing))
return;
#endif
if (IsIncrementalBarrierNeededOnGCThing(rt, thing, kind))
@ -498,7 +498,7 @@ MarkGCThingAsLive(JSRuntime *rt_, void *thing, JSGCTraceKind kind)
/*
* Any object in the nursery will not be freed during any GC running at that time.
*/
if (js::gc::IsInsideNursery(rt, thing))
if (js::gc::IsInsideNursery((js::gc::Cell *)thing))
return;
#endif
if (IsIncrementalBarrierNeededOnGCThing(rt, thing, kind))

View file

@ -61,14 +61,65 @@ static const uint32_t GRAY = 1;
const uintptr_t ChunkLocationNursery = 0;
const uintptr_t ChunkLocationTenuredHeap = 1;
#ifdef JS_DEBUG
/* When downcasting, ensure we are actually the right type. */
extern JS_FRIEND_API(void)
AssertGCThingHasType(js::gc::Cell *cell, JSGCTraceKind kind);
#else
inline void
AssertGCThingHasType(js::gc::Cell *cell, JSGCTraceKind kind) {}
#endif
} /* namespace gc */
} /* namespace js */
namespace JS {
struct Zone;
} /* namespace JS */
namespace JS {
/*
* We cannot expose the class hierarchy: the implementation is hidden. Instead
* we provide cast functions with strong debug-mode assertions.
*/
static MOZ_ALWAYS_INLINE js::gc::Cell *
AsCell(JSObject *obj)
{
js::gc::Cell *cell = reinterpret_cast<js::gc::Cell *>(obj);
AssertGCThingHasType(cell, JSTRACE_OBJECT);
return cell;
}
static MOZ_ALWAYS_INLINE js::gc::Cell *
AsCell(JSFunction *fun)
{
js::gc::Cell *cell = reinterpret_cast<js::gc::Cell *>(fun);
AssertGCThingHasType(cell, JSTRACE_OBJECT);
return cell;
}
static MOZ_ALWAYS_INLINE js::gc::Cell *
AsCell(JSString *str)
{
js::gc::Cell *cell = reinterpret_cast<js::gc::Cell *>(str);
AssertGCThingHasType(cell, JSTRACE_STRING);
return cell;
}
static MOZ_ALWAYS_INLINE js::gc::Cell *
AsCell(JSFlatString *flat)
{
js::gc::Cell *cell = reinterpret_cast<js::gc::Cell *>(flat);
AssertGCThingHasType(cell, JSTRACE_STRING);
return cell;
}
static MOZ_ALWAYS_INLINE js::gc::Cell *
AsCell(JSScript *script)
{
js::gc::Cell *cell = reinterpret_cast<js::gc::Cell *>(script);
AssertGCThingHasType(cell, JSTRACE_SCRIPT);
return cell;
}
namespace shadow {
struct ArenaHeader
@ -164,16 +215,6 @@ GetGCThingArena(void *thing)
return reinterpret_cast<JS::shadow::ArenaHeader *>(addr);
}
MOZ_ALWAYS_INLINE bool
IsInsideNursery(const JS::shadow::Runtime *runtime, const void *p)
{
#ifdef JSGC_GENERATIONAL
return uintptr_t(p) >= runtime->gcNurseryStart_ && uintptr_t(p) < runtime->gcNurseryEnd_;
#else
return false;
#endif
}
MOZ_ALWAYS_INLINE bool
IsInsideNursery(const js::gc::Cell *cell)
{
@ -220,8 +261,7 @@ GCThingIsMarkedGray(void *thing)
* All live objects in the nursery are moved to tenured at the beginning of
* each GC slice, so the gray marker never sees nursery things.
*/
JS::shadow::Runtime *rt = js::gc::GetGCThingRuntime(thing);
if (js::gc::IsInsideNursery(rt, thing))
if (js::gc::IsInsideNursery((js::gc::Cell *)thing))
return false;
#endif
uintptr_t *word, mask;
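The JS::AsCell() overloads added in this file exist so that public pointer types (JSObject, JSString, JSScript, and so on) can reach the cell-based check without exposing the GC class hierarchy; in debug builds AssertGCThingHasType() verifies that the downcast matches the expected trace kind (its definition appears in the jsgc.cpp hunk near the end of this patch). A hypothetical caller, in the spirit of the OBJECT_TO_JSID and PostBarrierTypedArrayObject hunks below:

// Sketch only; these helper names are made up for illustration.
static inline bool
ObjectIsTenured(JSObject *obj)
{
    // JSObject* -> Cell* with a debug-mode trace-kind assertion, then the
    // fast, runtime-free nursery check.
    return !js::gc::IsInsideNursery(JS::AsCell(obj));
}

static inline bool
StringIsInNursery(JSString *str)
{
    return js::gc::IsInsideNursery(JS::AsCell(str));
}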

View file

@ -117,7 +117,7 @@ OBJECT_TO_JSID(JSObject *obj)
jsid id;
MOZ_ASSERT(obj != nullptr);
MOZ_ASSERT(((size_t)obj & JSID_TYPE_MASK) == 0);
JS_ASSERT(!js::gc::IsInsideNursery(js::gc::GetGCThingRuntime(obj), obj));
JS_ASSERT(!js::gc::IsInsideNursery(JS::AsCell(obj)));
JSID_BITS(id) = ((size_t)obj | JSID_TYPE_OBJECT);
return id;
}

View file

@ -1089,8 +1089,7 @@ bool
Cell::isTenured() const
{
#ifdef JSGC_GENERATIONAL
JS::shadow::Runtime *rt = js::gc::GetGCThingRuntime(this);
return !IsInsideNursery(rt, this);
return !IsInsideNursery(this);
#endif
return true;
}

View file

@ -172,7 +172,7 @@ CheckMarkedThing(JSTracer *trc, T **thingp)
#ifdef DEBUG
/* This function uses data that's not available in the nursery. */
if (IsInsideNursery(trc->runtime(), thing))
if (IsInsideNursery(thing))
return;
/*
@ -235,7 +235,7 @@ MarkInternal(JSTracer *trc, T **thingp)
* not needed in this case because we perform a minor collection before
* each incremental slice.
*/
if (IsInsideNursery(trc->runtime(), thing))
if (IsInsideNursery(thing))
return;
/*
@ -357,9 +357,10 @@ IsMarked(T **thingp)
JS_ASSERT(thingp);
JS_ASSERT(*thingp);
#ifdef JSGC_GENERATIONAL
Nursery &nursery = (*thingp)->runtimeFromMainThread()->gc.nursery;
if (nursery.isInside(*thingp))
if (IsInsideNursery(*thingp)) {
Nursery &nursery = (*thingp)->runtimeFromMainThread()->gc.nursery;
return nursery.getForwardedPointer(thingp);
}
#endif
Zone *zone = (*thingp)->tenuredZone();
if (!zone->isCollecting() || zone->isGCFinished())
@ -383,9 +384,9 @@ IsAboutToBeFinalized(T **thingp)
#ifdef JSGC_GENERATIONAL
Nursery &nursery = rt->gc.nursery;
JS_ASSERT_IF(!rt->isHeapMinorCollecting(), !nursery.isInside(thing));
JS_ASSERT_IF(!rt->isHeapMinorCollecting(), !IsInsideNursery(thing));
if (rt->isHeapMinorCollecting()) {
if (nursery.isInside(thing))
if (IsInsideNursery(thing))
return !nursery.getForwardedPointer(thingp);
return false;
}
@ -412,7 +413,7 @@ UpdateIfRelocated(JSRuntime *rt, T **thingp)
{
JS_ASSERT(thingp);
#ifdef JSGC_GENERATIONAL
if (*thingp && rt->isHeapMinorCollecting() && rt->gc.nursery.isInside(*thingp))
if (*thingp && rt->isHeapMinorCollecting() && IsInsideNursery(*thingp))
rt->gc.nursery.getForwardedPointer(thingp);
#endif
return *thingp;
@ -786,7 +787,7 @@ ShouldMarkCrossCompartment(JSTracer *trc, JSObject *src, Cell *cell)
uint32_t color = AsGCMarker(trc)->getMarkColor();
JS_ASSERT(color == BLACK || color == GRAY);
if (IsInsideNursery(trc->runtime(), cell)) {
if (IsInsideNursery(cell)) {
JS_ASSERT(color == BLACK);
return false;
}
@ -876,7 +877,7 @@ static void
PushMarkStack(GCMarker *gcmarker, ObjectImpl *thing)
{
JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing);
JS_ASSERT(!IsInsideNursery(gcmarker->runtime(), thing));
JS_ASSERT(!IsInsideNursery(thing));
if (thing->markIfUnmarked(gcmarker->getMarkColor()))
gcmarker->pushObject(thing);
@ -892,11 +893,11 @@ PushMarkStack(GCMarker *gcmarker, ObjectImpl *thing)
static void
MaybePushMarkStackBetweenSlices(GCMarker *gcmarker, JSObject *thing)
{
JSRuntime *rt = gcmarker->runtime();
DebugOnly<JSRuntime *> rt = gcmarker->runtime();
JS_COMPARTMENT_ASSERT(rt, thing);
JS_ASSERT_IF(rt->isHeapBusy(), !IsInsideNursery(rt, thing));
JS_ASSERT_IF(rt->isHeapBusy(), !IsInsideNursery(thing));
if (!IsInsideNursery(rt, thing) && thing->markIfUnmarked(gcmarker->getMarkColor()))
if (!IsInsideNursery(thing) && thing->markIfUnmarked(gcmarker->getMarkColor()))
gcmarker->pushObject(thing);
}
@ -904,7 +905,7 @@ static void
PushMarkStack(GCMarker *gcmarker, JSFunction *thing)
{
JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing);
JS_ASSERT(!IsInsideNursery(gcmarker->runtime(), thing));
JS_ASSERT(!IsInsideNursery(thing));
if (thing->markIfUnmarked(gcmarker->getMarkColor()))
gcmarker->pushObject(thing);
@ -914,7 +915,7 @@ static void
PushMarkStack(GCMarker *gcmarker, types::TypeObject *thing)
{
JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing);
JS_ASSERT(!IsInsideNursery(gcmarker->runtime(), thing));
JS_ASSERT(!IsInsideNursery(thing));
if (thing->markIfUnmarked(gcmarker->getMarkColor()))
gcmarker->pushType(thing);
@ -924,7 +925,7 @@ static void
PushMarkStack(GCMarker *gcmarker, JSScript *thing)
{
JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing);
JS_ASSERT(!IsInsideNursery(gcmarker->runtime(), thing));
JS_ASSERT(!IsInsideNursery(thing));
/*
* We mark scripts directly rather than pushing on the stack as they can
@ -939,7 +940,7 @@ static void
PushMarkStack(GCMarker *gcmarker, LazyScript *thing)
{
JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing);
JS_ASSERT(!IsInsideNursery(gcmarker->runtime(), thing));
JS_ASSERT(!IsInsideNursery(thing));
/*
* We mark lazy scripts directly rather than pushing on the stack as they
@ -956,7 +957,7 @@ static void
PushMarkStack(GCMarker *gcmarker, Shape *thing)
{
JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing);
JS_ASSERT(!IsInsideNursery(gcmarker->runtime(), thing));
JS_ASSERT(!IsInsideNursery(thing));
/* We mark shapes directly rather than pushing on the stack. */
if (thing->markIfUnmarked(gcmarker->getMarkColor()))
@ -967,7 +968,7 @@ static void
PushMarkStack(GCMarker *gcmarker, jit::JitCode *thing)
{
JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing);
JS_ASSERT(!IsInsideNursery(gcmarker->runtime(), thing));
JS_ASSERT(!IsInsideNursery(thing));
if (thing->markIfUnmarked(gcmarker->getMarkColor()))
gcmarker->pushJitCode(thing);
@ -980,7 +981,7 @@ static void
PushMarkStack(GCMarker *gcmarker, BaseShape *thing)
{
JS_COMPARTMENT_ASSERT(gcmarker->runtime(), thing);
JS_ASSERT(!IsInsideNursery(gcmarker->runtime(), thing));
JS_ASSERT(!IsInsideNursery(thing));
/* We mark base shapes directly rather than pushing on the stack. */
if (thing->markIfUnmarked(gcmarker->getMarkColor()))
@ -1762,7 +1763,7 @@ UnmarkGrayChildren(JSTracer *trc, void **thingp, JSGCTraceKind kind)
}
UnmarkGrayTracer *tracer = static_cast<UnmarkGrayTracer *>(trc);
if (!IsInsideNursery(trc->runtime(), thing)) {
if (!IsInsideNursery(static_cast<Cell *>(thing))) {
if (!JS::GCThingIsMarkedGray(thing))
return;
@ -1809,7 +1810,7 @@ JS::UnmarkGrayGCThingRecursively(void *thing, JSGCTraceKind kind)
JSRuntime *rt = static_cast<Cell *>(thing)->runtimeFromMainThread();
bool unmarkedArg = false;
if (!IsInsideNursery(rt, thing)) {
if (!IsInsideNursery(static_cast<Cell *>(thing))) {
if (!JS::GCThingIsMarkedGray(thing))
return false;

View file

@ -72,7 +72,7 @@ MOZ_ALWAYS_INLINE bool
js::Nursery::getForwardedPointer(T **ref)
{
JS_ASSERT(ref);
JS_ASSERT(isInside(*ref));
JS_ASSERT(isInside((void *)*ref));
const gc::RelocationOverlay *overlay = reinterpret_cast<const gc::RelocationOverlay *>(*ref);
if (!overlay->isForwarded())
return false;

View file

@ -48,8 +48,6 @@ static int64_t GCReportThreshold = INT64_MAX;
bool
js::Nursery::init()
{
JS_ASSERT(start() == 0);
if (!hugeSlots.init())
return false;
@ -57,10 +55,9 @@ js::Nursery::init()
if (!heap)
return false;
JSRuntime *rt = runtime();
rt->gcNurseryStart_ = uintptr_t(heap);
heapStart_ = uintptr_t(heap);
currentStart_ = start();
rt->gcNurseryEnd_ = chunk(LastNurseryChunk).end();
heapEnd_ = chunk(LastNurseryChunk).end();
numActiveChunks_ = 1;
JS_POISON(heap, JS_FRESH_NURSERY_PATTERN, NurserySize);
setCurrentChunk(0);
@ -195,7 +192,7 @@ js::Nursery::allocateSlots(JSContext *cx, JSObject *obj, uint32_t nslots)
JS_ASSERT(obj);
JS_ASSERT(nslots > 0);
if (!isInside(obj))
if (!IsInsideNursery(obj))
return cx->pod_malloc<HeapSlot>(nslots);
if (nslots > MaxNurserySlots)
@ -223,7 +220,7 @@ js::Nursery::reallocateSlots(JSContext *cx, JSObject *obj, HeapSlot *oldSlots,
size_t oldSize = oldCount * sizeof(HeapSlot);
size_t newSize = newCount * sizeof(HeapSlot);
if (!isInside(obj))
if (!IsInsideNursery(obj))
return static_cast<HeapSlot *>(cx->realloc_(oldSlots, oldSize, newSize));
if (!isInside(oldSlots)) {
@ -275,7 +272,7 @@ js::Nursery::allocateHugeSlots(JSContext *cx, size_t nslots)
void
js::Nursery::notifyInitialSlots(Cell *cell, HeapSlot *slots)
{
if (isInside(cell) && !isInside(slots)) {
if (IsInsideNursery(cell) && !isInside(slots)) {
/* If this put fails, we will only leak the slots. */
(void)hugeSlots.put(slots);
}
@ -367,7 +364,7 @@ GetObjectAllocKindForCopy(JSRuntime *rt, JSObject *obj)
JS_ASSERT(obj->numFixedSlots() == 0);
/* Use minimal size object if we are just going to copy the pointer. */
if (!IsInsideNursery(rt, (void *)obj->getElementsHeader()))
if (!rt->gc.nursery.isInside(obj->getElementsHeader()))
return FINALIZE_OBJECT0_BACKGROUND;
size_t nelements = obj->getDenseCapacity();
@ -534,7 +531,7 @@ js::Nursery::markSlot(MinorCollectionTracer *trc, HeapSlot *slotp)
return;
JSObject *obj = &slotp->toObject();
if (!isInside(obj))
if (!IsInsideNursery(obj))
return;
if (getForwardedPointer(&obj)) {
@ -687,7 +684,7 @@ ShouldMoveToTenured(MinorCollectionTracer *trc, void **thingp)
{
Cell *cell = static_cast<Cell *>(*thingp);
Nursery &nursery = *trc->nursery;
return !nursery.isInside(thingp) && nursery.isInside(cell) &&
return !nursery.isInside(thingp) && IsInsideNursery(cell) &&
!nursery.getForwardedPointer(thingp);
}

View file

@ -61,6 +61,8 @@ class Nursery
explicit Nursery(JSRuntime *rt)
: runtime_(rt),
position_(0),
heapStart_(0),
heapEnd_(0),
currentStart_(0),
currentEnd_(0),
currentChunk_(0),
@ -77,9 +79,13 @@ class Nursery
/* Return true if no allocations have been made since the last collection. */
bool isEmpty() const;
template <typename T>
MOZ_ALWAYS_INLINE bool isInside(const T *p) const {
return gc::IsInsideNursery((JS::shadow::Runtime *)runtime_, p);
/*
* Check whether an arbitrary pointer is within the nursery. This is
* slower than IsInsideNursery(Cell*), but works on all types of pointers.
*/
MOZ_ALWAYS_INLINE bool isInside(gc::Cell *cellp) const MOZ_DELETE;
MOZ_ALWAYS_INLINE bool isInside(const void *p) const {
return uintptr_t(p) >= heapStart_ && uintptr_t(p) < heapEnd_;
}
/*
@ -142,13 +148,11 @@ class Nursery
}
MOZ_ALWAYS_INLINE uintptr_t start() const {
JS_ASSERT(runtime_);
return ((JS::shadow::Runtime *)runtime_)->gcNurseryStart_;
return heapStart_;
}
MOZ_ALWAYS_INLINE uintptr_t heapEnd() const {
JS_ASSERT(runtime_);
return ((JS::shadow::Runtime *)runtime_)->gcNurseryEnd_;
return heapEnd_;
}
#ifdef JS_GC_ZEAL
@ -183,6 +187,10 @@ class Nursery
/* Pointer to the first unallocated byte in the nursery. */
uintptr_t position_;
/* Pointer to first and last address of the total nursery allocation. */
uintptr_t heapStart_;
uintptr_t heapEnd_;
/* Pointer to the logical start of the Nursery. */
uintptr_t currentStart_;
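Marking the gc::Cell* overload of isInside() as MOZ_DELETE above is what enforces the split at compile time: a cell-typed pointer passed to the slow range check becomes a build error, steering those callers to IsInsideNursery(Cell *), while untyped pointers such as slot arrays keep the range check. A hypothetical illustration:

// Sketch only; the function and parameter names are made up.
static void
IllustrateNurseryChecks(js::Nursery &nursery, JSObject *obj, js::HeapSlot *slots)
{
    bool objInNursery   = js::gc::IsInsideNursery(obj);  // fast, chunk-based check
    bool slotsInNursery = nursery.isInside(slots);       // arbitrary pointer: range check
    // nursery.isInside(obj);  // does not compile: the Cell* overload is deleted,
    //                         // which is the point of the MOZ_DELETE declaration.
    (void)objInNursery;
    (void)slotsInNursery;
}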

View file

@ -26,7 +26,7 @@ StoreBuffer::SlotsEdge::mark(JSTracer *trc)
{
JSObject *obj = object();
if (trc->runtime()->gc.nursery.isInside(obj))
if (IsInsideNursery(obj))
return;
if (!obj->isNative()) {

View file

@ -244,7 +244,7 @@ class StoreBuffer
bool operator!=(const CellPtrEdge &other) const { return edge != other.edge; }
bool maybeInRememberedSet(const Nursery &nursery) const {
JS_ASSERT(nursery.isInside(*edge));
JS_ASSERT(IsInsideNursery(*edge));
return !nursery.isInside(edge);
}
@ -265,10 +265,10 @@ class StoreBuffer
bool operator==(const ValueEdge &other) const { return edge == other.edge; }
bool operator!=(const ValueEdge &other) const { return edge != other.edge; }
void *deref() const { return edge->isGCThing() ? edge->toGCThing() : nullptr; }
Cell *deref() const { return edge->isGCThing() ? static_cast<Cell *>(edge->toGCThing()) : nullptr; }
bool maybeInRememberedSet(const Nursery &nursery) const {
JS_ASSERT(nursery.isInside(deref()));
JS_ASSERT(IsInsideNursery(deref()));
return !nursery.isInside(edge);
}
@ -313,8 +313,8 @@ class StoreBuffer
return !(*this == other);
}
bool maybeInRememberedSet(const Nursery &nursery) const {
return !nursery.isInside(object());
bool maybeInRememberedSet(const Nursery &) const {
return !IsInsideNursery(JS::AsCell(object()));
}
void mark(JSTracer *trc);
@ -337,7 +337,7 @@ class StoreBuffer
bool operator==(const WholeCellEdges &other) const { return edge == other.edge; }
bool operator!=(const WholeCellEdges &other) const { return edge != other.edge; }
bool maybeInRememberedSet(const Nursery &nursery) const { return true; }
bool maybeInRememberedSet(const Nursery &) const { return true; }
static bool supportsDeduplication() { return true; }
void *deduplicationKey() const { return (void *)edge; }

View file

@ -120,7 +120,7 @@ AccumulateEdge(JSTracer *jstrc, void **thingp, JSGCTraceKind kind)
{
VerifyPreTracer *trc = (VerifyPreTracer *)jstrc;
JS_ASSERT(!IsInsideNursery(trc->runtime(), *(uintptr_t **)thingp));
JS_ASSERT(!IsInsideNursery(*reinterpret_cast<Cell **>(thingp)));
trc->edgeptr += sizeof(EdgeValue);
if (trc->edgeptr >= trc->term) {
@ -433,7 +433,7 @@ PostVerifierCollectStoreBufferEdges(JSTracer *jstrc, void **thingp, JSGCTraceKin
/* The store buffer may store extra, non-cross-generational edges. */
JSObject *dst = *reinterpret_cast<JSObject **>(thingp);
if (trc->runtime()->gc.nursery.isInside(thingp) || !trc->runtime()->gc.nursery.isInside(dst))
if (trc->runtime()->gc.nursery.isInside(thingp) || !IsInsideNursery(dst))
return;
/*
@ -471,7 +471,7 @@ PostVerifierVisitEdge(JSTracer *jstrc, void **thingp, JSGCTraceKind kind)
/* Filter out non cross-generational edges. */
JS_ASSERT(!trc->runtime()->gc.nursery.isInside(thingp));
JSObject *dst = *reinterpret_cast<JSObject **>(thingp);
if (!trc->runtime()->gc.nursery.isInside(dst))
if (!IsInsideNursery(dst))
return;
/*

View file

@ -3402,7 +3402,7 @@ IsCacheableGetPropCall(JSContext *cx, JSObject *obj, JSObject *holder, Shape *sh
#ifdef JSGC_GENERATIONAL
// Information from get prop call ICs may be used directly from Ion code,
// and should not be nursery allocated.
if (cx->runtime()->gc.nursery.isInside(holder) || cx->runtime()->gc.nursery.isInside(func))
if (IsInsideNursery(holder) || IsInsideNursery(func))
return false;
#endif
@ -3521,7 +3521,7 @@ IsCacheableSetPropCall(JSContext *cx, JSObject *obj, JSObject *holder, Shape *sh
#ifdef JSGC_GENERATIONAL
// Information from set prop call ICs may be used directly from Ion code,
// and should not be nursery allocated.
if (cx->runtime()->gc.nursery.isInside(holder) || cx->runtime()->gc.nursery.isInside(func))
if (IsInsideNursery(holder) || IsInsideNursery(func))
return false;
#endif

View file

@ -1946,8 +1946,7 @@ CodeGenerator::visitPostWriteBarrierO(LPostWriteBarrierO *lir)
if (lir->object()->isConstant()) {
#ifdef DEBUG
const Nursery &nursery = GetIonContext()->runtime->gcNursery();
JS_ASSERT(!nursery.isInside(&lir->object()->toConstant()->toObject()));
JS_ASSERT(!IsInsideNursery(&lir->object()->toConstant()->toObject()));
#endif
} else {
masm.branchPtrInNurseryRange(Assembler::Equal, ToRegister(lir->object()), temp,
@ -1973,8 +1972,7 @@ CodeGenerator::visitPostWriteBarrierV(LPostWriteBarrierV *lir)
if (lir->object()->isConstant()) {
#ifdef DEBUG
const Nursery &nursery = GetIonContext()->runtime->gcNursery();
JS_ASSERT(!nursery.isInside(&lir->object()->toConstant()->toObject()));
JS_ASSERT(!IsInsideNursery(&lir->object()->toConstant()->toObject()));
#endif
} else {
masm.branchPtrInNurseryRange(Assembler::Equal, ToRegister(lir->object()), temp,

View file

@ -150,7 +150,7 @@ CompileRuntime::positiveInfinityValue()
bool
CompileRuntime::isInsideNursery(gc::Cell *cell)
{
return UninlinedIsInsideNursery(runtime(), cell);
return UninlinedIsInsideNursery(cell);
}
#endif

View file

@ -7440,7 +7440,12 @@ IonBuilder::getTypedArrayElements(MDefinition *obj)
void *data = tarr->viewData();
// Bug 979449 - Optimistically embed the elements and use TI to
// invalidate if we move them.
if (!gc::IsInsideNursery(tarr->runtimeFromMainThread(), data)) {
#ifdef JSGC_GENERATIONAL
bool isTenured = !tarr->runtimeFromMainThread()->gc.nursery.isInside(data);
#else
bool isTenured = true;
#endif
if (isTenured) {
// The 'data' pointer can change in rare circumstances
// (ArrayBufferObject::changeContents).
types::TypeObjectKey *tarrType = types::TypeObjectKey::get(tarr);
@ -7747,8 +7752,10 @@ IonBuilder::setElemTryTypedStatic(bool *emitted, MDefinition *object,
TypedArrayObject *tarr = &tarrObj->as<TypedArrayObject>();
if (gc::IsInsideNursery(tarr->runtimeFromMainThread(), tarr->viewData()))
#ifdef JSGC_GENERATIONAL
if (tarr->runtimeFromMainThread()->gc.nursery.isInside(tarr->viewData()))
return true;
#endif
ArrayBufferView::ViewType viewType = (ArrayBufferView::ViewType) tarr->type();

View file

@ -248,7 +248,7 @@ MacroAssembler::branchNurseryPtr(Condition cond, const Address &ptr1, ImmMaybeNu
Label *label)
{
#ifdef JSGC_GENERATIONAL
if (ptr2.value && gc::IsInsideNursery(GetIonContext()->cx->runtime(), (void *)ptr2.value))
if (ptr2.value && gc::IsInsideNursery(ptr2.value))
embedsNurseryPointers_ = true;
#endif
branchPtr(cond, ptr1, ptr2, label);
@ -258,7 +258,7 @@ void
MacroAssembler::moveNurseryPtr(ImmMaybeNurseryPtr ptr, Register reg)
{
#ifdef JSGC_GENERATIONAL
if (ptr.value && gc::IsInsideNursery(GetIonContext()->cx->runtime(), (void *)ptr.value))
if (ptr.value && gc::IsInsideNursery(ptr.value))
embedsNurseryPointers_ = true;
#endif
movePtr(ptr, reg);

View file

@ -1919,7 +1919,6 @@ IonBuilder::inlineBoundFunction(CallInfo &nativeCallInfo, JSFunction *target)
return InliningStatus_NotInlined;
JSFunction *scriptedTarget = &(target->getBoundFunctionTarget()->as<JSFunction>());
JSRuntime *runtime = scriptedTarget->runtimeFromMainThread();
// Don't optimize if we're constructing and the callee is not a
// constructor, so that CallKnown does not have to handle this case
@ -1930,17 +1929,17 @@ IonBuilder::inlineBoundFunction(CallInfo &nativeCallInfo, JSFunction *target)
return InliningStatus_NotInlined;
}
if (gc::IsInsideNursery(runtime, scriptedTarget))
if (gc::IsInsideNursery(scriptedTarget))
return InliningStatus_NotInlined;
for (size_t i = 0; i < target->getBoundFunctionArgumentCount(); i++) {
const Value val = target->getBoundFunctionArgument(i);
if (val.isObject() && gc::IsInsideNursery(runtime, &val.toObject()))
if (val.isObject() && gc::IsInsideNursery(&val.toObject()))
return InliningStatus_NotInlined;
}
const Value thisVal = target->getBoundFunctionThis();
if (thisVal.isObject() && gc::IsInsideNursery(runtime, &thisVal.toObject()))
if (thisVal.isObject() && gc::IsInsideNursery(&thisVal.toObject()))
return InliningStatus_NotInlined;
size_t argc = target->getBoundFunctionArgumentCount() + nativeCallInfo.argc();

View file

@ -703,7 +703,7 @@ FilterArgumentsOrEval(JSContext *cx, JSString *str)
void
PostWriteBarrier(JSRuntime *rt, JSObject *obj)
{
JS_ASSERT(!IsInsideNursery(rt, obj));
JS_ASSERT(!IsInsideNursery(obj));
rt->gc.storeBuffer.putWholeCellFromMainThread(obj);
}

View file

@ -490,7 +490,7 @@ MacroAssemblerARM::ma_mov(ImmGCPtr ptr, Register dest)
else
rs = L_LDR;
ma_movPatchable(Imm32(ptr.value), dest, Always, rs);
ma_movPatchable(Imm32(uintptr_t(ptr.value)), dest, Always, rs);
}
// Shifts (just a move with a shifting op2)

View file

@ -181,9 +181,9 @@ struct PatchedImmPtr {
// Used for immediates which require relocation.
struct ImmGCPtr
{
uintptr_t value;
const gc::Cell *value;
explicit ImmGCPtr(const gc::Cell *ptr) : value(reinterpret_cast<uintptr_t>(ptr))
explicit ImmGCPtr(const gc::Cell *ptr) : value(ptr)
{
JS_ASSERT(!IsPoisonedPtr(ptr));
JS_ASSERT_IF(ptr, ptr->isTenured());
@ -201,7 +201,7 @@ struct ImmMaybeNurseryPtr : public ImmGCPtr
{
explicit ImmMaybeNurseryPtr(gc::Cell *ptr)
{
this->value = reinterpret_cast<uintptr_t>(ptr);
this->value = ptr;
JS_ASSERT(!IsPoisonedPtr(ptr));
// asm.js shouldn't be creating GC things

View file

@ -327,7 +327,7 @@ class Assembler : public AssemblerX86Shared
movq(ImmWord(uintptr_t(imm.value)), dest);
}
void movq(ImmGCPtr ptr, Register dest) {
masm.movq_i64r(ptr.value, dest.code());
masm.movq_i64r(uintptr_t(ptr.value), dest.code());
writeDataRelocation(ptr);
}
void movq(const Operand &src, Register dest) {

View file

@ -186,7 +186,7 @@ class Assembler : public AssemblerX86Shared
// Actual assembly emitting functions.
void push(ImmGCPtr ptr) {
push(Imm32(ptr.value));
push(Imm32(uintptr_t(ptr.value)));
writeDataRelocation(ptr);
}
void push(const ImmWord imm) {
@ -219,21 +219,21 @@ class Assembler : public AssemblerX86Shared
}
void movl(ImmGCPtr ptr, Register dest) {
masm.movl_i32r(ptr.value, dest.code());
masm.movl_i32r(uintptr_t(ptr.value), dest.code());
writeDataRelocation(ptr);
}
void movl(ImmGCPtr ptr, const Operand &dest) {
switch (dest.kind()) {
case Operand::REG:
masm.movl_i32r(ptr.value, dest.reg());
masm.movl_i32r(uintptr_t(ptr.value), dest.reg());
writeDataRelocation(ptr);
break;
case Operand::MEM_REG_DISP:
masm.movl_i32m(ptr.value, dest.disp(), dest.base());
masm.movl_i32m(uintptr_t(ptr.value), dest.disp(), dest.base());
writeDataRelocation(ptr);
break;
case Operand::MEM_SCALE:
masm.movl_i32m(ptr.value, dest.disp(), dest.base(), dest.index(), dest.scale());
masm.movl_i32m(uintptr_t(ptr.value), dest.disp(), dest.base(), dest.index(), dest.scale());
writeDataRelocation(ptr);
break;
default:
@ -315,7 +315,7 @@ class Assembler : public AssemblerX86Shared
cmpl(src, ImmWord(uintptr_t(imm.value)));
}
void cmpl(const Register src, ImmGCPtr ptr) {
masm.cmpl_ir(ptr.value, src.code());
masm.cmpl_ir(uintptr_t(ptr.value), src.code());
writeDataRelocation(ptr);
}
void cmpl(Register lhs, Register rhs) {
@ -324,15 +324,15 @@ class Assembler : public AssemblerX86Shared
void cmpl(const Operand &op, ImmGCPtr imm) {
switch (op.kind()) {
case Operand::REG:
masm.cmpl_ir_force32(imm.value, op.reg());
masm.cmpl_ir_force32(uintptr_t(imm.value), op.reg());
writeDataRelocation(imm);
break;
case Operand::MEM_REG_DISP:
masm.cmpl_im_force32(imm.value, op.disp(), op.base());
masm.cmpl_im_force32(uintptr_t(imm.value), op.disp(), op.base());
writeDataRelocation(imm);
break;
case Operand::MEM_ADDRESS32:
masm.cmpl_im(imm.value, op.address());
masm.cmpl_im(uintptr_t(imm.value), op.address());
writeDataRelocation(imm);
break;
default:

View file

@ -26,9 +26,9 @@ BEGIN_TEST(testGCStoreBufferRemoval)
// Sanity check - objects start in the nursery and then become tenured.
JS_GC(cx->runtime());
JS::RootedObject obj(cx, NurseryObject());
CHECK(js::gc::IsInsideNursery(rt, obj.get()));
CHECK(js::gc::IsInsideNursery(obj.get()));
JS_GC(cx->runtime());
CHECK(!js::gc::IsInsideNursery(rt, obj.get()));
CHECK(!js::gc::IsInsideNursery(obj.get()));
JS::RootedObject tenuredObject(cx, obj);
// Hide the horrors herein from the static rooting analysis.

View file

@ -7,31 +7,28 @@
#include "jsapi-tests/tests.h"
#ifdef JSGC_GENERATIONAL
BEGIN_TEST(testIsInsideNursery)
{
/* Non-GC things are never inside the nursery. */
CHECK(!js::gc::IsInsideNursery(rt, rt));
CHECK(!js::gc::IsInsideNursery(rt, nullptr));
CHECK(!js::gc::IsInsideNursery(nullptr));
CHECK(!rt->gc.nursery.isInside(rt));
CHECK(!rt->gc.nursery.isInside((void *)nullptr));
JS_GC(rt);
JS::RootedObject object(cx, JS_NewObject(cx, nullptr, JS::NullPtr(), JS::NullPtr()));
#ifdef JSGC_GENERATIONAL
/* Objects are initially allocated in the nursery. */
CHECK(js::gc::IsInsideNursery(rt, object));
CHECK(js::gc::IsInsideNursery(object));
#else
CHECK(!js::gc::IsInsideNursery(rt, object));
CHECK(!js::gc::IsInsideNursery(object));
#endif
JS_GC(rt);
CHECK(!js::gc::IsInsideNursery(rt, object));
/* And are tenured if still live after a GC. */
CHECK(!js::gc::IsInsideNursery(object));
return true;
}
END_TEST(testIsInsideNursery)
#endif

View file

@ -225,12 +225,11 @@ JSCompartment::checkWrapperMapAfterMovingGC()
* wrapperMap that points into the nursery, and that the hash table entries
* are discoverable.
*/
JS::shadow::Runtime *rt = JS::shadow::Runtime::asShadowRuntime(runtimeFromMainThread());
for (WrapperMap::Enum e(crossCompartmentWrappers); !e.empty(); e.popFront()) {
CrossCompartmentKey key = e.front().key();
JS_ASSERT(!IsInsideNursery(rt, key.debugger));
JS_ASSERT(!IsInsideNursery(rt, key.wrapped));
JS_ASSERT(!IsInsideNursery(rt, e.front().value().get().toGCThing()));
JS_ASSERT(!IsInsideNursery(key.debugger));
JS_ASSERT(!IsInsideNursery(key.wrapped));
JS_ASSERT(!IsInsideNursery(static_cast<Cell *>(e.front().value().get().toGCThing())));
WrapperMap::Ptr ptr = crossCompartmentWrappers.lookup(key);
JS_ASSERT(ptr.found() && &*ptr == &e.front());
@ -253,10 +252,9 @@ JSCompartment::putWrapper(JSContext *cx, const CrossCompartmentKey &wrapped, con
#ifdef JSGC_GENERATIONAL
/* There's no point allocating wrappers in the nursery since we will tenure them anyway. */
Nursery &nursery = cx->nursery();
JS_ASSERT(!nursery.isInside(wrapper.toGCThing()));
JS_ASSERT(!IsInsideNursery(static_cast<gc::Cell *>(wrapper.toGCThing())));
if (success && (nursery.isInside(wrapped.wrapped) || nursery.isInside(wrapped.debugger))) {
if (success && (IsInsideNursery(wrapped.wrapped) || IsInsideNursery(wrapped.debugger))) {
WrapperMapRef ref(&crossCompartmentWrappers, wrapped);
cx->runtime()->gc.storeBuffer.putGeneric(ref);
}

View file

@ -658,11 +658,10 @@ js::GCThingTraceKind(void *thing)
JS_FRIEND_API(void)
js::VisitGrayWrapperTargets(Zone *zone, GCThingCallback callback, void *closure)
{
JSRuntime *rt = zone->runtimeFromMainThread();
for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next()) {
for (JSCompartment::WrapperEnum e(comp); !e.empty(); e.popFront()) {
gc::Cell *thing = e.front().key().wrapped;
if (!IsInsideNursery(rt, thing) && thing->isMarked(gc::GRAY))
if (!IsInsideNursery(thing) && thing->isMarked(gc::GRAY))
callback(closure, thing);
}
}
@ -785,7 +784,7 @@ DumpHeapVisitCell(JSRuntime *rt, void *data, void *thing,
static void
DumpHeapVisitChild(JSTracer *trc, void **thingp, JSGCTraceKind kind)
{
if (gc::IsInsideNursery(trc->runtime(), *thingp))
if (gc::IsInsideNursery((js::gc::Cell *)*thingp))
return;
DumpHeapTracer *dtrc = static_cast<DumpHeapTracer *>(trc);
@ -797,7 +796,7 @@ DumpHeapVisitChild(JSTracer *trc, void **thingp, JSGCTraceKind kind)
static void
DumpHeapVisitRoot(JSTracer *trc, void **thingp, JSGCTraceKind kind)
{
if (gc::IsInsideNursery(trc->runtime(), *thingp))
if (gc::IsInsideNursery((js::gc::Cell *)*thingp))
return;
DumpHeapTracer *dtrc = static_cast<DumpHeapTracer *>(trc);
@ -1228,7 +1227,7 @@ JS_StoreObjectPostBarrierCallback(JSContext* cx,
JSObject *key, void *data)
{
JSRuntime *rt = cx->runtime();
if (IsInsideNursery(rt, key))
if (IsInsideNursery(key))
rt->gc.storeBuffer.putCallback(callback, key, data);
}
@ -1238,7 +1237,7 @@ JS_StoreStringPostBarrierCallback(JSContext* cx,
JSString *key, void *data)
{
JSRuntime *rt = cx->runtime();
if (IsInsideNursery(rt, key))
if (IsInsideNursery(key))
rt->gc.storeBuffer.putCallback(callback, key, data);
}
#endif /* JSGC_GENERATIONAL */

View file

@ -5549,9 +5549,9 @@ AutoSuppressGC::AutoSuppressGC(JSRuntime *rt)
}
bool
js::UninlinedIsInsideNursery(JSRuntime *rt, const void *thing)
js::UninlinedIsInsideNursery(const gc::Cell *cell)
{
return IsInsideNursery(rt, thing);
return IsInsideNursery(cell);
}
#ifdef DEBUG
@ -5570,6 +5570,18 @@ JS::AssertGCThingMustBeTenured(JSObject *obj)
obj->isTenured());
}
JS_FRIEND_API(void)
js::gc::AssertGCThingHasType(js::gc::Cell *cell, JSGCTraceKind kind)
{
#ifdef DEBUG
JS_ASSERT(cell);
if (IsInsideNursery(cell))
JS_ASSERT(kind == JSTRACE_OBJECT);
else
JS_ASSERT(MapAllocToTraceKind(cell->tenuredGetAllocKind()) == kind);
#endif
}
JS_FRIEND_API(size_t)
JS::GetGCNumber()
{

View file

@ -1280,7 +1280,7 @@ PurgeJITCaches(JS::Zone *zone);
// This is the same as IsInsideNursery, but not inlined.
bool
UninlinedIsInsideNursery(JSRuntime *rt, const void *thing);
UninlinedIsInsideNursery(const gc::Cell *cell);
} /* namespace js */

View file

@ -56,7 +56,7 @@ ThreadSafeContext::isThreadLocal(T thing) const
if (!isForkJoinContext())
return true;
if (!IsInsideNursery(runtime_, thing) &&
if (!IsInsideNursery(thing) &&
allocator_->arenas.containsArena(runtime_, thing->arenaHeader()))
{
// GC should be suppressed in preparation for mutating thread local
@ -97,7 +97,7 @@ GetGCThingTraceKind(const void *thing)
JS_ASSERT(thing);
const Cell *cell = static_cast<const Cell *>(thing);
#ifdef JSGC_GENERATIONAL
if (IsInsideNursery(cell->runtimeFromAnyThread(), cell))
if (IsInsideNursery(cell))
return JSTRACE_OBJECT;
#endif
return MapAllocToTraceKind(cell->tenuredGetAllocKind());

View file

@ -2030,7 +2030,7 @@ TypeCompartment::newTypeObject(ExclusiveContext *cx, const Class *clasp, Handle<
JS_ASSERT_IF(proto.isObject(), cx->isInsideCurrentCompartment(proto.toObject()));
if (cx->isJSContext()) {
if (proto.isObject() && IsInsideNursery(cx->asJSContext()->runtime(), proto.toObject()))
if (proto.isObject() && IsInsideNursery(proto.toObject()))
initialFlags |= OBJECT_FLAG_NURSERY_PROTO;
}
@ -2754,7 +2754,7 @@ TypeObject::setProto(JSContext *cx, TaggedProto proto)
{
JS_ASSERT(singleton());
if (proto.isObject() && IsInsideNursery(cx->runtime(), proto.toObject()))
if (proto.isObject() && IsInsideNursery(proto.toObject()))
addFlags(OBJECT_FLAG_NURSERY_PROTO);
setProtoUnchecked(proto);
@ -3916,7 +3916,7 @@ ExclusiveContext::getNewType(const Class *clasp, TaggedProto proto, JSFunction *
return nullptr;
#ifdef JSGC_GENERATIONAL
if (proto.isObject() && hasNursery() && nursery().isInside(proto.toObject())) {
if (proto.isObject() && hasNursery() && IsInsideNursery(proto.toObject())) {
asJSContext()->runtime()->gc.storeBuffer.putGeneric(
NewTypeObjectsSetRef(&newTypeObjects, clasp, proto.toObject(), fun));
}
@ -3967,12 +3967,11 @@ JSCompartment::checkNewTypeObjectTableAfterMovingGC()
* newTypeObjects that points into the nursery, and that the hash table
* entries are discoverable.
*/
JS::shadow::Runtime *rt = JS::shadow::Runtime::asShadowRuntime(runtimeFromMainThread());
for (TypeObjectWithNewScriptSet::Enum e(newTypeObjects); !e.empty(); e.popFront()) {
TypeObjectWithNewScriptEntry entry = e.front();
JS_ASSERT(!IsInsideNursery(rt, entry.newFunction));
JS_ASSERT(!IsInsideNursery(entry.newFunction));
TaggedProto proto = entry.object->proto();
JS_ASSERT_IF(proto.isObject(), !IsInsideNursery(rt, proto.toObject()));
JS_ASSERT_IF(proto.isObject(), !IsInsideNursery(proto.toObject()));
TypeObjectWithNewScriptEntry::Lookup
lookup(entry.object->clasp(), proto, entry.newFunction);
TypeObjectWithNewScriptSet::Ptr ptr = newTypeObjects.lookup(lookup);
@ -4573,7 +4572,7 @@ TypeObject::addTypedObjectAddendum(JSContext *cx, Handle<TypeDescr*> descr)
// Type descriptors are always pre-tenured. This is both because
// we expect them to live a long time and so that they can be
// safely accessed during ion compilation.
JS_ASSERT(!IsInsideNursery(cx->runtime(), descr));
JS_ASSERT(!IsInsideNursery(descr));
JS_ASSERT(descr);
if (flags() & OBJECT_FLAG_ADDENDUM_CLEARED)

View file

@ -21,6 +21,7 @@
#include "gc/Barrier.h"
#include "gc/Marking.h"
#include "js/GCAPI.h"
#include "js/HeapAPI.h"
#include "vm/ObjectImpl.h"
#include "vm/Shape.h"
#include "vm/Xdr.h"
@ -681,12 +682,11 @@ class JSObject : public js::ObjectImpl
*/
JS_ASSERT(dstStart + count <= getDenseCapacity());
#if defined(DEBUG) && defined(JSGC_GENERATIONAL)
JS::shadow::Runtime *rt = JS::shadow::Runtime::asShadowRuntime(runtimeFromAnyThread());
JS_ASSERT(!js::gc::IsInsideNursery(rt, this));
JS_ASSERT(!js::gc::IsInsideNursery(this));
for (uint32_t index = 0; index < count; ++index) {
const JS::Value& value = src[index];
if (value.isMarkable())
JS_ASSERT(!js::gc::IsInsideNursery(rt, value.toGCThing()));
JS_ASSERT(!js::gc::IsInsideNursery(static_cast<js::gc::Cell *>(value.toGCThing())));
}
#endif
memcpy(&elements[dstStart], src, count * sizeof(js::HeapSlot));

View file

@ -355,8 +355,7 @@ JSObject::getDenseOrTypedArrayElement(uint32_t idx)
/* static */ inline bool
JSObject::setSingletonType(js::ExclusiveContext *cx, js::HandleObject obj)
{
JS_ASSERT_IF(cx->isJSContext(),
!IsInsideNursery(cx->asJSContext()->runtime(), obj.get()));
JS_ASSERT_IF(cx->isJSContext(), !IsInsideNursery(obj));
js::types::TypeObject *type = cx->getSingletonType(obj->getClass(), obj->getTaggedProto());
if (!type)

View file

@ -175,10 +175,6 @@ struct Runtime
bool needsBarrier_;
#ifdef JSGC_GENERATIONAL
/* Allow inlining of Nursery::isInside. */
uintptr_t gcNurseryStart_;
uintptr_t gcNurseryEnd_;
private:
js::gc::StoreBuffer *gcStoreBufferPtr_;
#endif
@ -191,8 +187,6 @@ struct Runtime
)
: needsBarrier_(false)
#ifdef JSGC_GENERATIONAL
, gcNurseryStart_(0)
, gcNurseryEnd_(0)
, gcStoreBufferPtr_(storeBuffer)
#endif
{}

View file

@ -360,7 +360,7 @@ WeakMapPostWriteBarrier(JSRuntime *rt, ObjectValueMap *weakMap, JSObject *key)
UnbarrieredMap *unbarrieredMap = reinterpret_cast<UnbarrieredMap *>(baseHashMap);
typedef HashKeyRef<UnbarrieredMap, JSObject *> Ref;
if (key && IsInsideNursery(rt, key))
if (key && IsInsideNursery(key))
rt->gc.storeBuffer.putGeneric(Ref((unbarrieredMap), key));
#endif
}

View file

@ -193,7 +193,7 @@ bool Wrapper::finalizeInBackground(Value priv)
* If the wrapped object is in the nursery then we know it doesn't have a
* finalizer, and so background finalization is ok.
*/
if (IsInsideNursery(priv.toObject().runtimeFromMainThread(), &priv.toObject()))
if (IsInsideNursery(&priv.toObject()))
return true;
return IsBackgroundFinalized(priv.toObject().tenuredGetAllocKind());
}

View file

@ -656,7 +656,7 @@ ArrayBufferObject::create(JSContext *cx, uint32_t nbytes, void *data /* = nullpt
JS_ASSERT(obj->getClass() == &class_);
JS_ASSERT(!gc::IsInsideNursery(cx->runtime(), obj));
JS_ASSERT(!gc::IsInsideNursery(obj));
if (data) {
obj->initialize(nbytes, data, OwnsData);

View file

@ -275,7 +275,7 @@ PostBarrierTypedArrayObject(JSObject *obj)
#ifdef JSGC_GENERATIONAL
JS_ASSERT(obj);
JSRuntime *rt = obj->runtimeFromMainThread();
if (!rt->isHeapBusy() && !IsInsideNursery(rt, obj))
if (!rt->isHeapBusy() && !IsInsideNursery(JS::AsCell(obj)))
rt->gc.storeBuffer.putWholeCellFromMainThread(obj);
#endif
}

View file

@ -468,16 +468,18 @@ JSRuntime::~JSRuntime()
void
NewObjectCache::clearNurseryObjects(JSRuntime *rt)
{
#ifdef JSGC_GENERATIONAL
for (unsigned i = 0; i < mozilla::ArrayLength(entries); ++i) {
Entry &e = entries[i];
JSObject *obj = reinterpret_cast<JSObject *>(&e.templateObject);
if (IsInsideNursery(rt, e.key) ||
IsInsideNursery(rt, obj->slots) ||
IsInsideNursery(rt, obj->elements))
if (IsInsideNursery(e.key) ||
rt->gc.nursery.isInside(obj->slots) ||
rt->gc.nursery.isInside(obj->elements))
{
PodZero(&e);
}
}
#endif
}
void

View file

@ -1683,7 +1683,7 @@ DebugScopes::proxiedScopesPostWriteBarrier(JSRuntime *rt, ObjectWeakMap *map,
UnbarrieredMap *unbarrieredMap = reinterpret_cast<UnbarrieredMap *>(baseHashMap);
typedef gc::HashKeyRef<UnbarrieredMap, JSObject *> Ref;
if (key && IsInsideNursery(rt, key))
if (key && IsInsideNursery(key))
rt->gc.storeBuffer.putGeneric(Ref(unbarrieredMap, key.get()));
#endif
}
@ -1714,7 +1714,7 @@ DebugScopes::missingScopesPostWriteBarrier(JSRuntime *rt, MissingScopeMap *map,
const ScopeIterKey &key)
{
#ifdef JSGC_GENERATIONAL
if (key.enclosingScope() && IsInsideNursery(rt, key.enclosingScope()))
if (key.enclosingScope() && IsInsideNursery(key.enclosingScope()))
rt->gc.storeBuffer.putGeneric(MissingScopesRef(map, key));
#endif
}
@ -1730,7 +1730,7 @@ DebugScopes::liveScopesPostWriteBarrier(JSRuntime *rt, LiveScopeMap *map, ScopeO
DefaultHasher<ScopeObject *>,
RuntimeAllocPolicy> UnbarrieredLiveScopeMap;
typedef gc::HashKeyRef<UnbarrieredLiveScopeMap, ScopeObject *> Ref;
if (key && IsInsideNursery(rt, key))
if (key && IsInsideNursery(key))
rt->gc.storeBuffer.putGeneric(Ref(reinterpret_cast<UnbarrieredLiveScopeMap *>(map), key));
#endif
}
@ -1820,20 +1820,19 @@ DebugScopes::checkHashTablesAfterMovingGC(JSRuntime *runtime)
* postbarriers have worked and that no hashtable keys (or values) are left
* pointing into the nursery.
*/
JS::shadow::Runtime *rt = JS::shadow::Runtime::asShadowRuntime(runtime);
for (ObjectWeakMap::Range r = proxiedScopes.all(); !r.empty(); r.popFront()) {
JS_ASSERT(!IsInsideNursery(rt, r.front().key().get()));
JS_ASSERT(!IsInsideNursery(rt, r.front().value().get()));
JS_ASSERT(!IsInsideNursery(r.front().key().get()));
JS_ASSERT(!IsInsideNursery(r.front().value().get()));
}
for (MissingScopeMap::Range r = missingScopes.all(); !r.empty(); r.popFront()) {
JS_ASSERT(!IsInsideNursery(rt, r.front().key().cur()));
JS_ASSERT(!IsInsideNursery(rt, r.front().key().staticScope()));
JS_ASSERT(!IsInsideNursery(rt, r.front().value().get()));
JS_ASSERT(!IsInsideNursery(r.front().key().cur()));
JS_ASSERT(!IsInsideNursery(r.front().key().staticScope()));
JS_ASSERT(!IsInsideNursery(r.front().value().get()));
}
for (LiveScopeMap::Range r = liveScopes.all(); !r.empty(); r.popFront()) {
JS_ASSERT(!IsInsideNursery(rt, r.front().key()));
JS_ASSERT(!IsInsideNursery(rt, r.front().value().cur_.get()));
JS_ASSERT(!IsInsideNursery(rt, r.front().value().staticScope_.get()));
JS_ASSERT(!IsInsideNursery(r.front().key()));
JS_ASSERT(!IsInsideNursery(r.front().value().cur_.get()));
JS_ASSERT(!IsInsideNursery(r.front().value().staticScope_.get()));
}
}
#endif

View file

@ -1667,15 +1667,14 @@ JSCompartment::checkInitialShapesTableAfterMovingGC()
* initialShapes that points into the nursery, and that the hash table
* entries are discoverable.
*/
JS::shadow::Runtime *rt = JS::shadow::Runtime::asShadowRuntime(runtimeFromMainThread());
for (InitialShapeSet::Enum e(initialShapes); !e.empty(); e.popFront()) {
InitialShapeEntry entry = e.front();
TaggedProto proto = entry.proto;
Shape *shape = entry.shape.get();
JS_ASSERT_IF(proto.isObject(), !IsInsideNursery(rt, proto.toObject()));
JS_ASSERT(!IsInsideNursery(rt, shape->getObjectParent()));
JS_ASSERT(!IsInsideNursery(rt, shape->getObjectMetadata()));
JS_ASSERT_IF(proto.isObject(), !IsInsideNursery(proto.toObject()));
JS_ASSERT_IF(shape->getObjectParent(), !IsInsideNursery(shape->getObjectParent()));
JS_ASSERT_IF(shape->getObjectMetadata(), !IsInsideNursery(shape->getObjectMetadata()));
InitialShapeEntry::Lookup lookup(shape->getObjectClass(),
proto,
@ -1730,9 +1729,9 @@ EmptyShape::getInitialShape(ExclusiveContext *cx, const Class *clasp, TaggedProt
#ifdef JSGC_GENERATIONAL
if (cx->hasNursery()) {
if ((protoRoot.isObject() && cx->nursery().isInside(protoRoot.toObject())) ||
cx->nursery().isInside(parentRoot.get()) ||
cx->nursery().isInside(metadataRoot.get()))
if ((protoRoot.isObject() && IsInsideNursery(protoRoot.toObject())) ||
IsInsideNursery(parentRoot.get()) ||
IsInsideNursery(metadataRoot.get()))
{
InitialShapeSetRef ref(
&table, clasp, protoRoot, parentRoot, metadataRoot, nfixed, objectFlags);

View file

@ -54,13 +54,12 @@ RunTest(JSRuntime* rt, JSContext* cx, ArrayT* array)
*/
RootedValue value(cx);
const char* property = "foo";
JS::shadow::Runtime* srt = reinterpret_cast<JS::shadow::Runtime*>(rt);
for (size_t i = 0; i < ElementCount; ++i) {
RootedObject obj(cx, JS_NewObject(cx, nullptr, JS::NullPtr(), JS::NullPtr()));
#ifdef JSGC_GENERATIONAL
ASSERT_TRUE(js::gc::IsInsideNursery(srt, obj));
ASSERT_TRUE(js::gc::IsInsideNursery(AsCell(obj)));
#else
ASSERT_FALSE(js::gc::IsInsideNursery(srt, obj));
ASSERT_FALSE(js::gc::IsInsideNursery(AsCell(obj)));
#endif
value = Int32Value(i);
ASSERT_TRUE(JS_SetProperty(cx, obj, property, value));
@ -78,7 +77,7 @@ RunTest(JSRuntime* rt, JSContext* cx, ArrayT* array)
*/
for (size_t i = 0; i < ElementCount; ++i) {
RootedObject obj(cx, array->ElementAt(i));
ASSERT_FALSE(js::gc::IsInsideNursery(srt, obj));
ASSERT_FALSE(js::gc::IsInsideNursery(AsCell(obj)));
ASSERT_TRUE(JS_GetProperty(cx, obj, property, &value));
ASSERT_TRUE(value.isInt32());
ASSERT_EQ(static_cast<int32_t>(i), value.toInt32());