Backed out 20 changesets (bug 903519) for detected memory leaks on a CLOSED TREE

Backed out changeset 3f72f8747e29 (bug 903519)
Backed out changeset bb2cc298a155 (bug 903519)
Backed out changeset cc56f32ddae8 (bug 903519)
Backed out changeset ec5b307a28aa (bug 903519)
Backed out changeset 38f4e0426bdd (bug 903519)
Backed out changeset 71831e232df2 (bug 903519)
Backed out changeset 6f3666e9540e (bug 903519)
Backed out changeset c62e5867d763 (bug 903519)
Backed out changeset 7854bfe5d683 (bug 903519)
Backed out changeset f5f72c93adf9 (bug 903519)
Backed out changeset 7d56db668369 (bug 903519)
Backed out changeset 7c96258a6459 (bug 903519)
Backed out changeset 11b3f0fda4ad (bug 903519)
Backed out changeset 2bc9d427f427 (bug 903519)
Backed out changeset fdb6431ea4ff (bug 903519)
Backed out changeset 6d7d15b25489 (bug 903519)
Backed out changeset 457008b194a8 (bug 903519)
Backed out changeset 80b9d97bf1fe (bug 903519)
Backed out changeset 1fc5ee0d0116 (bug 903519)
Backed out changeset 9316d8f7b92a (bug 903519)
This commit is contained in:
Noemi Erli 2018-01-11 22:18:23 +02:00
Родитель 7661c6a4ed
Коммит cea0d65a57
81 изменённых файлов: 479 добавлений и 1813 удалений

Просмотреть файл

@ -24,12 +24,6 @@ namespace gc {
struct Cell;
/*
* The low bit is set so this should never equal a normal pointer, and the high
* bit is set so this should never equal the upper 32 bits of a 64-bit pointer.
*/
const uint32_t Relocated = uintptr_t(0xbad0bad1);
const size_t ArenaShift = 12;
const size_t ArenaSize = size_t(1) << ArenaShift;
const size_t ArenaMask = ArenaSize - 1;
@ -67,7 +61,7 @@ const size_t ChunkMarkBitmapBits = 129024;
const size_t ChunkRuntimeOffset = ChunkSize - sizeof(void*);
const size_t ChunkTrailerSize = 2 * sizeof(uintptr_t) + sizeof(uint64_t);
const size_t ChunkLocationOffset = ChunkSize - ChunkTrailerSize;
const size_t ChunkStoreBufferOffset = ChunkSize - ChunkTrailerSize + sizeof(uint64_t);
const size_t ChunkStoreBufferOffset = ChunkLocationOffset + sizeof(uint64_t);
const size_t ArenaZoneOffset = sizeof(size_t);
const size_t ArenaHeaderSize = sizeof(size_t) + 2 * sizeof(uintptr_t) +
sizeof(size_t) + sizeof(uintptr_t);
@ -466,15 +460,10 @@ GetTenuredGCThingZone(GCCellPtr thing)
return js::gc::detail::GetGCThingZone(thing.unsafeAsUIntPtr());
}
extern JS_PUBLIC_API(Zone*)
GetNurseryStringZone(JSString* str);
static MOZ_ALWAYS_INLINE Zone*
GetStringZone(JSString* str)
{
if (!js::gc::IsInsideNursery(reinterpret_cast<js::gc::Cell*>(str)))
return js::gc::detail::GetGCThingZone(reinterpret_cast<uintptr_t>(str));
return GetNurseryStringZone(str);
return js::gc::detail::GetGCThingZone(uintptr_t(str));
}
extern JS_PUBLIC_API(Zone*)
@ -494,12 +483,6 @@ GCThingIsMarkedGray(GCCellPtr thing)
extern JS_PUBLIC_API(JS::TraceKind)
GCThingTraceKind(void* thing);
extern JS_PUBLIC_API(void)
EnableNurseryStrings(JSContext* cx);
extern JS_PUBLIC_API(void)
DisableNurseryStrings(JSContext* cx);
/*
* Returns true when writes to GC thing pointers (and reads from weak pointers)
* must call an incremental barrier. This is generally only true when running

Просмотреть файл

@ -199,7 +199,6 @@ template <typename T> class PersistentRooted;
JS_FRIEND_API(bool) isGCEnabled();
JS_FRIEND_API(void) HeapObjectPostBarrier(JSObject** objp, JSObject* prev, JSObject* next);
JS_FRIEND_API(void) HeapStringPostBarrier(JSString** objp, JSString* prev, JSString* next);
#ifdef JS_DEBUG
/**
@ -209,12 +208,12 @@ JS_FRIEND_API(void) HeapStringPostBarrier(JSString** objp, JSString* prev, JSStr
extern JS_FRIEND_API(void)
AssertGCThingMustBeTenured(JSObject* obj);
extern JS_FRIEND_API(void)
AssertGCThingIsNotNurseryAllocable(js::gc::Cell* cell);
AssertGCThingIsNotAnObjectSubclass(js::gc::Cell* cell);
#else
inline void
AssertGCThingMustBeTenured(JSObject* obj) {}
inline void
AssertGCThingIsNotNurseryAllocable(js::gc::Cell* cell) {}
AssertGCThingIsNotAnObjectSubclass(js::gc::Cell* cell) {}
#endif
/**
@ -625,7 +624,7 @@ struct BarrierMethods<T*>
}
static void postBarrier(T** vp, T* prev, T* next) {
if (next)
JS::AssertGCThingIsNotNurseryAllocable(reinterpret_cast<js::gc::Cell*>(next));
JS::AssertGCThingIsNotAnObjectSubclass(reinterpret_cast<js::gc::Cell*>(next));
}
static void exposeToJS(T* t) {
if (t)
@ -673,21 +672,6 @@ struct BarrierMethods<JSFunction*>
}
};
// Post-write-barrier hooks for bare JSString* fields. Strings may be
// nursery-allocated, so stores of a JSString* into a heap slot must be
// recorded via the string post barrier (HeapStringPostBarrier).
template <>
struct BarrierMethods<JSString*>
{
// Initial value for a freshly constructed barriered slot.
static JSString* initial() { return nullptr; }
// View the string as a GC cell, or null for a null string.
static gc::Cell* asGCThingOrNull(JSString* v) {
if (!v)
return nullptr;
// Small integer values are never valid GC pointers; catch tagged or
// poisoned values being treated as strings.
MOZ_ASSERT(uintptr_t(v) > 32);
return reinterpret_cast<gc::Cell*>(v);
}
// Delegate to the public JSString* post-barrier entry point.
static void postBarrier(JSString** vp, JSString* prev, JSString* next) {
JS::HeapStringPostBarrier(vp, prev, next);
}
};
// Provide hash codes for Cell kinds that may be relocated and, thus, not have
// a stable address to use as the base for a hash code. Instead of the address,
// this hasher uses Cell::getUniqueId to provide exact matches and as a base

Просмотреть файл

@ -51,7 +51,7 @@ namespace gc {
D(OBJECT16, Object, JSObject, JSObject_Slots16, false, false) \
D(OBJECT16_BACKGROUND, Object, JSObject, JSObject_Slots16, true, true)
#define FOR_EACH_NONOBJECT_NONNURSERY_ALLOCKIND(D) \
#define FOR_EACH_NONOBJECT_ALLOCKIND(D) \
/* AllocKind TraceKind TypeName SizedType BGFinal Nursery */ \
D(SCRIPT, Script, JSScript, JSScript, false, false) \
D(LAZY_SCRIPT, LazyScript, js::LazyScript, js::LazyScript, true, false) \
@ -59,6 +59,8 @@ namespace gc {
D(ACCESSOR_SHAPE, Shape, js::AccessorShape, js::AccessorShape, true, false) \
D(BASE_SHAPE, BaseShape, js::BaseShape, js::BaseShape, true, false) \
D(OBJECT_GROUP, ObjectGroup, js::ObjectGroup, js::ObjectGroup, true, false) \
D(FAT_INLINE_STRING, String, JSFatInlineString, JSFatInlineString, true, false) \
D(STRING, String, JSString, JSString, true, false) \
D(EXTERNAL_STRING, String, JSExternalString, JSExternalString, true, false) \
D(FAT_INLINE_ATOM, String, js::FatInlineAtom, js::FatInlineAtom, true, false) \
D(ATOM, String, js::NormalAtom, js::NormalAtom, true, false) \
@ -67,15 +69,7 @@ namespace gc {
D(SCOPE, Scope, js::Scope, js::Scope, true, false) \
D(REGEXP_SHARED, RegExpShared, js::RegExpShared, js::RegExpShared, true, false)
#define FOR_EACH_NURSERY_STRING_ALLOCKIND(D) \
D(FAT_INLINE_STRING, String, JSFatInlineString, JSFatInlineString, true, true) \
D(STRING, String, JSString, JSString, true, true)
#define FOR_EACH_NONOBJECT_ALLOCKIND(D) \
FOR_EACH_NONOBJECT_NONNURSERY_ALLOCKIND(D) \
FOR_EACH_NURSERY_STRING_ALLOCKIND(D)
#define FOR_EACH_ALLOCKIND(D) \
#define FOR_EACH_ALLOCKIND(D) \
FOR_EACH_OBJECT_ALLOCKIND(D) \
FOR_EACH_NONOBJECT_ALLOCKIND(D)

Просмотреть файл

@ -75,9 +75,8 @@ template JSObject* js::Allocate<JSObject, CanGC>(JSContext* cx, gc::AllocKind ki
size_t nDynamicSlots, gc::InitialHeap heap,
const Class* clasp);
// Attempt to allocate a new JSObject out of the nursery. If there is not
// enough room in the nursery or there is an OOM, this method will return
// nullptr.
// Attempt to allocate a new GC thing out of the nursery. If there is not enough
// room in the nursery or there is an OOM, this method will return nullptr.
template <AllowGC allowGC>
JSObject*
GCRuntime::tryNewNurseryObject(JSContext* cx, size_t thingSize, size_t nDynamicSlots, const Class* clasp)
@ -128,84 +127,6 @@ GCRuntime::tryNewTenuredObject(JSContext* cx, AllocKind kind, size_t thingSize,
return obj;
}
// Attempt to allocate a new string out of the nursery. If there is not enough
// room in the nursery or there is an OOM, this method will return nullptr.
//
// When allowGC is true and the first nursery allocation fails, a minor GC is
// run to free nursery space and the allocation is retried once.
template <AllowGC allowGC>
JSString*
GCRuntime::tryNewNurseryString(JSContext* cx, size_t thingSize, AllocKind kind)
{
// Preconditions: the kind must be nursery-allocable, nursery allocation
// must be allowed on this context, and this must be the main thread.
MOZ_ASSERT(IsNurseryAllocable(kind));
MOZ_ASSERT(cx->isNurseryAllocAllowed());
MOZ_ASSERT(!cx->helperThread());
// Atoms are always tenured; they must not reach this path.
MOZ_ASSERT(!IsAtomsCompartment(cx->compartment()));
Cell* cell = cx->nursery().allocateString(cx, cx->zone(), thingSize, kind);
if (cell)
return static_cast<JSString*>(cell);
// First attempt failed: if we may GC, collect the nursery and retry.
if (allowGC && !cx->suppressGC) {
cx->runtime()->gc.minorGC(JS::gcreason::OUT_OF_NURSERY);
// Exceeding gcMaxBytes while tenuring can disable the Nursery.
if (cx->nursery().isEnabled()) {
// After a minor GC the nursery is empty, so this allocation is
// expected to succeed.
cell = cx->nursery().allocateString(cx, cx->zone(), thingSize, kind);
MOZ_ASSERT(cell);
return static_cast<JSString*>(cell);
}
}
// Caller falls back to a tenured allocation (or reports OOM).
return nullptr;
}
// Allocate a string-like GC thing of type StringAllocT, preferring the
// nursery when it is enabled for this zone and `heap` does not demand a
// tenured allocation. Returns nullptr on failure (with OOM reported only
// when allowGC is CanGC on the helper-thread path).
template <typename StringAllocT, AllowGC allowGC /* = CanGC */>
StringAllocT*
js::AllocateString(JSContext* cx, InitialHeap heap)
{
static_assert(mozilla::IsConvertible<StringAllocT*, JSString*>::value, "must be JSString derived");
AllocKind kind = MapTypeToFinalizeKind<StringAllocT>::kind;
size_t size = sizeof(StringAllocT);
// The requested type must exactly fill its alloc kind's thing size.
MOZ_ASSERT(size == Arena::thingSize(kind));
MOZ_ASSERT(size == sizeof(JSString) || size == sizeof(JSFatInlineString));
// Off-thread alloc cannot trigger GC or make runtime assertions.
if (cx->helperThread()) {
StringAllocT* str = GCRuntime::tryNewTenuredThing<StringAllocT, NoGC>(cx, kind, size);
if (MOZ_UNLIKELY(allowGC && !str))
ReportOutOfMemory(cx);
return str;
}
JSRuntime* rt = cx->runtime();
// Honors GC zeal / alloc-trigger bookkeeping; may run a GC when allowed.
if (!rt->gc.checkAllocatorState<allowGC>(cx, kind))
return nullptr;
// Nursery path: only taken when the nursery exists, the caller did not
// require the tenured heap, and string nursery-allocation is enabled both
// globally and for this zone.
if (cx->nursery().isEnabled() &&
heap != TenuredHeap &&
cx->nursery().canAllocateStrings() &&
cx->zone()->allocNurseryStrings)
{
auto str = static_cast<StringAllocT*>(rt->gc.tryNewNurseryString<allowGC>(cx, size, kind));
if (str)
return str;
// Our most common non-jit allocation path is NoGC; thus, if we fail the
// alloc and cannot GC, we *must* return nullptr here so that the caller
// will do a CanGC allocation to clear the nursery. Failing to do so will
// cause all allocations on this path to land in Tenured, and we will not
// get the benefit of the nursery.
if (!allowGC)
return nullptr;
}
// Fall back to a tenured allocation.
return GCRuntime::tryNewTenuredThing<StringAllocT, allowGC>(cx, kind, size);
}
#define DECL_ALLOCATOR_INSTANCES(allocKind, traceKind, type, sizedType, bgfinal, nursery) \
template type* js::AllocateString<type, NoGC>(JSContext* cx, InitialHeap heap);\
template type* js::AllocateString<type, CanGC>(JSContext* cx, InitialHeap heap);
FOR_EACH_NURSERY_STRING_ALLOCKIND(DECL_ALLOCATOR_INSTANCES)
#undef DECL_ALLOCATOR_INSTANCES
template <typename T, AllowGC allowGC /* = CanGC */>
T*
js::Allocate(JSContext* cx)
@ -229,7 +150,7 @@ js::Allocate(JSContext* cx)
#define DECL_ALLOCATOR_INSTANCES(allocKind, traceKind, type, sizedType, bgFinal, nursery) \
template type* js::Allocate<type, NoGC>(JSContext* cx);\
template type* js::Allocate<type, CanGC>(JSContext* cx);
FOR_EACH_NONOBJECT_NONNURSERY_ALLOCKIND(DECL_ALLOCATOR_INSTANCES)
FOR_EACH_NONOBJECT_ALLOCKIND(DECL_ALLOCATOR_INSTANCES)
#undef DECL_ALLOCATOR_INSTANCES
template <typename T, AllowGC allowGC>

Просмотреть файл

@ -10,8 +10,6 @@
#include "gc/Heap.h"
#include "js/RootingAPI.h"
class JSFatInlineString;
namespace js {
struct Class;
@ -20,49 +18,19 @@ struct Class;
// fully initialize the thing before calling any function that can potentially
// trigger GC. This will ensure that GC tracing never sees junk values stored
// in the partially initialized thing.
//
// Note that JSObject allocation must use the longer signature below that
// includes slot, heap, and finalizer information in support of various
// object-specific optimizations.
template <typename T, AllowGC allowGC = CanGC>
T*
Allocate(JSContext* cx);
// Use for JSObject. A longer signature that includes additional information in
// support of various optimizations.
template <typename, AllowGC allowGC = CanGC>
JSObject*
Allocate(JSContext* cx, gc::AllocKind kind, size_t nDynamicSlots, gc::InitialHeap heap,
const Class* clasp);
// Internal function used for nursery-allocatable strings.
template <typename StringAllocT, AllowGC allowGC = CanGC>
StringAllocT*
AllocateString(JSContext* cx, gc::InitialHeap heap);
// Use for nursery-allocatable strings. Returns a value cast to the correct
// type.
template <typename StringT, AllowGC allowGC = CanGC>
StringT*
Allocate(JSContext* cx, gc::InitialHeap heap)
{
return static_cast<StringT*>(js::AllocateString<JSString, allowGC>(cx, heap));
}
// Specialization for JSFatInlineString that must use a different allocation
// type. Note that we have to explicitly specialize for both values of AllowGC
// because partial function specialization is not allowed.
template <>
inline JSFatInlineString*
Allocate<JSFatInlineString, CanGC>(JSContext* cx, gc::InitialHeap heap)
{
return static_cast<JSFatInlineString*>(js::AllocateString<JSFatInlineString, CanGC>(cx, heap));
}
template <>
inline JSFatInlineString*
Allocate<JSFatInlineString, NoGC>(JSContext* cx, gc::InitialHeap heap)
{
return static_cast<JSFatInlineString*>(js::AllocateString<JSFatInlineString, NoGC>(cx, heap));
}
} // namespace js
#endif // gc_Allocator_h

Просмотреть файл

@ -43,8 +43,7 @@ AtomMarkingRuntime::inlinedMarkAtom(JSContext* cx, T* thing)
"Should only be called with JSAtom* or JS::Symbol* argument");
MOZ_ASSERT(thing);
js::gc::TenuredCell* cell = &thing->asTenured();
MOZ_ASSERT(cell->zoneFromAnyThread()->isAtomsZone());
MOZ_ASSERT(thing->zoneFromAnyThread()->isAtomsZone());
// The context's zone will be null during initialization of the runtime.
if (!cx->zone())
@ -54,7 +53,7 @@ AtomMarkingRuntime::inlinedMarkAtom(JSContext* cx, T* thing)
if (ThingIsPermanent(thing))
return;
size_t bit = GetAtomBit(cell);
size_t bit = GetAtomBit(thing);
MOZ_ASSERT(bit / JS_BITS_PER_WORD < allocatedWords);
cx->zone()->markedAtoms().setBit(bit);

Просмотреть файл

@ -223,7 +223,7 @@ AtomMarkingRuntime::atomIsMarked(Zone* zone, T* thing)
return true;
}
size_t bit = GetAtomBit(&thing->asTenured());
size_t bit = GetAtomBit(thing);
return zone->markedAtoms().getBit(bit);
}
@ -239,9 +239,7 @@ AtomMarkingRuntime::atomIsMarked(Zone* zone, TenuredCell* thing)
if (thing->is<JSString>()) {
JSString* str = thing->as<JSString>();
if (!str->isAtom())
return true;
return atomIsMarked(zone, &str->asAtom());
return str->isAtom() ? atomIsMarked(zone, &str->asAtom()) : true;
}
if (thing->is<JS::Symbol>())

Просмотреть файл

@ -220,13 +220,6 @@ JS::HeapObjectPostBarrier(JSObject** objp, JSObject* prev, JSObject* next)
js::InternalBarrierMethods<JSObject*>::postBarrier(objp, prev, next);
}
// Public entry point for the JSString* post-write barrier. Records the store
// of `next` over `prev` at slot `strp` via the internal barrier machinery.
JS_PUBLIC_API(void)
JS::HeapStringPostBarrier(JSString** strp, JSString* prev, JSString* next)
{
// The slot being written must itself be valid.
MOZ_ASSERT(strp);
js::InternalBarrierMethods<JSString*>::postBarrier(strp, prev, next);
}
JS_PUBLIC_API(void)
JS::HeapValuePostBarrier(JS::Value* valuep, const Value& prev, const Value& next)
{

Просмотреть файл

@ -289,18 +289,18 @@ struct InternalBarrierMethods<Value>
// If the target needs an entry, add it.
js::gc::StoreBuffer* sb;
if ((next.isObject() || next.isString()) && (sb = next.toGCThing()->storeBuffer())) {
if (next.isObject() && (sb = reinterpret_cast<gc::Cell*>(&next.toObject())->storeBuffer())) {
// If we know that the prev has already inserted an entry, we can
// skip doing the lookup to add the new entry. Note that we cannot
// safely assert the presence of the entry because it may have been
// added via a different store buffer.
if ((prev.isObject() || prev.isString()) && prev.toGCThing()->storeBuffer())
if (prev.isObject() && reinterpret_cast<gc::Cell*>(&prev.toObject())->storeBuffer())
return;
sb->putValue(vp);
return;
}
// Remove the prev entry if the new value does not need it.
if ((prev.isObject() || prev.isString()) && (sb = prev.toGCThing()->storeBuffer()))
if (prev.isObject() && (sb = reinterpret_cast<gc::Cell*>(&prev.toObject())->storeBuffer()))
sb->unputValue(vp);
}
@ -687,8 +687,8 @@ class HeapSlot : public WriteBarrieredBase<Value>
#ifdef DEBUG
assertPreconditionForWriteBarrierPost(owner, kind, slot, target);
#endif
if (this->value.isObject() || this->value.isString()) {
gc::Cell* cell = this->value.toGCThing();
if (this->value.isObject()) {
gc::Cell* cell = reinterpret_cast<gc::Cell*>(&this->value.toObject());
if (cell->storeBuffer())
cell->storeBuffer()->putSlot(owner, kind, slot, 1);
}

Просмотреть файл

@ -242,11 +242,7 @@ Cell::storeBuffer() const
inline JS::TraceKind
Cell::getTraceKind() const
{
if (isTenured())
return asTenured().getTraceKind();
if (js::shadow::String::nurseryCellIsString(this))
return JS::TraceKind::String;
return JS::TraceKind::Object;
return isTenured() ? asTenured().getTraceKind() : JS::TraceKind::Object;
}
/* static */ MOZ_ALWAYS_INLINE bool
@ -420,8 +416,7 @@ static MOZ_ALWAYS_INLINE void
AssertValidToSkipBarrier(TenuredCell* thing)
{
MOZ_ASSERT(!IsInsideNursery(thing));
MOZ_ASSERT_IF(thing, MapAllocToTraceKind(thing->getAllocKind()) != JS::TraceKind::Object &&
MapAllocToTraceKind(thing->getAllocKind()) != JS::TraceKind::String);
MOZ_ASSERT_IF(thing, MapAllocToTraceKind(thing->getAllocKind()) != JS::TraceKind::Object);
}
/* static */ MOZ_ALWAYS_INLINE void

Просмотреть файл

@ -247,6 +247,20 @@ IsOOMReason(JS::gcreason::Reason reason)
reason == JS::gcreason::MEM_PRESSURE;
}
// Mark this (moved) cell as relocated and record the address of its new
// tenured copy, so later reads through the overlay can be forwarded.
inline void
RelocationOverlay::forwardTo(Cell* cell)
{
// A cell must be forwarded at most once.
MOZ_ASSERT(!isForwarded());
// The location of magic_ is important because it must never be valid to see
// the value Relocated there in a GC thing that has not been moved.
static_assert(offsetof(RelocationOverlay, magic_) == offsetof(JSObject, group_) &&
offsetof(RelocationOverlay, magic_) == offsetof(js::Shape, base_) &&
offsetof(RelocationOverlay, magic_) == offsetof(JSString, d.u1.flags),
"RelocationOverlay::magic_ is in the wrong location");
magic_ = Relocated;
newLocation_ = cell;
}
} /* namespace gc */
} /* namespace js */

Просмотреть файл

@ -796,8 +796,6 @@ class GCRuntime
void traceRuntime(JSTracer* trc, AutoLockForExclusiveAccess& lock);
void traceRuntimeForMinorGC(JSTracer* trc, AutoLockForExclusiveAccess& lock);
void purgeRuntimeForMinorGC();
void shrinkBuffers();
void onOutOfMallocMemory();
void onOutOfMallocMemory(const AutoLockGC& lock);
@ -1000,8 +998,6 @@ class GCRuntime
size_t nDynamicSlots);
template <typename T, AllowGC allowGC>
static T* tryNewTenuredThing(JSContext* cx, AllocKind kind, size_t thingSize);
template <AllowGC allowGC>
JSString* tryNewNurseryString(JSContext* cx, size_t thingSize, AllocKind kind);
static TenuredCell* refillFreeListInGC(Zone* zone, AllocKind thingKind);
void bufferGrayRoots();
@ -1503,9 +1499,6 @@ class GCRuntime
const void* addressOfNurseryCurrentEnd() {
return nursery_.refNoCheck().addressOfCurrentEnd();
}
const void* addressOfStringNurseryCurrentEnd() {
return nursery_.refNoCheck().addressOfCurrentStringEnd();
}
void minorGC(JS::gcreason::Reason reason,
gcstats::PhaseKind phase = gcstats::PhaseKind::MINOR_GC) JS_HAZ_GC_CALL;

Просмотреть файл

@ -133,13 +133,6 @@ js::gc::TraceNurseryAlloc(Cell* thing, size_t size)
}
}
// Record a nursery allocation (with its AllocKind) in the GC trace log.
void
js::gc::TraceNurseryAlloc(Cell* thing, AllocKind kind)
{
// Failed allocations (null) are not traced.
if (thing)
TraceEvent(TraceEventNurseryAlloc, uint64_t(thing), kind);
}
void
js::gc::TraceTenuredAlloc(Cell* thing, AllocKind kind)
{

Просмотреть файл

@ -21,7 +21,6 @@ extern MOZ_MUST_USE bool InitTrace(GCRuntime& gc);
extern void FinishTrace();
extern bool TraceEnabled();
extern void TraceNurseryAlloc(Cell* thing, size_t size);
extern void TraceNurseryAlloc(Cell* thing, AllocKind kind);
extern void TraceTenuredAlloc(Cell* thing, AllocKind kind);
extern void TraceCreateObject(JSObject* object);
extern void TraceMinorGCStart();
@ -38,7 +37,6 @@ inline MOZ_MUST_USE bool InitTrace(GCRuntime& gc) { return true; }
inline void FinishTrace() {}
inline bool TraceEnabled() { return false; }
inline void TraceNurseryAlloc(Cell* thing, size_t size) {}
inline void TraceNurseryAlloc(Cell* thing, AllocKind kind) {}
inline void TraceTenuredAlloc(Cell* thing, AllocKind kind) {}
inline void TraceCreateObject(JSObject* object) {}
inline void TraceMinorGCStart() {}

Просмотреть файл

@ -888,8 +888,6 @@ InFreeList(Arena* arena, void* thing)
static const int32_t ChunkLocationOffsetFromLastByte =
int32_t(gc::ChunkLocationOffset) - int32_t(gc::ChunkMask);
static const int32_t ChunkStoreBufferOffsetFromLastByte =
int32_t(gc::ChunkStoreBufferOffset) - int32_t(gc::ChunkMask);
} /* namespace gc */

Просмотреть файл

@ -34,9 +34,9 @@ struct MightBeForwarded
template <typename T>
inline bool
IsForwarded(const T* t)
IsForwarded(T* t)
{
const RelocationOverlay* overlay = RelocationOverlay::fromCell(t);
RelocationOverlay* overlay = RelocationOverlay::fromCell(t);
if (!MightBeForwarded<T>::value) {
MOZ_ASSERT(!overlay->isForwarded());
return false;
@ -46,7 +46,7 @@ IsForwarded(const T* t)
}
struct IsForwardedFunctor : public BoolDefaultAdaptor<Value, false> {
template <typename T> bool operator()(const T* t) { return IsForwarded(t); }
template <typename T> bool operator()(T* t) { return IsForwarded(t); }
};
inline bool
@ -57,15 +57,15 @@ IsForwarded(const JS::Value& value)
template <typename T>
inline T*
Forwarded(const T* t)
Forwarded(T* t)
{
const RelocationOverlay* overlay = RelocationOverlay::fromCell(t);
RelocationOverlay* overlay = RelocationOverlay::fromCell(t);
MOZ_ASSERT(overlay->isForwarded());
return reinterpret_cast<T*>(overlay->forwardingAddress());
}
struct ForwardedFunctor : public IdentityDefaultAdaptor<Value> {
template <typename T> inline Value operator()(const T* t) {
template <typename T> inline Value operator()(T* t) {
return js::gc::RewrapTaggedPointer<Value, T>::wrap(Forwarded(t));
}
};
@ -86,22 +86,6 @@ MaybeForwarded(T t)
return t;
}
// Mark this (moved) cell as relocated and record the address of its new
// copy, so later reads through the overlay can be forwarded.
inline void
RelocationOverlay::forwardTo(Cell* cell)
{
// A cell must be forwarded at most once.
MOZ_ASSERT(!isForwarded());
// The location of magic_ is important because it must never be valid to see
// the value Relocated there in a GC thing that has not been moved.
static_assert(offsetof(RelocationOverlay, magic_) == offsetof(JSObject, group_) + sizeof(uint32_t),
"RelocationOverlay::magic_ is in the wrong location");
static_assert(offsetof(RelocationOverlay, magic_) == offsetof(js::Shape, base_) + sizeof(uint32_t),
"RelocationOverlay::magic_ is in the wrong location");
static_assert(offsetof(RelocationOverlay, magic_) == offsetof(JSString, d.u1.length),
"RelocationOverlay::magic_ is in the wrong location");
magic_ = Relocated;
newLocation_ = cell;
}
#ifdef JSGC_HASH_TABLE_CHECKS
template <typename T>

Просмотреть файл

@ -625,7 +625,7 @@ js::TraceProcessGlobalRoot(JSTracer* trc, T* thing, const char* name)
// permanent atoms, so likewise require no subsquent marking.
CheckTracedThing(trc, *ConvertToBase(&thing));
if (trc->isMarkingTracer())
thing->asTenured().markIfUnmarked(gc::MarkColor::Black);
thing->markIfUnmarked(gc::MarkColor::Black);
else
DoCallback(trc->asCallbackTracer(), ConvertToBase(&thing), name);
}
@ -797,18 +797,6 @@ ShouldMark<JSObject*>(GCMarker* gcmarker, JSObject* obj)
return obj->asTenured().zone()->shouldMarkInZone();
}
// JSStrings can also be in the nursery. See ShouldMark<JSObject*> for comments.
template <>
bool
ShouldMark<JSString*>(GCMarker* gcmarker, JSString* str)
{
// Strings owned by another runtime are not marked by this collector.
if (IsOwnedByOtherRuntime(gcmarker->runtime(), str))
return false;
// Nursery strings are handled by minor GC, not by the marker.
if (IsInsideNursery(str))
return false;
// Otherwise mark only if the string's zone is being collected.
return str->asTenured().zone()->shouldMarkInZone();
}
template <typename T>
void
DoMarking(GCMarker* gcmarker, T* thing)
@ -2706,13 +2694,12 @@ TenuringTracer::traverse(JSObject** objp)
// We only ever visit the internals of objects after moving them to tenured.
MOZ_ASSERT(!nursery().isInside(objp));
Cell** cellp = reinterpret_cast<Cell**>(objp);
if (!IsInsideNursery(*cellp) || nursery().getForwardedPointer(cellp))
JSObject* obj = *objp;
if (!IsInsideNursery(obj) || nursery().getForwardedPointer(objp))
return;
// Take a fast path for tenuring a plain object which is by far the most
// common case.
JSObject* obj = *objp;
if (obj->is<PlainObject>()) {
*objp = movePlainObjectToTenured(&obj->as<PlainObject>());
return;
@ -2721,18 +2708,6 @@ TenuringTracer::traverse(JSObject** objp)
*objp = moveToTenuredSlow(obj);
}
// Tenure a nursery string reachable through *strp, updating the edge to
// point at the tenured copy.
template <>
void
TenuringTracer::traverse(JSString** strp)
{
// We only ever visit the internals of strings after moving them to tenured.
MOZ_ASSERT(!nursery().isInside(strp));
Cell** cellp = reinterpret_cast<Cell**>(strp);
// If the string is still in the nursery and has not already been forwarded
// (getForwardedPointer updates *cellp when it has), move it now.
if (IsInsideNursery(*cellp) && !nursery().getForwardedPointer(cellp))
*strp = moveToTenured(*strp);
}
template <typename S>
struct TenuringTraversalFunctor : public IdentityDefaultAdaptor<S> {
template <typename T> S operator()(T* t, TenuringTracer* trc) {
@ -2821,12 +2796,6 @@ TraceWholeCell(TenuringTracer& mover, JSObject* object)
}
}
// Whole-cell store-buffer entry for a string: trace all of its children.
static inline void
TraceWholeCell(TenuringTracer& mover, JSString* str)
{
str->traceChildren(&mover);
}
static inline void
TraceWholeCell(TenuringTracer& mover, JSScript* script)
{
@ -2866,9 +2835,6 @@ js::gc::StoreBuffer::traceWholeCells(TenuringTracer& mover)
case JS::TraceKind::Object:
TraceBufferedCells<JSObject>(mover, arena, cells);
break;
case JS::TraceKind::String:
TraceBufferedCells<JSString>(mover, arena, cells);
break;
case JS::TraceKind::Script:
TraceBufferedCells<JSScript>(mover, arena, cells);
break;
@ -2890,22 +2856,8 @@ js::gc::StoreBuffer::CellPtrEdge::trace(TenuringTracer& mover) const
return;
MOZ_ASSERT(IsCellPointerValid(*edge));
#ifdef DEBUG
auto traceKind = (*edge)->getTraceKind();
MOZ_ASSERT(traceKind == JS::TraceKind::Object || traceKind == JS::TraceKind::String);
#endif
// Bug 1376646: Make separate store buffers for strings and objects, and
// only check IsInsideNursery once.
if (!IsInsideNursery(*edge))
return;
if (JSString::nurseryCellIsString(*edge))
mover.traverse(reinterpret_cast<JSString**>(edge));
else
mover.traverse(reinterpret_cast<JSObject**>(edge));
MOZ_ASSERT((*edge)->getTraceKind() == JS::TraceKind::Object);
mover.traverse(reinterpret_cast<JSObject**>(edge));
}
void
@ -2915,7 +2867,6 @@ js::gc::StoreBuffer::ValueEdge::trace(TenuringTracer& mover) const
mover.traverse(edge);
}
struct TenuringFunctor
{
template <typename T>
@ -2973,12 +2924,6 @@ js::TenuringTracer::traceSlots(JS::Value* vp, uint32_t nslots)
traceSlots(vp, vp + nslots);
}
// Trace the children of a string promoted during nursery collection.
void
js::TenuringTracer::traceString(JSString* str)
{
str->traceChildren(this);
}
#ifdef DEBUG
static inline ptrdiff_t
OffsetToChunkEnd(void* p)
@ -2989,10 +2934,10 @@ OffsetToChunkEnd(void* p)
/* Insert the given relocation entry into the list of things to visit. */
inline void
js::TenuringTracer::insertIntoObjectFixupList(RelocationOverlay* entry) {
*objTail = entry;
objTail = &entry->nextRef();
*objTail = nullptr;
js::TenuringTracer::insertIntoFixupList(RelocationOverlay* entry) {
*tail = entry;
tail = &entry->nextRef();
*tail = nullptr;
}
template <typename T>
@ -3079,7 +3024,7 @@ js::TenuringTracer::moveToTenuredSlow(JSObject* src)
RelocationOverlay* overlay = RelocationOverlay::fromCell(src);
overlay->forwardTo(dst);
insertIntoObjectFixupList(overlay);
insertIntoFixupList(overlay);
TracePromoteToTenured(src, dst);
return dst;
@ -3111,7 +3056,7 @@ js::TenuringTracer::movePlainObjectToTenured(PlainObject* src)
RelocationOverlay* overlay = RelocationOverlay::fromCell(src);
overlay->forwardTo(dst);
insertIntoObjectFixupList(overlay);
insertIntoFixupList(overlay);
TracePromoteToTenured(src, dst);
return dst;
@ -3195,44 +3140,10 @@ js::TenuringTracer::moveElementsToTenured(NativeObject* dst, NativeObject* src,
return nslots * sizeof(HeapSlot);
}
// Append a relocated string's overlay to the singly-linked string fixup
// list; stringTail always points at the list's terminating next-slot.
inline void
js::TenuringTracer::insertIntoStringFixupList(RelocationOverlay* entry) {
*stringTail = entry;
stringTail = &entry->nextRef();
*stringTail = nullptr;
}
// Move a nursery string into the tenured heap: allocate a tenured cell of
// the same AllocKind, copy the string's contents, forward the old cell to
// the new one, and queue the copy for later child-tracing. Returns the
// tenured copy.
JSString*
js::TenuringTracer::moveToTenured(JSString* src)
{
MOZ_ASSERT(IsInsideNursery(src));
MOZ_ASSERT(!src->zone()->usedByHelperThread());
AllocKind dstKind = src->getAllocKind();
Zone* zone = src->zone();
// Fast path: take a tenured cell from the zone's free lists.
TenuredCell* t = zone->arenas.allocateFromFreeList(dstKind, Arena::thingSize(dstKind));
if (!t) {
// Slow path: refill the free list. Allocation failure mid-minor-GC is
// unrecoverable, so crash deliberately rather than continue.
AutoEnterOOMUnsafeRegion oomUnsafe;
t = runtime()->gc.refillFreeListInGC(zone, dstKind);
if (!t)
oomUnsafe.crash(ChunkSize, "Failed to allocate string while tenuring.");
}
JSString* dst = reinterpret_cast<JSString*>(t);
tenuredSize += moveStringToTenured(dst, src, dstKind);
// Leave a forwarding pointer in the old nursery cell and remember it so
// the copy's children get traced by collectToFixedPoint.
RelocationOverlay* overlay = RelocationOverlay::fromCell(src);
overlay->forwardTo(dst);
insertIntoStringFixupList(overlay);
TracePromoteToTenured(src, dst);
return dst;
}
void
js::Nursery::collectToFixedPoint(TenuringTracer& mover, TenureCountCache& tenureCounts)
{
for (RelocationOverlay* p = mover.objHead; p; p = p->next()) {
for (RelocationOverlay* p = mover.head; p; p = p->next()) {
JSObject* obj = static_cast<JSObject*>(p->forwardingAddress());
mover.traceObject(obj);
@ -3244,30 +3155,6 @@ js::Nursery::collectToFixedPoint(TenuringTracer& mover, TenureCountCache& tenure
entry.count = 1;
}
}
for (RelocationOverlay* p = mover.stringHead; p; p = p->next())
mover.traceString(static_cast<JSString*>(p->forwardingAddress()));
}
// Copy a nursery string's cell contents into its tenured cell and release
// nursery bookkeeping for any out-of-line character buffer. Returns the
// number of bytes tenured.
size_t
js::TenuringTracer::moveStringToTenured(JSString* dst, JSString* src, AllocKind dstKind)
{
size_t size = Arena::thingSize(dstKind);
// At the moment, strings always have the same AllocKind between src and
// dst. This may change in the future.
MOZ_ASSERT(dst->asTenured().getAllocKind() == src->getAllocKind());
// Copy the Cell contents.
MOZ_ASSERT(OffsetToChunkEnd(src) >= ptrdiff_t(size));
js_memcpy(dst, src, size);
// A linear, non-inline, non-dependent string owns a malloced char buffer;
// drop it from the nursery's malloced-buffer set so the nursery does not
// free it (the tenured copy now references it).
if (src->isLinear() && !src->isInline() && !src->hasBase()) {
void* chars = src->asLinear().nonInlineCharsRaw();
nursery().removeMallocedBuffer(chars);
}
return size;
}
@ -3333,8 +3220,7 @@ IsMarkedInternal(JSRuntime* rt, JSObject** thingp)
if (IsInsideNursery(*thingp)) {
MOZ_ASSERT(CurrentThreadCanAccessRuntime(rt));
Cell** cellp = reinterpret_cast<Cell**>(thingp);
return Nursery::getForwardedPointer(cellp);
return Nursery::getForwardedPointer(thingp);
}
return IsMarkedInternalCommon(thingp);
}
@ -3380,7 +3266,7 @@ IsAboutToBeFinalizedInternal(T** thingp)
if (IsInsideNursery(thing)) {
return JS::CurrentThreadIsHeapMinorCollecting() &&
!Nursery::getForwardedPointer(reinterpret_cast<Cell**>(thingp));
!Nursery::getForwardedPointer(reinterpret_cast<JSObject**>(thingp));
}
Zone* zone = thing->asTenured().zoneFromAnyThread();

Просмотреть файл

@ -164,11 +164,11 @@ namespace gc {
// to allow slots to be accessed.
template <typename T>
inline bool IsForwarded(const T* t);
inline bool IsForwarded(T* t);
inline bool IsForwarded(const JS::Value& value);
template <typename T>
inline T* Forwarded(const T* t);
inline T* Forwarded(T* t);
inline Value Forwarded(const JS::Value& value);

Просмотреть файл

@ -27,14 +27,14 @@ js::Nursery::isInside(const SharedMem<T>& p) const
}
MOZ_ALWAYS_INLINE /* static */ bool
js::Nursery::getForwardedPointer(js::gc::Cell** ref)
js::Nursery::getForwardedPointer(JSObject** ref)
{
MOZ_ASSERT(ref);
MOZ_ASSERT(IsInsideNursery(*ref));
const gc::RelocationOverlay* overlay = reinterpret_cast<const gc::RelocationOverlay*>(*ref);
if (!overlay->isForwarded())
return false;
*ref = overlay->forwardingAddress();
*ref = static_cast<JSObject*>(overlay->forwardingAddress());
return true;
}

Просмотреть файл

@ -114,7 +114,6 @@ js::Nursery::Nursery(JSRuntime* rt)
, currentStartChunk_(0)
, currentStartPosition_(0)
, currentEnd_(0)
, currentStringEnd_(0)
, currentChunk_(0)
, maxChunkCount_(0)
, chunkCountLimit_(0)
@ -122,7 +121,6 @@ js::Nursery::Nursery(JSRuntime* rt)
, previousPromotionRate_(0)
, profileThreshold_(0)
, enableProfiling_(false)
, canAllocateStrings_(true)
, reportTenurings_(0)
, minorGCTriggerReason_(JS::gcreason::NO_REASON)
, minorGcCount_(0)
@ -130,11 +128,7 @@ js::Nursery::Nursery(JSRuntime* rt)
#ifdef JS_GC_ZEAL
, lastCanary_(nullptr)
#endif
{
const char* env = getenv("MOZ_DISABLE_NURSERY_STRINGS");
if (env && *env)
canAllocateStrings_ = false;
}
{}
bool
js::Nursery::init(uint32_t maxNurseryBytes, AutoLockGCBgAlloc& lock)
@ -235,26 +229,10 @@ js::Nursery::disable()
maxChunkCount_ = 0;
currentEnd_ = 0;
currentStringEnd_ = 0;
runtime()->gc.storeBuffer().disable();
}
// Re-enable nursery allocation of strings. Only legal while the nursery is
// empty; restores the string allocation limit to the nursery's current end.
void
js::Nursery::enableStrings()
{
MOZ_ASSERT(isEmpty());
canAllocateStrings_ = true;
currentStringEnd_ = currentEnd_;
}
// Disable nursery allocation of strings. Only legal while the nursery is
// empty; zeroing currentStringEnd_ makes every string bump-alloc check fail.
void
js::Nursery::disableStrings()
{
MOZ_ASSERT(isEmpty());
canAllocateStrings_ = false;
currentStringEnd_ = 0;
}
bool
js::Nursery::isEmpty() const
{
@ -321,23 +299,6 @@ js::Nursery::allocateObject(JSContext* cx, size_t size, size_t numDynamic, const
return obj;
}
// Bump-allocate a string cell of `size` bytes in the nursery for `zone`.
// The cell is preceded by a StringLayout header carrying the zone pointer.
// Returns the cell, or nullptr when the nursery is out of space.
Cell*
js::Nursery::allocateString(JSContext* cx, Zone* zone, size_t size, AllocKind kind)
{
/* Ensure there's enough space to replace the contents with a RelocationOverlay. */
MOZ_ASSERT(size >= sizeof(RelocationOverlay));
// Total allocation = header + cell, rounded up to cell alignment. The
// `- 1` accounts for the cell storage overlapping the header's last byte
// (StringLayout's trailing cell member) — see StringLayout's definition.
size_t allocSize = JS_ROUNDUP(sizeof(StringLayout) - 1 + size, CellAlignBytes);
auto header = static_cast<StringLayout*>(allocate(allocSize));
if (!header)
return nullptr;
// Stash the zone in the header so the zone of a nursery string can be
// recovered without consulting the cell itself.
header->zone = zone;
auto cell = reinterpret_cast<Cell*>(&header->cell);
TraceNurseryAlloc(cell, kind);
return cell;
}
void*
js::Nursery::allocate(size_t size)
{
@ -407,7 +368,7 @@ js::Nursery::allocateBuffer(Zone* zone, size_t nbytes)
}
void* buffer = zone->pod_malloc<uint8_t>(nbytes);
if (buffer && !registerMallocedBuffer(buffer)) {
if (buffer && !mallocedBuffers.putNew(buffer)) {
js_free(buffer);
return nullptr;
}
@ -530,10 +491,8 @@ js::TenuringTracer::TenuringTracer(JSRuntime* rt, Nursery* nursery)
: JSTracer(rt, JSTracer::TracerKindTag::Tenuring, TraceWeakMapKeysValues)
, nursery_(*nursery)
, tenuredSize(0)
, objHead(nullptr)
, objTail(&objHead)
, stringHead(nullptr)
, stringTail(&stringHead)
, head(nullptr)
, tail(&head)
{
}
@ -734,40 +693,21 @@ js::Nursery::collect(JS::gcreason::Reason reason)
bool validPromotionRate;
const float promotionRate = calcPromotionRate(&validPromotionRate);
uint32_t pretenureCount = 0;
bool shouldPretenure = (validPromotionRate && promotionRate > 0.6) ||
IsFullStoreBufferReason(reason);
if (shouldPretenure) {
JSContext* cx = TlsContext.get();
for (auto& entry : tenureCounts.entries) {
if (entry.count >= 3000) {
ObjectGroup* group = entry.group;
if (group->canPreTenure() && group->zone()->group()->canEnterWithoutYielding(cx)) {
AutoCompartment ac(cx, group);
group->setShouldPreTenure(cx);
pretenureCount++;
if (validPromotionRate) {
if (promotionRate > 0.8 || IsFullStoreBufferReason(reason)) {
JSContext* cx = TlsContext.get();
for (auto& entry : tenureCounts.entries) {
if (entry.count >= 3000) {
ObjectGroup* group = entry.group;
if (group->canPreTenure() && group->zone()->group()->canEnterWithoutYielding(cx)) {
AutoCompartment ac(cx, group);
group->setShouldPreTenure(cx);
pretenureCount++;
}
}
}
}
}
for (ZonesIter zone(rt, SkipAtoms); !zone.done(); zone.next()) {
if (shouldPretenure && zone->allocNurseryStrings && zone->tenuredStrings >= 30 * 1000) {
JSRuntime::AutoProhibitActiveContextChange apacc(rt);
CancelOffThreadIonCompile(zone);
bool preserving = zone->isPreservingCode();
zone->setPreservingCode(false);
zone->discardJitCode(rt->defaultFreeOp());
zone->setPreservingCode(preserving);
for (CompartmentsInZoneIter c(zone); !c.done(); c.next()) {
if (jit::JitCompartment* jitComp = c->jitCompartment()) {
jitComp->discardStubs();
jitComp->stringsCanBeInNursery = false;
}
}
zone->allocNurseryStrings = false;
}
zone->tenuredStrings = 0;
}
endProfile(ProfileKey::Pretenure);
// We ignore gcMaxBytes when allocating for minor collection. However, if we
@ -875,9 +815,9 @@ js::Nursery::doCollection(JS::gcreason::Reason reason,
}
endProfile(ProfileKey::MarkDebugger);
startProfile(ProfileKey::SweepCaches);
rt->gc.purgeRuntimeForMinorGC();
endProfile(ProfileKey::SweepCaches);
startProfile(ProfileKey::ClearNewObjectCache);
rt->caches().newObjectCache.clearNurseryObjects(rt);
endProfile(ProfileKey::ClearNewObjectCache);
// Most of the work is done here. This loop iterates over objects that have
// been moved to the major heap. If these objects have any outgoing pointers
@ -950,13 +890,6 @@ js::Nursery::FreeMallocedBuffersTask::run()
buffers_.clear();
}
bool
js::Nursery::registerMallocedBuffer(void* buffer)
{
MOZ_ASSERT(buffer);
return mallocedBuffers.putNew(buffer);
}
void
js::Nursery::freeMallocedBuffers()
{
@ -1060,8 +993,6 @@ js::Nursery::setCurrentChunk(unsigned chunkno)
currentChunk_ = chunkno;
position_ = chunk(chunkno).start();
currentEnd_ = chunk(chunkno).end();
if (canAllocateStrings_)
currentStringEnd_ = currentEnd_;
chunk(chunkno).poisonAndInit(runtime(), JS_FRESH_NURSERY_PATTERN);
}
@ -1101,7 +1032,7 @@ js::Nursery::setStartPosition()
void
js::Nursery::maybeResizeNursery(JS::gcreason::Reason reason)
{
static const double GrowThreshold = 0.03;
static const double GrowThreshold = 0.05;
static const double ShrinkThreshold = 0.01;
unsigned newMaxNurseryChunks;
@ -1220,20 +1151,3 @@ js::Nursery::sweepDictionaryModeObjects()
}
dictionaryModeObjects_.clear();
}
JS_PUBLIC_API(void)
JS::EnableNurseryStrings(JSContext* cx)
{
AutoEmptyNursery empty(cx);
ReleaseAllJITCode(cx->runtime()->defaultFreeOp());
cx->runtime()->gc.nursery().enableStrings();
}
JS_PUBLIC_API(void)
JS::DisableNurseryStrings(JSContext* cx)
{
AutoEmptyNursery empty(cx);
ReleaseAllJITCode(cx->runtime()->defaultFreeOp());
cx->runtime()->gc.nursery().disableStrings();
}

Просмотреть файл

@ -28,7 +28,7 @@
_(CheckHashTables, "ckTbls") \
_(MarkRuntime, "mkRntm") \
_(MarkDebugger, "mkDbgr") \
_(SweepCaches, "swpCch") \
_(ClearNewObjectCache, "clrNOC") \
_(CollectToFP, "collct") \
_(ObjectsTenuredCallback, "tenCB") \
_(Sweep, "sweep") \
@ -80,19 +80,18 @@ class TenuringTracer : public JSTracer
// Amount of data moved to the tenured generation during collection.
size_t tenuredSize;
// These lists are threaded through the Nursery using the space from
// already moved things. The lists are used to fix up the moved things and
// to find things held live by intra-Nursery pointers.
gc::RelocationOverlay* objHead;
gc::RelocationOverlay** objTail;
gc::RelocationOverlay* stringHead;
gc::RelocationOverlay** stringTail;
// This list is threaded through the Nursery using the space from already
// moved things. The list is used to fix up the moved things and to find
// things held live by intra-Nursery pointers.
gc::RelocationOverlay* head;
gc::RelocationOverlay** tail;
TenuringTracer(JSRuntime* rt, Nursery* nursery);
public:
Nursery& nursery() { return nursery_; }
const Nursery& nursery() const { return nursery_; }
// Returns true if the pointer was updated.
template <typename T> void traverse(T** thingp);
template <typename T> void traverse(T* thingp);
@ -100,21 +99,18 @@ class TenuringTracer : public JSTracer
void traceObject(JSObject* src);
void traceObjectSlots(NativeObject* nobj, uint32_t start, uint32_t length);
void traceSlots(JS::Value* vp, uint32_t nslots);
void traceString(JSString* src);
private:
inline void insertIntoObjectFixupList(gc::RelocationOverlay* entry);
inline void insertIntoStringFixupList(gc::RelocationOverlay* entry);
Nursery& nursery() { return nursery_; }
inline void insertIntoFixupList(gc::RelocationOverlay* entry);
template <typename T>
inline T* allocTenured(JS::Zone* zone, gc::AllocKind kind);
inline JSObject* movePlainObjectToTenured(PlainObject* src);
JSObject* moveToTenuredSlow(JSObject* src);
JSString* moveToTenured(JSString* src);
size_t moveElementsToTenured(NativeObject* dst, NativeObject* src, gc::AllocKind dstKind);
size_t moveSlotsToTenured(NativeObject* dst, NativeObject* src, gc::AllocKind dstKind);
size_t moveStringToTenured(JSString* dst, JSString* src, gc::AllocKind dstKind);
void traceSlots(JS::Value* vp, JS::Value* end);
};
@ -139,15 +135,6 @@ class Nursery
static const size_t Alignment = gc::ChunkSize;
static const size_t ChunkShift = gc::ChunkShift;
struct alignas(gc::CellAlignBytes) CellAlignedByte {
char byte;
};
struct StringLayout {
JS::Zone* zone;
CellAlignedByte cell;
};
explicit Nursery(JSRuntime* rt);
~Nursery();
@ -170,10 +157,6 @@ class Nursery
void disable();
bool isEnabled() const { return maxChunkCount() != 0; }
void enableStrings();
void disableStrings();
bool canAllocateStrings() const { return canAllocateStrings_; }
/* Return true if no allocations have been made since the last collection. */
bool isEmpty() const;
@ -199,29 +182,6 @@ class Nursery
*/
JSObject* allocateObject(JSContext* cx, size_t size, size_t numDynamic, const js::Class* clasp);
/*
* Allocate and return a pointer to a new string. Returns nullptr if the
* Nursery is full.
*/
gc::Cell* allocateString(JSContext* cx, JS::Zone* zone, size_t size, gc::AllocKind kind);
/*
* String zones are stored just before the string in nursery memory.
*/
static JS::Zone* getStringZone(const JSString* str) {
#ifdef DEBUG
auto cell = reinterpret_cast<const js::gc::Cell*>(str); // JSString type is incomplete here
MOZ_ASSERT(js::gc::IsInsideNursery(cell), "getStringZone must be passed a nursery string");
#endif
auto layout = reinterpret_cast<const uint8_t*>(str) - offsetof(StringLayout, cell);
return reinterpret_cast<const StringLayout*>(layout)->zone;
}
static size_t stringHeaderSize() {
return offsetof(StringLayout, cell);
}
/* Allocate a buffer for a given zone, using the nursery if possible. */
void* allocateBuffer(JS::Zone* zone, size_t nbytes);
@ -252,11 +212,11 @@ class Nursery
void collect(JS::gcreason::Reason reason);
/*
* If the thing at |*ref| in the Nursery has been forwarded, set |*ref| to
* the new location and return true. Otherwise return false and leave
* |*ref| unset.
* Check if the thing at |*ref| in the Nursery has been forwarded. If so,
* sets |*ref| to the new location of the object and returns true. Otherwise
* returns false and leaves |*ref| unset.
*/
MOZ_ALWAYS_INLINE MOZ_MUST_USE static bool getForwardedPointer(js::gc::Cell** ref);
MOZ_ALWAYS_INLINE MOZ_MUST_USE static bool getForwardedPointer(JSObject** ref);
/* Forward a slots/elements pointer stored in an Ion frame. */
void forwardBufferPointer(HeapSlot** pSlotsElems);
@ -264,13 +224,6 @@ class Nursery
inline void maybeSetForwardingPointer(JSTracer* trc, void* oldData, void* newData, bool direct);
inline void setForwardingPointerWhileTenuring(void* oldData, void* newData, bool direct);
/*
* Register a malloced buffer that is held by a nursery object, which
* should be freed at the end of a minor GC. Buffers are unregistered when
* their owning objects are tenured.
*/
bool registerMallocedBuffer(void* buffer);
/* Mark a malloced buffer as no longer needing to be freed. */
void removeMallocedBuffer(void* buffer) {
mallocedBuffers.remove(buffer);
@ -328,7 +281,6 @@ class Nursery
void* addressOfCurrentEnd() const { return (void*)&currentEnd_; }
void* addressOfPosition() const { return (void*)&position_; }
void* addressOfCurrentStringEnd() const { return (void*)&currentStringEnd_; }
void requestMinorGC(JS::gcreason::Reason reason) const;
@ -365,12 +317,6 @@ class Nursery
/* Pointer to the last byte of space in the current chunk. */
uintptr_t currentEnd_;
/*
* Pointer to the last byte of space in the current chunk, or nullptr if we
* are not allocating strings in the nursery.
*/
uintptr_t currentStringEnd_;
/* The index of the chunk that is currently being allocated from. */
unsigned currentChunk_;
@ -396,10 +342,7 @@ class Nursery
mozilla::TimeDuration profileThreshold_;
bool enableProfiling_;
/* Whether we will nursery-allocate strings. */
bool canAllocateStrings_;
/* Report ObjectGroups with at least this many instances tenured. */
/* Report ObjectGroups with at lest this many instances tenured. */
int64_t reportTenurings_;
/*
@ -518,6 +461,9 @@ class Nursery
JSRuntime* runtime() const { return runtime_; }
/* Allocates a new GC thing from the tenured generation during minor GC. */
gc::TenuredCell* allocateFromTenured(JS::Zone* zone, gc::AllocKind thingKind);
/* Common internal allocator function. */
void* allocate(size_t size);

Просмотреть файл

@ -148,18 +148,14 @@ class NurseryAwareHashMap
// Update and relocate the key, if the value is still needed.
//
// Non-string Values will contain a strong reference to Key, as per
// its use in the CrossCompartmentWrapperMap, so the key will never
// be dying here. Strings do *not* have any sort of pointer from
// wrapper to wrappee, as they are just copies. The wrapper map
// entry is merely used as a cache to avoid re-copying the string,
// and currently that entire cache is flushed on major GC.
// Note that this currently assumes that all Value will contain a
// strong reference to Key, as per its use as the
// CrossCompartmentWrapperMap. We may need to make the following
// behavior more dynamic if we use this map in other nursery-aware
// contexts.
Key copy(key);
bool sweepKey = JS::GCPolicy<Key>::needsSweep(&copy);
if (sweepKey) {
map.remove(key);
continue;
}
mozilla::DebugOnly<bool> sweepKey = JS::GCPolicy<Key>::needsSweep(&copy);
MOZ_ASSERT(!sweepKey);
map.rekeyIfMoved(key, copy);
}
nurseryEntries.clear();

Просмотреть файл

@ -15,11 +15,6 @@
#include <stdint.h>
#include "jsobj.h"
#include "js/HeapAPI.h"
#include "vm/Shape.h"
namespace js {
namespace gc {
@ -31,22 +26,11 @@ struct Cell;
*/
class RelocationOverlay
{
/* See comment in js/public/HeapAPI.h. */
static const uint32_t Relocated = js::gc::Relocated;
#if MOZ_LITTLE_ENDIAN
/*
* Keep the low 32 bits untouched. Use them to distinguish strings from
* objects in the nursery.
*/
uint32_t preserve_;
/* The low bit is set so this should never equal a normal pointer. */
static const uintptr_t Relocated = uintptr_t(0xbad0bad1);
/* Set to Relocated when moved. */
uint32_t magic_;
#else
uint32_t magic_;
uint32_t preserve_;
#endif
uintptr_t magic_;
/* The location |this| was moved to. */
Cell* newLocation_;
@ -55,16 +39,11 @@ class RelocationOverlay
RelocationOverlay* next_;
public:
static const RelocationOverlay* fromCell(const Cell* cell) {
return reinterpret_cast<const RelocationOverlay*>(cell);
}
static RelocationOverlay* fromCell(Cell* cell) {
return reinterpret_cast<RelocationOverlay*>(cell);
}
bool isForwarded() const {
(void) preserve_; // Suppress warning
return magic_ == Relocated;
}
@ -85,7 +64,7 @@ class RelocationOverlay
return next_;
}
static bool isCellForwarded(const Cell* cell) {
static bool isCellForwarded(Cell* cell) {
return fromCell(cell)->isForwarded();
}
};

Просмотреть файл

@ -405,8 +405,6 @@ class StoreBuffer
void clear();
const Nursery& nursery() const { return nursery_; }
/* Get the overflowed status. */
bool isAboutToOverflow() const { return aboutToOverflow_; }

Просмотреть файл

@ -529,14 +529,7 @@ HeapCheckTracerBase::onChild(const JS::GCCellPtr& thing)
return;
// Don't trace into GC in zones being used by helper threads.
Zone* zone;
if (thing.is<JSObject>())
zone = thing.as<JSObject>().zone();
else if (thing.is<JSString>())
zone = thing.as<JSString>().zone();
else
zone = cell->asTenured().zone();
Zone* zone = thing.is<JSObject>() ? thing.as<JSObject>().zone() : cell->asTenured().zone();
if (zone->group() && zone->group()->usedByHelperThread())
return;

Просмотреть файл

@ -46,8 +46,6 @@ JS::Zone::Zone(JSRuntime* rt, ZoneGroup* group)
usage(&rt->gc.usage),
threshold(),
gcDelayBytes(0),
tenuredStrings(group, 0),
allocNurseryStrings(group, true),
propertyTree_(group, this),
baseShapes_(group, this),
initialShapes_(group, this),

Просмотреть файл

@ -521,9 +521,6 @@ struct Zone : public JS::shadow::Zone,
// the current GC.
js::UnprotectedData<size_t> gcDelayBytes;
js::ZoneGroupData<uint32_t> tenuredStrings;
js::ZoneGroupData<bool> allocNurseryStrings;
private:
// Shared Shape property tree.
js::ZoneGroupData<js::PropertyTree> propertyTree_;

Просмотреть файл

@ -17,7 +17,7 @@ class JSStringTypeCache(object):
def __init__(self, cache):
dummy = gdb.Value(0).cast(cache.JSString_ptr_t)
self.ROPE_FLAGS = dummy['ROPE_FLAGS']
self.NON_ATOM_BIT = dummy['NON_ATOM_BIT']
self.ATOM_BIT = dummy['ATOM_BIT']
self.INLINE_CHARS_BIT = dummy['INLINE_CHARS_BIT']
self.TYPE_FLAGS_MASK = dummy['TYPE_FLAGS_MASK']
self.LATIN1_CHARS_BIT = dummy['LATIN1_CHARS_BIT']

Просмотреть файл

@ -13,41 +13,23 @@ var config = getBuildConfiguration();
if (!config['moz-memory'])
quit(0);
gczeal(0); // Need to control when tenuring happens
// Ion eager runs much of this code in Ion, and Ion nursery-allocates more
// aggressively than other modes.
if (getJitCompilerOptions()["ion.warmup.trigger"] <= 100)
setJitCompilerOption("ion.warmup.trigger", 100);
if (config['pointer-byte-size'] == 4)
var s = (s32, s64) => s32
else
var s = (s32, s64) => s64
// Convert an input string, which is probably an atom because it's a literal in
// the source text, to a nursery-allocated string with the same contents.
function copyString(str) {
if (str.length == 0)
return str; // Nothing we can do here
return ensureFlatString(str.substr(0, 1) + str.substr(1));
}
// Return the nursery byte size of |str|.
function nByteSize(str) {
// Strings that appear in the source will always be atomized and therefore
// will never be in the nursery.
return byteSize(copyString(str));
}
// Return the tenured byte size of |str|.
function tByteSize(str) {
// Strings that appear in the source will always be atomized and therefore
// will never be in the nursery. But we'll make them get tenured instead of
// using the atom.
str = copyString(str);
// Return the byte size of |obj|, ensuring that the size is not affected by
// being tenured. (We use 'survives a GC' as an approximation for 'tenuring'.)
function tByteSize(obj) {
var nurserySize = byteSize(obj);
minorgc();
return byteSize(str);
var tenuredSize = byteSize(obj);
if (nurserySize != tenuredSize) {
print("nursery size: " + nurserySize + " tenured size: " + tenuredSize);
return -1; // make the stack trace point at the real test
}
return tenuredSize;
}
// There are four representations of flat strings, with the following capacities
@ -62,84 +44,38 @@ function tByteSize(str) {
// JSExtensibleString - limited by available memory - X
// JSUndependedString - same as JSExtensibleString -
// Notes:
// - labels are suffixed with A for atoms and N for non-atoms
// - atoms are 8 bytes larger than non-atoms, to store the atom's hash code.
// - Nursery-allocated strings require a header that stores the zone.
// Expected sizes based on type of string
const m32 = (config['pointer-byte-size'] == 4);
const TA = m32 ? 24 : 32; // ThinInlineString atom, includes a hash value
const TN = m32 ? 16 : 24; // ThinInlineString
const FN = m32 ? 32 : 32; // FatInlineString
const XN = m32 ? 16 : 24; // ExtensibleString, has additional storage buffer
const RN = m32 ? 16 : 24; // Rope
const DN = m32 ? 16 : 24; // DependentString
// A function that pads out a tenured size to the nursery size. We store a zone
// pointer in the nursery just before the string (4 bytes on 32-bit, 8 bytes on
// 64-bit), and the string struct itself must be 8-byte aligned (resulting in
// +4 bytes on 32-bit, +0 bytes on 64-bit). The end result? Nursery strings are
// 8 bytes larger.
const Nursery = m32 ? s => s + 4 + 4 : s => s + 8 + 0;
// Note that atoms are 8 bytes larger than non-atoms, to store the atom's hash code.
// Latin-1
assertEq(tByteSize(""), s(TA, TA));
assertEq(tByteSize("1"), s(TA, TA));
assertEq(tByteSize("1234567"), s(TN, TN));
assertEq(tByteSize("12345678"), s(FN, TN));
assertEq(tByteSize("123456789.12345"), s(FN, TN));
assertEq(tByteSize("123456789.123456"), s(FN, FN));
assertEq(tByteSize("123456789.123456789.123"), s(FN, FN));
assertEq(tByteSize("123456789.123456789.1234"), s(XN+32, XN+32));
assertEq(tByteSize("123456789.123456789.123456789.1"), s(XN+32, XN+32));
assertEq(tByteSize("123456789.123456789.123456789.12"), s(XN+64, XN+64));
assertEq(nByteSize(""), s(TA, TA));
assertEq(nByteSize("1"), s(TA, TA));
assertEq(nByteSize("1234567"), s(Nursery(TN), Nursery(TN)));
assertEq(nByteSize("12345678"), s(Nursery(FN), Nursery(TN)));
assertEq(nByteSize("123456789.12345"), s(Nursery(FN), Nursery(TN)));
assertEq(nByteSize("123456789.123456"), s(Nursery(FN), Nursery(FN)));
assertEq(nByteSize("123456789.123456789.123"), s(Nursery(FN), Nursery(FN)));
assertEq(nByteSize("123456789.123456789.1234"), s(Nursery(XN)+32,Nursery(XN)+32));
assertEq(nByteSize("123456789.123456789.123456789.1"), s(Nursery(XN)+32,Nursery(XN)+32));
assertEq(nByteSize("123456789.123456789.123456789.12"), s(Nursery(XN)+64,Nursery(XN)+64));
assertEq(tByteSize(""), s(24, 32)); // T, T
assertEq(tByteSize("1"), s(24, 32)); // T, T
assertEq(tByteSize("1234567"), s(24, 32)); // T, T
assertEq(tByteSize("12345678"), s(40, 32)); // F, T
assertEq(tByteSize("123456789.12345"), s(40, 32)); // F, T
assertEq(tByteSize("123456789.123456"), s(40, 40)); // F, F
assertEq(tByteSize("123456789.123456789.123"), s(40, 40)); // F, F
assertEq(tByteSize("123456789.123456789.1234"), s(56, 64)); // X, X
assertEq(tByteSize("123456789.123456789.123456789.1"), s(56, 64)); // X, X
assertEq(tByteSize("123456789.123456789.123456789.12"), s(72, 80)); // X, X
// Inline char16_t atoms.
// "Impassionate gods have never seen the red that is the Tatsuta River."
// - Ariwara no Narihira
assertEq(tByteSize("千"), s(TA, TA));
assertEq(tByteSize("千早"), s(TN, TN));
assertEq(tByteSize("千早ぶ"), s(TN, TN));
assertEq(tByteSize("千早ぶる"), s(FN, TN));
assertEq(tByteSize("千早ぶる神"), s(FN, TN));
assertEq(tByteSize("千早ぶる神代"), s(FN, TN));
assertEq(tByteSize("千早ぶる神代も"), s(FN, TN));
assertEq(tByteSize("千早ぶる神代もき"), s(FN, FN));
assertEq(tByteSize("千早ぶる神代もきかず龍"), s(FN, FN));
assertEq(tByteSize("千早ぶる神代もきかず龍田"), s(XN+32, XN+32));
assertEq(tByteSize("千早ぶる神代もきかず龍田川 か"), s(XN+32, XN+32));
assertEq(tByteSize("千早ぶる神代もきかず龍田川 から"), s(XN+64, XN+64));
assertEq(tByteSize("千早ぶる神代もきかず龍田川 からくれなゐに水く"), s(XN+64, XN+64));
assertEq(tByteSize("千早ぶる神代もきかず龍田川 からくれなゐに水くく"), s(XN+64, XN+64));
assertEq(tByteSize("千早ぶる神代もきかず龍田川 からくれなゐに水くくるとは"), s(XN+64, XN+64));
assertEq(nByteSize("千"), s(TA, TA));
assertEq(nByteSize("千早"), s(Nursery(TN), Nursery(TN)));
assertEq(nByteSize("千早ぶ"), s(Nursery(TN), Nursery(TN)));
assertEq(nByteSize("千早ぶる"), s(Nursery(FN), Nursery(TN)));
assertEq(nByteSize("千早ぶる神"), s(Nursery(FN), Nursery(TN)));
assertEq(nByteSize("千早ぶる神代"), s(Nursery(FN), Nursery(TN)));
assertEq(nByteSize("千早ぶる神代も"), s(Nursery(FN), Nursery(TN)));
assertEq(nByteSize("千早ぶる神代もき"), s(Nursery(FN), Nursery(FN)));
assertEq(nByteSize("千早ぶる神代もきかず龍"), s(Nursery(FN), Nursery(FN)));
assertEq(nByteSize("千早ぶる神代もきかず龍田"), s(Nursery(XN)+32, Nursery(XN)+32));
assertEq(nByteSize("千早ぶる神代もきかず龍田川 か"), s(Nursery(XN)+32, Nursery(XN)+32));
assertEq(nByteSize("千早ぶる神代もきかず龍田川 から"), s(Nursery(XN)+64, Nursery(XN)+64));
assertEq(nByteSize("千早ぶる神代もきかず龍田川 からくれなゐに水く"), s(Nursery(XN)+64, Nursery(XN)+64));
assertEq(nByteSize("千早ぶる神代もきかず龍田川 からくれなゐに水くく"), s(Nursery(XN)+64, Nursery(XN)+64));
assertEq(nByteSize("千早ぶる神代もきかず龍田川 からくれなゐに水くくるとは"), s(Nursery(XN)+64, Nursery(XN)+64));
assertEq(tByteSize("千"), s(24, 32)); // T, T
assertEq(tByteSize("千早"), s(24, 32)); // T, T
assertEq(tByteSize("千早ぶ"), s(24, 32)); // T, T
assertEq(tByteSize("千早ぶる"), s(40, 32)); // F, T
assertEq(tByteSize("千早ぶる神"), s(40, 32)); // F, T
assertEq(tByteSize("千早ぶる神代"), s(40, 32)); // F, T
assertEq(tByteSize("千早ぶる神代も"), s(40, 32)); // F, T
assertEq(tByteSize("千早ぶる神代もき"), s(40, 40)); // F, F
assertEq(tByteSize("千早ぶる神代もきかず龍"), s(40, 40)); // F, F
assertEq(tByteSize("千早ぶる神代もきかず龍田"), s(56, 64)); // X, X
assertEq(tByteSize("千早ぶる神代もきかず龍田川 か"), s(56, 64)); // X, X
assertEq(tByteSize("千早ぶる神代もきかず龍田川 から"), s(72, 80)); // X, X
assertEq(tByteSize("千早ぶる神代もきかず龍田川 からくれなゐに水く"), s(72, 80)); // X, X
assertEq(tByteSize("千早ぶる神代もきかず龍田川 からくれなゐに水くく"), s(88, 96)); // X, X
assertEq(tByteSize("千早ぶる神代もきかず龍田川 からくれなゐに水くくるとは"), s(88, 96)); // X, X
// A Latin-1 rope. This changes size when flattened.
// "In a village of La Mancha, the name of which I have no desire to call to mind"
@ -148,24 +84,21 @@ var fragment8 = "En un lugar de la Mancha, de cuyo nombre no quiero acordarme";
var rope8 = fragment8;
for (var i = 0; i < 10; i++) // 1024 repetitions
rope8 = rope8 + rope8;
assertEq(byteSize(rope8), s(Nursery(RN), Nursery(RN)));
minorgc();
assertEq(byteSize(rope8), s(RN, RN));
assertEq(tByteSize(rope8), s(16, 24));
var matches8 = rope8.match(/(de cuyo nombre no quiero acordarme)/);
assertEq(byteSize(rope8), s(XN + 65536, XN + 65536));
assertEq(tByteSize(rope8), s(16 + 65536, 24 + 65536));
// Test extensible strings.
//
// Appending another copy of the fragment should yield another rope.
//
// Flatting that should turn the original rope into a dependent string, and
// yield a new linear string, of the same size as the original.
// yield a new linear string, of the some size as the original.
rope8a = rope8 + fragment8;
assertEq(byteSize(rope8a), s(Nursery(RN), Nursery(RN)));
assertEq(tByteSize(rope8a), s(16, 24));
rope8a.match(/x/, function() { assertEq(true, false); });
assertEq(byteSize(rope8a), s(Nursery(XN) + 65536, Nursery(XN) + 65536));
assertEq(byteSize(rope8), s(RN, RN));
assertEq(tByteSize(rope8a), s(16 + 65536, 24 + 65536));
assertEq(tByteSize(rope8), s(16, 24));
// A char16_t rope. This changes size when flattened.
@ -175,17 +108,17 @@ var fragment16 = "μουσάων Ἑλικωνιάδων ἀρχώμεθ᾽ ἀ
var rope16 = fragment16;
for (var i = 0; i < 10; i++) // 1024 repetitions
rope16 = rope16 + rope16;
assertEq(byteSize(rope16), s(Nursery(RN), Nursery(RN)));
assertEq(tByteSize(rope16), s(16, 24));
let matches16 = rope16.match(/(Ἑλικωνιάδων ἀρχώμεθ᾽)/);
assertEq(byteSize(rope16), s(Nursery(RN) + 131072, Nursery(RN) + 131072));
assertEq(tByteSize(rope16), s(16 + 131072, 24 + 131072));
// Latin-1 and char16_t dependent strings.
assertEq(byteSize(rope8.substr(1000, 2000)), s(Nursery(DN), Nursery(DN)));
assertEq(byteSize(rope16.substr(1000, 2000)), s(Nursery(DN), Nursery(DN)));
assertEq(byteSize(matches8[0]), s(Nursery(DN), Nursery(DN)));
assertEq(byteSize(matches8[1]), s(Nursery(DN), Nursery(DN)));
assertEq(byteSize(matches16[0]), s(Nursery(DN), Nursery(DN)));
assertEq(byteSize(matches16[1]), s(Nursery(DN), Nursery(DN)));
assertEq(tByteSize(rope8.substr(1000, 2000)), s(16, 24));
assertEq(tByteSize(rope16.substr(1000, 2000)), s(16, 24));
assertEq(tByteSize(matches8[0]), s(16, 24));
assertEq(tByteSize(matches8[1]), s(16, 24));
assertEq(tByteSize(matches16[0]), s(16, 24));
assertEq(tByteSize(matches16[1]), s(16, 24));
// Test extensible strings.
//
@ -194,7 +127,7 @@ assertEq(byteSize(matches16[1]), s(Nurser
// Flatting that should turn the original rope into a dependent string, and
// yield a new linear string, of the some size as the original.
rope16a = rope16 + fragment16;
assertEq(byteSize(rope16a), s(Nursery(RN), Nursery(RN)));
assertEq(tByteSize(rope16a), s(16, 24));
rope16a.match(/x/, function() { assertEq(true, false); });
assertEq(byteSize(rope16a), s(Nursery(XN) + 131072, Nursery(XN) + 131072));
assertEq(byteSize(rope16), s(Nursery(XN), Nursery(XN)));
assertEq(tByteSize(rope16a), s(16 + 131072, 24 + 131072));
assertEq(tByteSize(rope16), s(16, 24));

Просмотреть файл

@ -356,8 +356,8 @@ BaselineCacheIRCompiler::emitGuardSpecificAtom()
// The pointers are not equal, so if the input string is also an atom it
// must be a different string.
masm.branchTest32(Assembler::Zero, Address(str, JSString::offsetOfFlags()),
Imm32(JSString::NON_ATOM_BIT), failure->label());
masm.branchTest32(Assembler::NonZero, Address(str, JSString::offsetOfFlags()),
Imm32(JSString::ATOM_BIT), failure->label());
// Check the length.
masm.loadPtr(atomAddr, scratch);
@ -1266,8 +1266,9 @@ BaselineCacheIRCompiler::emitStoreTypedObjectReferenceProperty()
Address dest(scratch1, 0);
emitStoreTypedObjectReferenceProp(val, type, dest, scratch2);
emitPostBarrierSlot(obj, val, scratch1);
if (type != ReferenceTypeDescr::TYPE_STRING)
emitPostBarrierSlot(obj, val, scratch1);
return true;
}

Просмотреть файл

@ -2792,7 +2792,7 @@ BaselineCompiler::emit_JSOP_SETALIASEDVAR()
Label skipBarrier;
masm.branchPtrInNurseryChunk(Assembler::Equal, objReg, temp, &skipBarrier);
masm.branchValueIsNurseryCell(Assembler::NotEqual, R0, temp, &skipBarrier);
masm.branchValueIsNurseryObject(Assembler::NotEqual, R0, temp, &skipBarrier);
masm.call(&postBarrierSlot_); // Won't clobber R0
@ -3216,7 +3216,7 @@ BaselineCompiler::emitFormalArgAccess(uint32_t arg, bool get)
Label skipBarrier;
masm.branchPtrInNurseryChunk(Assembler::Equal, reg, temp, &skipBarrier);
masm.branchValueIsNurseryCell(Assembler::NotEqual, R0, temp, &skipBarrier);
masm.branchValueIsNurseryObject(Assembler::NotEqual, R0, temp, &skipBarrier);
masm.call(&postBarrierSlot_);

Просмотреть файл

@ -1835,7 +1835,7 @@ CacheIRCompiler::emitLoadStringCharResult()
// Bounds check, load string char.
masm.branch32(Assembler::BelowOrEqual, Address(str, JSString::offsetOfLength()),
index, failure->label());
masm.loadStringChar(str, index, scratch2, scratch1, failure->label());
masm.loadStringChar(str, index, scratch1, failure->label());
// Load StaticString for this char.
masm.branch32(Assembler::AboveOrEqual, scratch1, Imm32(StaticStrings::UNIT_STATIC_LIMIT),
@ -2339,8 +2339,6 @@ void
CacheIRCompiler::emitStoreTypedObjectReferenceProp(ValueOperand val, ReferenceTypeDescr::Type type,
const Address& dest, Register scratch)
{
// Callers will post-barrier this store.
switch (type) {
case ReferenceTypeDescr::TYPE_ANY:
EmitPreBarrier(masm, dest, MIRType::Value);
@ -2393,19 +2391,18 @@ CacheIRCompiler::emitPostBarrierShared(Register obj, const ConstantOrRegister& v
return;
if (val.constant()) {
MOZ_ASSERT_IF(val.value().isGCThing(), !IsInsideNursery(val.value().toGCThing()));
MOZ_ASSERT_IF(val.value().isObject(), !IsInsideNursery(&val.value().toObject()));
return;
}
TypedOrValueRegister reg = val.reg();
if (reg.hasTyped()) {
if (reg.type() != MIRType::Object && reg.type() != MIRType::String)
return;
}
if (reg.hasTyped() && reg.type() != MIRType::Object)
return;
Label skipBarrier;
if (reg.hasValue()) {
masm.branchValueIsNurseryCell(Assembler::NotEqual, reg.valueReg(), scratch, &skipBarrier);
masm.branchValueIsNurseryObject(Assembler::NotEqual, reg.valueReg(), scratch,
&skipBarrier);
} else {
masm.branchPtrInNurseryChunk(Assembler::NotEqual, reg.typedReg().gpr(), scratch,
&skipBarrier);

Просмотреть файл

@ -1193,83 +1193,6 @@ CodeGenerator::visitValueToObjectOrNull(LValueToObjectOrNull* lir)
masm.bind(ool->rejoin());
}
static void
EmitStoreBufferMutation(MacroAssembler& masm, Register strbase, int32_t strofs,
Register buffer,
LiveGeneralRegisterSet& liveVolatiles,
void (*fun)(js::gc::StoreBuffer*, js::gc::Cell**))
{
Label callVM;
Label exit;
// Call into the VM to barrier the write. The only registers that need to
// be preserved are those in liveVolatiles, so once they are saved on the
// stack all volatile registers are available for use.
masm.bind(&callVM);
masm.PushRegsInMask(liveVolatiles);
AllocatableGeneralRegisterSet regs(GeneralRegisterSet::Volatile());
regs.takeUnchecked(buffer);
regs.takeUnchecked(strbase);
Register addrReg = regs.takeAny();
masm.computeEffectiveAddress(Address(strbase, strofs), addrReg);
bool needExtraReg = !regs.hasAny<GeneralRegisterSet::DefaultType>();
if (needExtraReg) {
masm.push(strbase);
masm.setupUnalignedABICall(strbase);
} else {
masm.setupUnalignedABICall(regs.takeAny());
}
masm.passABIArg(buffer);
masm.passABIArg(addrReg);
masm.callWithABI(JS_FUNC_TO_DATA_PTR(void*, fun), MoveOp::GENERAL,
CheckUnsafeCallWithABI::DontCheckOther);
if (needExtraReg)
masm.pop(strbase);
masm.PopRegsInMask(liveVolatiles);
masm.bind(&exit);
}
// Warning: this function modifies prev and next.
static void
EmitPostWriteBarrierS(MacroAssembler& masm,
Register strbase, int32_t strofs,
Register prev, Register next,
LiveGeneralRegisterSet& liveVolatiles)
{
Label exit;
Label checkRemove, putCell;
// if (next && (buffer = next->storeBuffer()))
// but we never pass in nullptr for next.
Register storebuffer = next;
masm.loadStoreBuffer(next, storebuffer);
masm.branchPtr(Assembler::Equal, storebuffer, ImmWord(0), &checkRemove);
// if (prev && prev->storeBuffer())
masm.branchPtr(Assembler::Equal, prev, ImmWord(0), &putCell);
masm.loadStoreBuffer(prev, prev);
masm.branchPtr(Assembler::NotEqual, prev, ImmWord(0), &exit);
// buffer->putCell(cellp)
masm.bind(&putCell);
EmitStoreBufferMutation(masm, strbase, strofs, storebuffer, liveVolatiles,
JSString::addCellAddressToStoreBuffer);
masm.jump(&exit);
// if (prev && (buffer = prev->storeBuffer()))
masm.bind(&checkRemove);
masm.branchPtr(Assembler::Equal, prev, ImmWord(0), &exit);
masm.loadStoreBuffer(prev, storebuffer);
masm.branchPtr(Assembler::Equal, storebuffer, ImmWord(0), &exit);
EmitStoreBufferMutation(masm, strbase, strofs, storebuffer, liveVolatiles,
JSString::removeCellAddressFromStoreBuffer);
masm.bind(&exit);
}
typedef JSObject* (*CloneRegExpObjectFn)(JSContext*, Handle<RegExpObject*>);
static const VMFunction CloneRegExpObjectInfo =
FunctionInfo<CloneRegExpObjectFn>(CloneRegExpObject, "CloneRegExpObject");
@ -1512,22 +1435,8 @@ PrepareAndExecuteRegExp(JSContext* cx, MacroAssembler& masm, Register regexp, Re
masm.guardedCallPreBarrier(matchesInputAddress, MIRType::String);
masm.guardedCallPreBarrier(lazySourceAddress, MIRType::String);
if (temp1.volatile_())
volatileRegs.add(temp1);
// Writing into RegExpStatics tenured memory; must post-barrier.
masm.loadPtr(pendingInputAddress, temp2);
masm.storePtr(input, pendingInputAddress);
masm.movePtr(input, temp3);
EmitPostWriteBarrierS(masm, temp1, RegExpStatics::offsetOfPendingInput(),
temp2 /* prev */, temp3 /* next */, volatileRegs);
masm.loadPtr(matchesInputAddress, temp2);
masm.storePtr(input, matchesInputAddress);
masm.movePtr(input, temp3);
EmitPostWriteBarrierS(masm, temp1, RegExpStatics::offsetOfMatchesInput(),
temp2 /* prev */, temp3 /* next */, volatileRegs);
masm.storePtr(lastIndex, Address(temp1, RegExpStatics::offsetOfLazyIndex()));
masm.store32(Imm32(1), Address(temp1, RegExpStatics::offsetOfPendingLazyEvaluation()));
@ -1570,7 +1479,6 @@ public:
bool latin1, Register string,
Register base, Register temp1, Register temp2,
BaseIndex startIndexAddress, BaseIndex limitIndexAddress,
bool stringsCanBeInNursery,
Label* failure);
// Generate fallback path for creating DependentString.
@ -1582,7 +1490,6 @@ CreateDependentString::generate(MacroAssembler& masm, const JSAtomState& names,
bool latin1, Register string,
Register base, Register temp1, Register temp2,
BaseIndex startIndexAddress, BaseIndex limitIndexAddress,
bool stringsCanBeInNursery,
Label* failure)
{
string_ = string;
@ -1620,7 +1527,7 @@ CreateDependentString::generate(MacroAssembler& masm, const JSAtomState& names,
masm.branch32(Assembler::Above, temp1, Imm32(maxThinInlineLength), &fatInline);
int32_t thinFlags = (latin1 ? JSString::LATIN1_CHARS_BIT : 0) | JSString::INIT_THIN_INLINE_FLAGS;
masm.newGCString(string, temp2, &fallbacks_[FallbackKind::InlineString], stringsCanBeInNursery);
masm.newGCString(string, temp2, &fallbacks_[FallbackKind::InlineString]);
masm.bind(&joins_[FallbackKind::InlineString]);
masm.store32(Imm32(thinFlags), Address(string, JSString::offsetOfFlags()));
masm.jump(&stringAllocated);
@ -1628,7 +1535,7 @@ CreateDependentString::generate(MacroAssembler& masm, const JSAtomState& names,
masm.bind(&fatInline);
int32_t fatFlags = (latin1 ? JSString::LATIN1_CHARS_BIT : 0) | JSString::INIT_FAT_INLINE_FLAGS;
masm.newGCFatInlineString(string, temp2, &fallbacks_[FallbackKind::FatInlineString], stringsCanBeInNursery);
masm.newGCFatInlineString(string, temp2, &fallbacks_[FallbackKind::FatInlineString]);
masm.bind(&joins_[FallbackKind::FatInlineString]);
masm.store32(Imm32(fatFlags), Address(string, JSString::offsetOfFlags()));
@ -1673,9 +1580,7 @@ CreateDependentString::generate(MacroAssembler& masm, const JSAtomState& names,
// Make a dependent string.
int32_t flags = (latin1 ? JSString::LATIN1_CHARS_BIT : 0) | JSString::DEPENDENT_FLAGS;
masm.newGCString(string, temp2, &fallbacks_[FallbackKind::NotInlineString], stringsCanBeInNursery);
// Warning: string may be tenured (if the fallback case is hit), so
// stores into it must be post barriered.
masm.newGCString(string, temp2, &fallbacks_[FallbackKind::NotInlineString]);
masm.bind(&joins_[FallbackKind::NotInlineString]);
masm.store32(Imm32(flags), Address(string, JSString::offsetOfFlags()));
masm.store32(temp1, Address(string, JSString::offsetOfLength()));
@ -1688,7 +1593,6 @@ CreateDependentString::generate(MacroAssembler& masm, const JSAtomState& names,
masm.computeEffectiveAddress(BaseIndex(temp1, temp2, TimesTwo), temp1);
masm.storePtr(temp1, Address(string, JSString::offsetOfNonInlineChars()));
masm.storePtr(base, Address(string, JSDependentString::offsetOfBase()));
masm.movePtr(base, temp1);
// Follow any base pointer if the input is itself a dependent string.
// Watch for undepended strings, which have a base pointer but don't
@ -1701,17 +1605,6 @@ CreateDependentString::generate(MacroAssembler& masm, const JSAtomState& names,
masm.loadPtr(Address(base, JSDependentString::offsetOfBase()), temp1);
masm.storePtr(temp1, Address(string, JSDependentString::offsetOfBase()));
masm.bind(&noBase);
// Post-barrier the base store, whether it was the direct or indirect
// base (both will end up in temp1 here).
masm.movePtr(ImmWord(0), temp2);
LiveGeneralRegisterSet saveRegs(GeneralRegisterSet::Volatile());
if (temp1.volatile_())
saveRegs.takeUnchecked(temp1);
if (temp2.volatile_())
saveRegs.takeUnchecked(temp1);
EmitPostWriteBarrierS(masm, string, JSDependentString::offsetOfBase(),
temp2 /* prev */, temp1 /* next */, saveRegs);
}
masm.bind(&done);
@ -1721,14 +1614,14 @@ static void*
AllocateString(JSContext* cx)
{
AutoUnsafeCallWithABI unsafe;
return js::Allocate<JSString, NoGC>(cx, js::gc::TenuredHeap);
return js::Allocate<JSString, NoGC>(cx);
}
static void*
AllocateFatInlineString(JSContext* cx)
{
AutoUnsafeCallWithABI unsafe;
return js::Allocate<JSFatInlineString, NoGC>(cx, js::gc::TenuredHeap);
return js::Allocate<JSFatInlineString, NoGC>(cx);
}
void
@ -1934,12 +1827,10 @@ JitCompartment::generateRegExpMatcherStub(JSContext* cx)
masm.branch32(Assembler::LessThan, stringIndexAddress, Imm32(0), &isUndefined);
depStr[isLatin].generate(masm, cx->names(), isLatin, temp3, input, temp4, temp5,
stringIndexAddress, stringLimitAddress,
stringsCanBeInNursery,
failure);
stringIndexAddress, stringLimitAddress, failure);
masm.storeValue(JSVAL_TYPE_STRING, temp3, stringAddress);
// Storing into nursery-allocated results object's elements; no post barrier.
masm.jump(&storeDone);
masm.bind(&isUndefined);
@ -1984,9 +1875,7 @@ JitCompartment::generateRegExpMatcherStub(JSContext* cx)
masm.load32(pairsVectorAddress, temp3);
masm.storeValue(JSVAL_TYPE_INT32, temp3, Address(temp2, 0));
Address inputSlotAddress(temp2, sizeof(Value));
masm.storeValue(JSVAL_TYPE_STRING, input, inputSlotAddress);
// No post barrier needed (inputSlotAddress is within nursery object.)
masm.storeValue(JSVAL_TYPE_STRING, input, Address(temp2, sizeof(Value)));
// All done!
masm.tagValue(JSVAL_TYPE_OBJECT, object, result);
@ -2571,7 +2460,7 @@ CodeGenerator::visitGetFirstDollarIndex(LGetFirstDollarIndex* ins)
OutOfLineCode* ool = oolCallVM(GetFirstDollarIndexRawInfo, ins, ArgList(str),
StoreRegisterTo(output));
masm.branchIfRope(str, temp0, ool->entry());
masm.branchIfRope(str, ool->entry());
masm.loadStringLength(str, len);
Label isLatin1, done;
@ -2908,8 +2797,6 @@ CodeGenerator::emitLambdaInit(Register output, Register envChain,
masm.storePtr(ImmGCPtr(info.scriptOrLazyScript),
Address(output, JSFunction::offsetOfScriptOrLazyScript()));
masm.storePtr(envChain, Address(output, JSFunction::offsetOfEnvironment()));
// No post barrier needed because output is guaranteed to be allocated in
// the nursery.
masm.storePtr(ImmGCPtr(info.fun->displayAtom()), Address(output, JSFunction::offsetOfAtom()));
}
@ -3847,11 +3734,12 @@ class OutOfLineCallPostWriteBarrier : public OutOfLineCodeBase<CodeGenerator>
};
static void
EmitStoreBufferCheckForConstant(MacroAssembler& masm, const gc::TenuredCell* cell,
EmitStoreBufferCheckForConstant(MacroAssembler& masm, JSObject* object,
AllocatableGeneralRegisterSet& regs, Label* exit, Label* callVM)
{
Register temp = regs.takeAny();
const gc::TenuredCell* cell = &object->asTenured();
gc::Arena* arena = cell->arena();
Register cells = temp;
@ -3889,7 +3777,7 @@ EmitPostWriteBarrier(MacroAssembler& masm, CompileRuntime* runtime, Register obj
// We already have a fast path to check whether a global is in the store
// buffer.
if (!isGlobal && maybeConstant)
EmitStoreBufferCheckForConstant(masm, &maybeConstant->asTenured(), regs, &exit, &callVM);
EmitStoreBufferCheckForConstant(masm, maybeConstant, regs, &exit, &callVM);
// Call into the VM to barrier the write.
masm.bind(&callVM);
@ -3964,17 +3852,16 @@ CodeGenerator::maybeEmitGlobalBarrierCheck(const LAllocation* maybeGlobal, OutOf
masm.branch32(Assembler::NotEqual, addr, Imm32(0), ool->rejoin());
}
template <class LPostBarrierType, MIRType nurseryType>
template <class LPostBarrierType>
void
CodeGenerator::visitPostWriteBarrierCommon(LPostBarrierType* lir, OutOfLineCode* ool)
CodeGenerator::visitPostWriteBarrierCommonO(LPostBarrierType* lir, OutOfLineCode* ool)
{
addOutOfLineCode(ool, lir->mir());
Register temp = ToTempRegisterOrInvalid(lir->temp());
if (lir->object()->isConstant()) {
// Constant nursery objects cannot appear here, see
// LIRGenerator::visitPostWriteElementBarrier.
// Constant nursery objects cannot appear here, see LIRGenerator::visitPostWriteElementBarrier.
MOZ_ASSERT(!IsInsideNursery(&lir->object()->toConstant()->toObject()));
} else {
masm.branchPtrInNurseryChunk(Assembler::Equal, ToRegister(lir->object()), temp,
@ -3983,17 +3870,12 @@ CodeGenerator::visitPostWriteBarrierCommon(LPostBarrierType* lir, OutOfLineCode*
maybeEmitGlobalBarrierCheck(lir->object(), ool);
Register value = ToRegister(lir->value());
if (nurseryType == MIRType::Object) {
if (lir->mir()->value()->type() == MIRType::ObjectOrNull)
masm.branchTestPtr(Assembler::Zero, value, value, ool->rejoin());
else
MOZ_ASSERT(lir->mir()->value()->type() == MIRType::Object);
} else {
MOZ_ASSERT(nurseryType == MIRType::String);
MOZ_ASSERT(lir->mir()->value()->type() == MIRType::String);
}
masm.branchPtrInNurseryChunk(Assembler::Equal, value, temp, ool->entry());
Register valueObj = ToRegister(lir->value());
if (lir->mir()->value()->type() == MIRType::ObjectOrNull)
masm.branchTestPtr(Assembler::Zero, valueObj, valueObj, ool->rejoin());
else
MOZ_ASSERT(lir->mir()->value()->type() == MIRType::Object);
masm.branchPtrInNurseryChunk(Assembler::Equal, valueObj, temp, ool->entry());
masm.bind(ool->rejoin());
}
@ -4017,9 +3899,7 @@ CodeGenerator::visitPostWriteBarrierCommonV(LPostBarrierType* lir, OutOfLineCode
maybeEmitGlobalBarrierCheck(lir->object(), ool);
ValueOperand value = ToValue(lir, LPostBarrierType::Input);
// Bug 1386094 - most callers only need to check for object or string, not
// both.
masm.branchValueIsNurseryCell(Assembler::Equal, value, temp, ool->entry());
masm.branchValueIsNurseryObject(Assembler::Equal, value, temp, ool->entry());
masm.bind(ool->rejoin());
}
@ -4028,14 +3908,7 @@ void
CodeGenerator::visitPostWriteBarrierO(LPostWriteBarrierO* lir)
{
auto ool = new(alloc()) OutOfLineCallPostWriteBarrier(lir, lir->object());
visitPostWriteBarrierCommon<LPostWriteBarrierO, MIRType::Object>(lir, ool);
}
void
CodeGenerator::visitPostWriteBarrierS(LPostWriteBarrierS* lir)
{
auto ool = new(alloc()) OutOfLineCallPostWriteBarrier(lir, lir->object());
visitPostWriteBarrierCommon<LPostWriteBarrierS, MIRType::String>(lir, ool);
visitPostWriteBarrierCommonO(lir, ool);
}
void
@ -4115,14 +3988,7 @@ void
CodeGenerator::visitPostWriteElementBarrierO(LPostWriteElementBarrierO* lir)
{
auto ool = new(alloc()) OutOfLineCallPostWriteElementBarrier(lir, lir->object(), lir->index());
visitPostWriteBarrierCommon<LPostWriteElementBarrierO, MIRType::Object>(lir, ool);
}
void
CodeGenerator::visitPostWriteElementBarrierS(LPostWriteElementBarrierS* lir)
{
auto ool = new(alloc()) OutOfLineCallPostWriteElementBarrier(lir, lir->object(), lir->index());
visitPostWriteBarrierCommon<LPostWriteElementBarrierS, MIRType::String>(lir, ool);
visitPostWriteBarrierCommonO(lir, ool);
}
void
@ -6823,16 +6689,20 @@ CodeGenerator::emitLoadIteratorValues<ValueMap>(Register result, Register temp,
masm.storeValue(keyAddress, keyElemAddress, temp);
masm.storeValue(valueAddress, valueElemAddress, temp);
Label emitBarrier, skipBarrier;
masm.branchValueIsNurseryCell(Assembler::Equal, keyAddress, temp, &emitBarrier);
masm.branchValueIsNurseryCell(Assembler::NotEqual, valueAddress, temp, &skipBarrier);
Label keyIsNotObject, valueIsNotNurseryObject, emitBarrier;
masm.branchTestObject(Assembler::NotEqual, keyAddress, &keyIsNotObject);
masm.branchValueIsNurseryObject(Assembler::Equal, keyAddress, temp, &emitBarrier);
masm.bind(&keyIsNotObject);
masm.branchTestObject(Assembler::NotEqual, valueAddress, &valueIsNotNurseryObject);
masm.branchValueIsNurseryObject(Assembler::NotEqual, valueAddress, temp,
&valueIsNotNurseryObject);
{
masm.bind(&emitBarrier);
saveVolatile(temp);
emitPostWriteBarrier(result);
restoreVolatile(temp);
}
masm.bind(&skipBarrier);
masm.bind(&valueIsNotNurseryObject);
}
template <>
@ -6846,14 +6716,15 @@ CodeGenerator::emitLoadIteratorValues<ValueSet>(Register result, Register temp,
masm.guardedCallPreBarrier(keyElemAddress, MIRType::Value);
masm.storeValue(keyAddress, keyElemAddress, temp);
Label skipBarrier;
masm.branchValueIsNurseryCell(Assembler::NotEqual, keyAddress, temp, &skipBarrier);
Label keyIsNotObject;
masm.branchTestObject(Assembler::NotEqual, keyAddress, &keyIsNotObject);
masm.branchValueIsNurseryObject(Assembler::NotEqual, keyAddress, temp, &keyIsNotObject);
{
saveVolatile(temp);
emitPostWriteBarrier(result);
restoreVolatile(temp);
}
masm.bind(&skipBarrier);
masm.bind(&keyIsNotObject);
}
template <class IteratorObject, class OrderedHashTable>
@ -7775,14 +7646,13 @@ CopyStringCharsMaybeInflate(MacroAssembler& masm, Register input, Register destC
static void
ConcatInlineString(MacroAssembler& masm, Register lhs, Register rhs, Register output,
Register temp1, Register temp2, Register temp3,
bool stringsCanBeInNursery,
Label* failure, Label* failurePopTemps, bool isTwoByte)
{
// State: result length in temp2.
// Ensure both strings are linear.
masm.branchIfRope(lhs, temp1, failure);
masm.branchIfRope(rhs, temp1, failure);
masm.branchIfRope(lhs, failure);
masm.branchIfRope(rhs, failure);
// Allocate a JSThinInlineString or JSFatInlineString.
size_t maxThinInlineLength;
@ -7797,7 +7667,7 @@ ConcatInlineString(MacroAssembler& masm, Register lhs, Register rhs, Register ou
uint32_t flags = JSString::INIT_THIN_INLINE_FLAGS;
if (!isTwoByte)
flags |= JSString::LATIN1_CHARS_BIT;
masm.newGCString(output, temp1, failure, stringsCanBeInNursery);
masm.newGCString(output, temp1, failure);
masm.store32(Imm32(flags), Address(output, JSString::offsetOfFlags()));
masm.jump(&allocDone);
}
@ -7806,7 +7676,7 @@ ConcatInlineString(MacroAssembler& masm, Register lhs, Register rhs, Register ou
uint32_t flags = JSString::INIT_FAT_INLINE_FLAGS;
if (!isTwoByte)
flags |= JSString::LATIN1_CHARS_BIT;
masm.newGCFatInlineString(output, temp1, failure, stringsCanBeInNursery);
masm.newGCFatInlineString(output, temp1, failure);
masm.store32(Imm32(flags), Address(output, JSString::offsetOfFlags()));
}
masm.bind(&allocDone);
@ -7892,7 +7762,7 @@ CodeGenerator::visitSubstr(LSubstr* lir)
// Handle inlined strings by creating a FatInlineString.
masm.branchTest32(Assembler::Zero, stringFlags, Imm32(JSString::INLINE_CHARS_BIT), &notInline);
masm.newGCFatInlineString(output, temp, slowPath, stringsCanBeInNursery());
masm.newGCFatInlineString(output, temp, slowPath);
masm.store32(length, Address(output, JSString::offsetOfLength()));
Address stringStorage(string, JSInlineString::offsetOfInlineStorage());
Address outputStorage(output, JSInlineString::offsetOfInlineStorage());
@ -7934,7 +7804,7 @@ CodeGenerator::visitSubstr(LSubstr* lir)
// Handle other cases with a DependentString.
masm.bind(&notInline);
masm.newGCString(output, temp, slowPath, gen->stringsCanBeInNursery());
masm.newGCString(output, temp, slowPath);
masm.store32(length, Address(output, JSString::offsetOfLength()));
masm.storePtr(string, Address(output, JSDependentString::offsetOfBase()));
@ -8014,16 +7884,14 @@ JitCompartment::generateStringConcatStub(JSContext* cx)
// Ensure result length <= JSString::MAX_LENGTH.
masm.branch32(Assembler::Above, temp2, Imm32(JSString::MAX_LENGTH), &failure);
// Allocate a new rope, guaranteed to be in the nursery.
masm.newGCString(output, temp3, &failure, stringsCanBeInNursery);
// Allocate a new rope.
masm.newGCString(output, temp3, &failure);
// Store rope length and flags. temp1 still holds the result of AND'ing the
// lhs and rhs flags, so we just have to clear the other flags and set
// NON_ATOM_BIT to get our rope flags (Latin1 if both lhs and rhs are
// Latin1).
static_assert(JSString::ROPE_FLAGS == JSString::NON_ATOM_BIT, "Rope flags must be NON_ATOM_BIT only");
// lhs and rhs flags, so we just have to clear the other flags to get our
// rope flags (Latin1 if both lhs and rhs are Latin1).
static_assert(JSString::ROPE_FLAGS == 0, "Rope flags must be 0");
masm.and32(Imm32(JSString::LATIN1_CHARS_BIT), temp1);
masm.or32(Imm32(JSString::NON_ATOM_BIT), temp1);
masm.store32(temp1, Address(output, JSString::offsetOfFlags()));
masm.store32(temp2, Address(output, JSString::offsetOfLength()));
@ -8042,11 +7910,11 @@ JitCompartment::generateStringConcatStub(JSContext* cx)
masm.bind(&isFatInlineTwoByte);
ConcatInlineString(masm, lhs, rhs, output, temp1, temp2, temp3,
stringsCanBeInNursery, &failure, &failurePopTemps, true);
&failure, &failurePopTemps, true);
masm.bind(&isFatInlineLatin1);
ConcatInlineString(masm, lhs, rhs, output, temp1, temp2, temp3,
stringsCanBeInNursery, &failure, &failurePopTemps, false);
&failure, &failurePopTemps, false);
masm.bind(&failurePopTemps);
masm.pop(temp2);
@ -8247,7 +8115,7 @@ CodeGenerator::visitCharCodeAt(LCharCodeAt* lir)
Register output = ToRegister(lir->output());
OutOfLineCode* ool = oolCallVM(CharCodeAtInfo, lir, ArgList(str, index), StoreRegisterTo(output));
masm.loadStringChar(str, index, ToRegister(lir->temp()), output, ool->entry());
masm.loadStringChar(str, index, output, ool->entry());
masm.bind(ool->rejoin());
}
@ -8315,7 +8183,7 @@ CodeGenerator::visitFromCodePoint(LFromCodePoint* lir)
"JSThinInlineString can hold a supplementary code point");
uint32_t flags = JSString::INIT_THIN_INLINE_FLAGS;
masm.newGCString(output, temp1, ool->entry(), gen->stringsCanBeInNursery());
masm.newGCString(output, temp1, ool->entry());
masm.store32(Imm32(flags), Address(output, JSString::offsetOfFlags()));
}
@ -10078,7 +9946,7 @@ CodeGenerator::link(JSContext* cx, CompilerConstraintList* constraints)
ionScript->copyConstants(vp);
for (size_t i = 0; i < graph.numConstants(); i++) {
const Value& v = vp[i];
if ((v.isObject() || v.isString()) && IsInsideNursery(v.toGCThing())) {
if (v.isObject() && IsInsideNursery(&v.toObject())) {
cx->zone()->group()->storeBuffer().putWholeCell(script);
break;
}

Просмотреть файл

@ -164,15 +164,12 @@ class CodeGenerator final : public CodeGeneratorSpecific
void visitMonitorTypes(LMonitorTypes* lir);
void emitPostWriteBarrier(const LAllocation* obj);
void emitPostWriteBarrier(Register objreg);
void emitPostWriteBarrierS(Address address, Register prev, Register next);
template <class LPostBarrierType, MIRType nurseryType>
void visitPostWriteBarrierCommon(LPostBarrierType* lir, OutOfLineCode* ool);
template <class LPostBarrierType>
void visitPostWriteBarrierCommonO(LPostBarrierType* lir, OutOfLineCode* ool);
template <class LPostBarrierType>
void visitPostWriteBarrierCommonV(LPostBarrierType* lir, OutOfLineCode* ool);
void visitPostWriteBarrierO(LPostWriteBarrierO* lir);
void visitPostWriteElementBarrierO(LPostWriteElementBarrierO* lir);
void visitPostWriteBarrierS(LPostWriteBarrierS* lir);
void visitPostWriteElementBarrierS(LPostWriteElementBarrierS* lir);
void visitPostWriteBarrierV(LPostWriteBarrierV* lir);
void visitPostWriteElementBarrierV(LPostWriteElementBarrierV* lir);
void visitOutOfLineCallPostWriteBarrier(OutOfLineCallPostWriteBarrier* ool);

Просмотреть файл

@ -182,36 +182,16 @@ CompileZone::addressOfNurseryPosition()
return zone()->runtimeFromAnyThread()->gc.addressOfNurseryPosition();
}
const void*
CompileZone::addressOfStringNurseryPosition()
{
// Objects and strings share a nursery, for now at least.
return zone()->runtimeFromAnyThread()->gc.addressOfNurseryPosition();
}
const void*
CompileZone::addressOfNurseryCurrentEnd()
{
return zone()->runtimeFromAnyThread()->gc.addressOfNurseryCurrentEnd();
}
const void*
CompileZone::addressOfStringNurseryCurrentEnd()
{
return zone()->runtimeFromAnyThread()->gc.addressOfStringNurseryCurrentEnd();
}
bool
CompileZone::canNurseryAllocateStrings()
{
return nurseryExists() &&
zone()->group()->nursery().canAllocateStrings() &&
zone()->allocNurseryStrings;
}
bool
CompileZone::nurseryExists()
{
MOZ_ASSERT(CurrentThreadCanAccessZone(zone()));
return zone()->group()->nursery().exists();
}

Просмотреть файл

@ -76,12 +76,9 @@ class CompileZone
const void* addressOfNeedsIncrementalBarrier();
const void* addressOfFreeList(gc::AllocKind allocKind);
const void* addressOfNurseryPosition();
const void* addressOfStringNurseryPosition();
const void* addressOfNurseryCurrentEnd();
const void* addressOfStringNurseryCurrentEnd();
bool nurseryExists();
bool canNurseryAllocateStrings();
void setMinorGCShouldCancelIonCompilations();
};

Просмотреть файл

@ -455,8 +455,6 @@ JitCompartment::initialize(JSContext* cx)
return false;
}
stringsCanBeInNursery = cx->nursery().canAllocateStrings();
return true;
}

Просмотреть файл

@ -7393,14 +7393,9 @@ IonBuilder::loadStaticSlot(JSObject* staticObject, BarrierKind barrier, Temporar
bool
IonBuilder::needsPostBarrier(MDefinition* value)
{
CompileZone* zone = compartment->zone();
if (!zone->nurseryExists())
if (!compartment->zone()->nurseryExists())
return false;
if (value->mightBeType(MIRType::Object))
return true;
if (value->mightBeType(MIRType::String) && zone->canNurseryAllocateStrings())
return true;
return false;
return value->mightBeType(MIRType::Object);
}
AbortReasonOr<Ok>
@ -11923,7 +11918,7 @@ IonBuilder::storeUnboxedValue(MDefinition* obj, MDefinition* elements, int32_t e
break;
case JSVAL_TYPE_STRING:
store = MStoreUnboxedString::New(alloc(), elements, scaledOffset, value, obj,
store = MStoreUnboxedString::New(alloc(), elements, scaledOffset, value,
elementsOffset, preBarrier);
break;
@ -13636,9 +13631,9 @@ IonBuilder::setPropTryReferenceTypedObjectValue(bool* emitted,
store = MStoreUnboxedObjectOrNull::New(alloc(), elements, scaledOffset, value, typedObj, adjustment);
break;
case ReferenceTypeDescr::TYPE_STRING:
// See previous comment. The StoreUnboxedString type policy may insert
// ToString instructions that require a post barrier.
store = MStoreUnboxedString::New(alloc(), elements, scaledOffset, value, typedObj, adjustment);
// Strings are not nursery allocated, so these writes do not need post
// barriers.
store = MStoreUnboxedString::New(alloc(), elements, scaledOffset, value, adjustment);
break;
}

Просмотреть файл

@ -753,8 +753,8 @@ IonCacheIRCompiler::emitGuardSpecificAtom()
// The pointers are not equal, so if the input string is also an atom it
// must be a different string.
masm.branchTest32(Assembler::Zero, Address(str, JSString::offsetOfFlags()),
Imm32(JSString::NON_ATOM_BIT), failure->label());
masm.branchTest32(Assembler::NonZero, Address(str, JSString::offsetOfFlags()),
Imm32(JSString::ATOM_BIT), failure->label());
// Check the length.
masm.branch32(Assembler::NotEqual, Address(str, JSString::offsetOfLength()),

Просмотреть файл

@ -610,13 +610,6 @@ class JitCompartment
return stringConcatStub_;
}
void discardStubs() {
stringConcatStub_ = nullptr;
regExpMatcherStub_ = nullptr;
regExpSearcherStub_ = nullptr;
regExpTesterStub_ = nullptr;
}
JitCode* regExpMatcherStubNoBarrier() const {
return regExpMatcherStub_;
}
@ -651,8 +644,6 @@ class JitCompartment
}
size_t sizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf) const;
bool stringsCanBeInNursery;
};
// Called from JSCompartment::discardJitCode().

Просмотреть файл

@ -116,7 +116,7 @@ TryToUseImplicitInterruptCheck(MIRGraph& graph, MBasicBlock* backedge)
continue;
}
MOZ_ASSERT_IF(iter->isPostWriteBarrierO() || iter->isPostWriteBarrierV() || iter->isPostWriteBarrierS(),
MOZ_ASSERT_IF(iter->isPostWriteBarrierO() || iter->isPostWriteBarrierV(),
iter->safepoint());
if (iter->safepoint())
@ -1960,9 +1960,7 @@ LIRGenerator::visitCharCodeAt(MCharCodeAt* ins)
MOZ_ASSERT(str->type() == MIRType::String);
MOZ_ASSERT(idx->type() == MIRType::Int32);
LCharCodeAt* lir = new(alloc()) LCharCodeAt(useRegister(str),
useRegister(idx),
temp(LDefinition::INT32));
LCharCodeAt* lir = new(alloc()) LCharCodeAt(useRegister(str), useRegister(idx));
define(lir, ins);
assignSafepoint(lir, ins);
}
@ -2854,17 +2852,6 @@ LIRGenerator::visitPostWriteBarrier(MPostWriteBarrier* ins)
assignSafepoint(lir, ins);
break;
}
case MIRType::String: {
LDefinition tmp = needTempForPostBarrier() ? temp() : LDefinition::BogusTemp();
LPostWriteBarrierS* lir =
new(alloc()) LPostWriteBarrierS(useConstantObject
? useOrConstant(ins->object())
: useRegister(ins->object()),
useRegister(ins->value()), tmp);
add(lir, ins);
assignSafepoint(lir, ins);
break;
}
case MIRType::Value: {
LDefinition tmp = needTempForPostBarrier() ? temp() : LDefinition::BogusTemp();
LPostWriteBarrierV* lir =
@ -2878,8 +2865,8 @@ LIRGenerator::visitPostWriteBarrier(MPostWriteBarrier* ins)
break;
}
default:
// Currently, only objects and strings can be in the nursery. Other
// instruction types cannot hold nursery pointers.
// Currently, only objects can be in the nursery. Other instruction
// types cannot hold nursery pointers.
break;
}
}
@ -2913,19 +2900,6 @@ LIRGenerator::visitPostWriteElementBarrier(MPostWriteElementBarrier* ins)
assignSafepoint(lir, ins);
break;
}
case MIRType::String: {
LDefinition tmp = needTempForPostBarrier() ? temp() : LDefinition::BogusTemp();
LPostWriteElementBarrierS* lir =
new(alloc()) LPostWriteElementBarrierS(useConstantObject
? useOrConstant(ins->object())
: useRegister(ins->object()),
useRegister(ins->value()),
useRegister(ins->index()),
tmp);
add(lir, ins);
assignSafepoint(lir, ins);
break;
}
case MIRType::Value: {
LDefinition tmp = needTempForPostBarrier() ? temp() : LDefinition::BogusTemp();
LPostWriteElementBarrierV* lir =
@ -2940,8 +2914,8 @@ LIRGenerator::visitPostWriteElementBarrier(MPostWriteElementBarrier* ins)
break;
}
default:
// Currently, only objects and strings can be in the nursery. Other
// instruction types cannot hold nursery pointers.
// Currently, only objects can be in the nursery. Other instruction
// types cannot hold nursery pointers.
break;
}
}

Просмотреть файл

@ -9905,7 +9905,7 @@ class MFallibleStoreElement
};
// Store an unboxed object or null pointer to an elements vector.
// Store an unboxed object or null pointer to a v\ector.
class MStoreUnboxedObjectOrNull
: public MQuaternaryInstruction,
public StoreUnboxedObjectOrNullPolicy::Data
@ -9948,30 +9948,28 @@ class MStoreUnboxedObjectOrNull
ALLOW_CLONE(MStoreUnboxedObjectOrNull)
};
// Store an unboxed string to an elements vector.
// Store an unboxed object or null pointer to a vector.
class MStoreUnboxedString
: public MQuaternaryInstruction,
public StoreUnboxedStringPolicy::Data
: public MTernaryInstruction,
public MixPolicy<SingleObjectPolicy, ConvertToStringPolicy<2> >::Data
{
int32_t offsetAdjustment_;
bool preBarrier_;
MStoreUnboxedString(MDefinition* elements, MDefinition* index,
MDefinition* value, MDefinition* typedObj,
MStoreUnboxedString(MDefinition* elements, MDefinition* index, MDefinition* value,
int32_t offsetAdjustment = 0, bool preBarrier = true)
: MQuaternaryInstruction(classOpcode, elements, index, value, typedObj),
: MTernaryInstruction(classOpcode, elements, index, value),
offsetAdjustment_(offsetAdjustment),
preBarrier_(preBarrier)
{
MOZ_ASSERT(IsValidElementsType(elements, offsetAdjustment));
MOZ_ASSERT(index->type() == MIRType::Int32);
MOZ_ASSERT(typedObj->type() == MIRType::Object);
}
public:
INSTRUCTION_HEADER(StoreUnboxedString)
TRIVIAL_NEW_WRAPPERS
NAMED_OPERANDS((0, elements), (1, index), (2, value), (3, typedObj));
NAMED_OPERANDS((0, elements), (1, index), (2, value))
int32_t offsetAdjustment() const {
return offsetAdjustment_;
@ -9983,12 +9981,6 @@ class MStoreUnboxedString
return AliasSet::Store(AliasSet::UnboxedElement);
}
// For StoreUnboxedStringPolicy, to replace the original output with the
// output of a post barrier (if one is needed.)
void setValue(MDefinition* def) {
replaceOperand(2, def);
}
ALLOW_CLONE(MStoreUnboxedString)
};

Просмотреть файл

@ -109,10 +109,6 @@ class MIRGenerator
!JitOptions.disableOptimizationTracking;
}
bool stringsCanBeInNursery() const {
return stringsCanBeInNursery_;
}
bool safeForMinorGC() const {
return safeForMinorGC_;
}
@ -202,7 +198,6 @@ class MIRGenerator
bool instrumentedProfiling_;
bool instrumentedProfilingIsCached_;
bool safeForMinorGC_;
bool stringsCanBeInNursery_;
void addAbortedPreliminaryGroup(ObjectGroup* group);

Просмотреть файл

@ -38,7 +38,6 @@ MIRGenerator::MIRGenerator(CompileCompartment* compartment, const JitCompileOpti
instrumentedProfiling_(false),
instrumentedProfilingIsCached_(false),
safeForMinorGC_(true),
stringsCanBeInNursery_(compartment ? compartment->zone()->canNurseryAllocateStrings() : false),
minWasmHeapLength_(0),
options(options),
gs_(alloc)

Просмотреть файл

@ -424,13 +424,11 @@ MacroAssembler::branchIfTrueBool(Register reg, Label* label)
}
void
MacroAssembler::branchIfRope(Register str, Register temp, Label* label)
MacroAssembler::branchIfRope(Register str, Label* label)
{
Address flags(str, JSString::offsetOfFlags());
move32(Imm32(JSString::TYPE_FLAGS_MASK), temp);
and32(flags, temp);
branch32(Assembler::Equal, temp, Imm32(JSString::ROPE_FLAGS), label);
static_assert(JSString::ROPE_FLAGS == 0, "Rope type flags must be 0");
branchTest32(Assembler::Zero, flags, Imm32(JSString::TYPE_FLAGS_MASK), label);
}
void
@ -440,18 +438,18 @@ MacroAssembler::branchIfRopeOrExternal(Register str, Register temp, Label* label
move32(Imm32(JSString::TYPE_FLAGS_MASK), temp);
and32(flags, temp);
branch32(Assembler::Equal, temp, Imm32(JSString::ROPE_FLAGS), label);
static_assert(JSString::ROPE_FLAGS == 0, "Rope type flags must be 0");
branchTest32(Assembler::Zero, temp, temp, label);
branch32(Assembler::Equal, temp, Imm32(JSString::EXTERNAL_FLAGS), label);
}
void
MacroAssembler::branchIfNotRope(Register str, Register temp, Label* label)
MacroAssembler::branchIfNotRope(Register str, Label* label)
{
Address flags(str, JSString::offsetOfFlags());
move32(Imm32(JSString::TYPE_FLAGS_MASK), temp);
and32(flags, temp);
branch32(Assembler::NotEqual, temp, Imm32(JSString::ROPE_FLAGS), label);
static_assert(JSString::ROPE_FLAGS == 0, "Rope type flags must be 0");
branchTest32(Assembler::NonZero, flags, Imm32(JSString::TYPE_FLAGS_MASK), label);
}
void

Просмотреть файл

@ -726,6 +726,7 @@ MacroAssembler::checkAllocatorState(Label* fail)
jump(fail);
}
// Inline version of ShouldNurseryAllocate.
bool
MacroAssembler::shouldNurseryAllocate(gc::AllocKind allocKind, gc::InitialHeap initialHeap)
{
@ -740,10 +741,11 @@ MacroAssembler::shouldNurseryAllocate(gc::AllocKind allocKind, gc::InitialHeap i
// Inline version of Nursery::allocateObject. If the object has dynamic slots,
// this fills in the slots_ pointer.
void
MacroAssembler::nurseryAllocateObject(Register result, Register temp, gc::AllocKind allocKind,
size_t nDynamicSlots, Label* fail)
MacroAssembler::nurseryAllocate(Register result, Register temp, gc::AllocKind allocKind,
size_t nDynamicSlots, gc::InitialHeap initialHeap, Label* fail)
{
MOZ_ASSERT(IsNurseryAllocable(allocKind));
MOZ_ASSERT(initialHeap != gc::TenuredHeap);
// We still need to allocate in the nursery, per the comment in
// shouldNurseryAllocate; however, we need to insert into the
@ -863,10 +865,8 @@ MacroAssembler::allocateObject(Register result, Register temp, gc::AllocKind all
checkAllocatorState(fail);
if (shouldNurseryAllocate(allocKind, initialHeap)) {
MOZ_ASSERT(initialHeap == gc::DefaultHeap);
return nurseryAllocateObject(result, temp, allocKind, nDynamicSlots, fail);
}
if (shouldNurseryAllocate(allocKind, initialHeap))
return nurseryAllocate(result, temp, allocKind, nDynamicSlots, initialHeap, fail);
if (!nDynamicSlots)
return freeListAllocate(result, temp, allocKind, fail);
@ -926,82 +926,16 @@ MacroAssembler::allocateNonObject(Register result, Register temp, gc::AllocKind
freeListAllocate(result, temp, allocKind, fail);
}
// Inline version of Nursery::allocateString.
void
MacroAssembler::nurseryAllocateString(Register result, Register temp, gc::AllocKind allocKind,
Label* fail)
MacroAssembler::newGCString(Register result, Register temp, Label* fail)
{
MOZ_ASSERT(IsNurseryAllocable(allocKind));
// No explicit check for nursery.isEnabled() is needed, as the comparison
// with the nursery's end will always fail in such cases.
CompileZone* zone = GetJitContext()->compartment->zone();
int thingSize = int(gc::Arena::thingSize(allocKind));
int totalSize = js::Nursery::stringHeaderSize() + thingSize;
MOZ_ASSERT(totalSize % gc::CellAlignBytes == 0);
// The nursery position (allocation pointer) and the nursery end are stored
// very close to each other. In practice, the zone will probably be close
// (within 32 bits) as well. If so, use relative offsets between them, to
// avoid multiple 64-bit immediate loads.
auto nurseryPosAddr = intptr_t(zone->addressOfStringNurseryPosition());
auto nurseryEndAddr = intptr_t(zone->addressOfStringNurseryCurrentEnd());
auto zoneAddr = intptr_t(zone);
intptr_t maxOffset = std::max(std::abs(nurseryPosAddr - zoneAddr),
std::abs(nurseryEndAddr - zoneAddr));
if (maxOffset < (1 << 31)) {
movePtr(ImmPtr(zone), temp); // temp holds the Zone pointer from here on.
loadPtr(Address(temp, nurseryPosAddr - zoneAddr), result);
addPtr(Imm32(totalSize), result); // result points past this allocation.
branchPtr(Assembler::Below, Address(temp, nurseryEndAddr - zoneAddr), result, fail);
storePtr(result, Address(temp, nurseryPosAddr - zoneAddr)); // Update position.
subPtr(Imm32(thingSize), result); // Point result at Cell data.
storePtr(temp, Address(result, -js::Nursery::stringHeaderSize())); // Store Zone*
} else {
// Otherwise, the zone is far from the nursery pointers. But the
// nursery pos/end pointers are still near each other.
movePtr(ImmPtr(zone->addressOfNurseryPosition()), temp);
loadPtr(Address(temp, 0), result);
addPtr(Imm32(totalSize), result);
branchPtr(Assembler::Below, Address(temp, nurseryEndAddr - nurseryPosAddr), result, fail);
storePtr(result, Address(temp, 0));
subPtr(Imm32(thingSize), result);
storePtr(ImmPtr(zone), Address(result, -js::Nursery::stringHeaderSize()));
}
}
// Inlined equivalent of gc::AllocateString, jumping to fail if nursery
// allocation requested but unsuccessful.
void
MacroAssembler::allocateString(Register result, Register temp, gc::AllocKind allocKind,
gc::InitialHeap initialHeap, Label* fail)
{
MOZ_ASSERT(allocKind == gc::AllocKind::STRING || allocKind == gc::AllocKind::FAT_INLINE_STRING);
checkAllocatorState(fail);
if (shouldNurseryAllocate(allocKind, initialHeap)) {
MOZ_ASSERT(initialHeap == gc::DefaultHeap);
return nurseryAllocateString(result, temp, allocKind, fail);
}
freeListAllocate(result, temp, allocKind, fail);
allocateNonObject(result, temp, js::gc::AllocKind::STRING, fail);
}
void
MacroAssembler::newGCString(Register result, Register temp, Label* fail, bool attemptNursery)
MacroAssembler::newGCFatInlineString(Register result, Register temp, Label* fail)
{
allocateString(result, temp, js::gc::AllocKind::STRING,
attemptNursery ? gc::DefaultHeap : gc::TenuredHeap, fail);
}
void
MacroAssembler::newGCFatInlineString(Register result, Register temp, Label* fail, bool attemptNursery)
{
allocateString(result, temp, js::gc::AllocKind::FAT_INLINE_STRING,
attemptNursery ? gc::DefaultHeap : gc::TenuredHeap, fail);
allocateNonObject(result, temp, js::gc::AllocKind::FAT_INLINE_STRING, fail);
}
void
@ -1409,9 +1343,9 @@ MacroAssembler::compareStrings(JSOp op, Register left, Register right, Register
Label notAtom;
// Optimize the equality operation to a pointer compare for two atoms.
Imm32 nonAtomBit(JSString::NON_ATOM_BIT);
branchTest32(Assembler::NonZero, Address(left, JSString::offsetOfFlags()), nonAtomBit, &notAtom);
branchTest32(Assembler::NonZero, Address(right, JSString::offsetOfFlags()), nonAtomBit, &notAtom);
Imm32 atomBit(JSString::ATOM_BIT);
branchTest32(Assembler::Zero, Address(left, JSString::offsetOfFlags()), atomBit, &notAtom);
branchTest32(Assembler::Zero, Address(right, JSString::offsetOfFlags()), atomBit, &notAtom);
cmpPtrSet(JSOpToCondition(MCompare::Compare_String, op), left, right, result);
jump(&done);
@ -1442,7 +1376,7 @@ MacroAssembler::loadStringChars(Register str, Register dest)
}
void
MacroAssembler::loadStringChar(Register str, Register index, Register temp, Register output, Label* fail)
MacroAssembler::loadStringChar(Register str, Register index, Register output, Label* fail)
{
MOZ_ASSERT(str != output);
MOZ_ASSERT(index != output);
@ -1451,7 +1385,7 @@ MacroAssembler::loadStringChar(Register str, Register index, Register temp, Regi
// This follows JSString::getChar.
Label notRope;
branchIfNotRope(str, temp, &notRope);
branchIfNotRope(str, &notRope);
// Load leftChild.
loadPtr(Address(str, JSRope::offsetOfLeft()), output);
@ -1461,7 +1395,7 @@ MacroAssembler::loadStringChar(Register str, Register index, Register temp, Regi
branch32(Assembler::BelowOrEqual, Address(output, JSString::offsetOfLength()), index, fail);
// If the left side is another rope, give up.
branchIfRope(output, temp, fail);
branchIfRope(output, fail);
bind(&notRope);
@ -3225,7 +3159,7 @@ MacroAssembler::emitPreBarrierFastPath(JSRuntime* rt, MIRType type, Register tem
andPtr(temp1, temp2);
// If the GC thing is in the nursery, we don't need to barrier it.
if (type == MIRType::Value || type == MIRType::Object || type == MIRType::String) {
if (type == MIRType::Value || type == MIRType::Object) {
branch32(Assembler::Equal, Address(temp2, gc::ChunkLocationOffset),
Imm32(int32_t(gc::ChunkLocation::Nursery)), noBarrier);
} else {

Просмотреть файл

@ -1065,10 +1065,6 @@ class MacroAssembler : public MacroAssemblerSpecific
inline void branchPtr(Condition cond, wasm::SymbolicAddress lhs, Register rhs, Label* label)
DEFINED_ON(arm, arm64, mips_shared, x86, x64);
// Given a pointer to a GC Cell, retrieve the StoreBuffer pointer from its
// chunk trailer, or nullptr if it is in the tenured heap.
void loadStoreBuffer(Register ptr, Register buffer) PER_ARCH;
template <typename T>
inline CodeOffsetJump branchPtrWithPatch(Condition cond, Register lhs, T rhs, RepatchLabel* label) PER_SHARED_ARCH;
template <typename T>
@ -1078,9 +1074,8 @@ class MacroAssembler : public MacroAssemblerSpecific
DEFINED_ON(arm, arm64, mips_shared, x86, x64);
void branchPtrInNurseryChunk(Condition cond, const Address& address, Register temp, Label* label)
DEFINED_ON(x86);
void branchValueIsNurseryObject(Condition cond, const Address& address, Register temp, Label* label) PER_ARCH;
void branchValueIsNurseryObject(Condition cond, ValueOperand value, Register temp, Label* label) PER_ARCH;
void branchValueIsNurseryCell(Condition cond, const Address& address, Register temp, Label* label) PER_ARCH;
void branchValueIsNurseryCell(Condition cond, ValueOperand value, Register temp, Label* label) PER_ARCH;
// This function compares a Value (lhs) which is having a private pointer
// boxed inside a js::Value, with a raw pointer (rhs).
@ -1149,10 +1144,10 @@ class MacroAssembler : public MacroAssemblerSpecific
// Branches to |label| if |reg| is true. |reg| should be a C++ bool.
inline void branchIfTrueBool(Register reg, Label* label);
inline void branchIfRope(Register str, Register temp, Label* label);
inline void branchIfRope(Register str, Label* label);
inline void branchIfRopeOrExternal(Register str, Register temp, Label* label);
inline void branchIfNotRope(Register str, Register temp, Label* label);
inline void branchIfNotRope(Register str, Label* label);
inline void branchLatin1String(Register string, Label* label);
inline void branchTwoByteString(Register string, Label* label);
@ -1299,7 +1294,7 @@ class MacroAssembler : public MacroAssemblerSpecific
void branchPtrInNurseryChunkImpl(Condition cond, Register ptr, Label* label)
DEFINED_ON(x86);
template <typename T>
void branchValueIsNurseryCellImpl(Condition cond, const T& value, Register temp, Label* label)
void branchValueIsNurseryObjectImpl(Condition cond, const T& value, Register temp, Label* label)
DEFINED_ON(arm64, mips64, x64);
template <typename T>
@ -1779,7 +1774,7 @@ class MacroAssembler : public MacroAssemblerSpecific
}
void loadStringChars(Register str, Register dest);
void loadStringChar(Register str, Register index, Register temp, Register output, Label* fail);
void loadStringChar(Register str, Register index, Register output, Label* fail);
void loadStringIndexValue(Register str, Register dest, Label* fail);
@ -2020,15 +2015,11 @@ class MacroAssembler : public MacroAssemblerSpecific
private:
void checkAllocatorState(Label* fail);
bool shouldNurseryAllocate(gc::AllocKind allocKind, gc::InitialHeap initialHeap);
void nurseryAllocateObject(Register result, Register temp, gc::AllocKind allocKind,
size_t nDynamicSlots, Label* fail);
void nurseryAllocate(Register result, Register temp, gc::AllocKind allocKind,
size_t nDynamicSlots, gc::InitialHeap initialHeap, Label* fail);
void freeListAllocate(Register result, Register temp, gc::AllocKind allocKind, Label* fail);
void allocateObject(Register result, Register temp, gc::AllocKind allocKind,
uint32_t nDynamicSlots, gc::InitialHeap initialHeap, Label* fail);
void nurseryAllocateString(Register result, Register temp, gc::AllocKind allocKind,
Label* fail);
void allocateString(Register result, Register temp, gc::AllocKind allocKind,
gc::InitialHeap initialHeap, Label* fail);
void allocateNonObject(Register result, Register temp, gc::AllocKind allocKind, Label* fail);
void copySlotsFromTemplate(Register obj, const NativeObject* templateObj,
uint32_t start, uint32_t end);
@ -2054,8 +2045,8 @@ class MacroAssembler : public MacroAssemblerSpecific
void initUnboxedObjectContents(Register object, UnboxedPlainObject* templateObject);
void newGCString(Register result, Register temp, Label* fail, bool attemptNursery);
void newGCFatInlineString(Register result, Register temp, Label* fail, bool attemptNursery);
void newGCString(Register result, Register temp, Label* fail);
void newGCFatInlineString(Register result, Register temp, Label* fail);
// Compares two strings for equality based on the JSOP.
// This checks for identical pointers, atoms and length and fails for everything else.

Просмотреть файл

@ -1096,33 +1096,6 @@ StoreUnboxedObjectOrNullPolicy::adjustInputs(TempAllocator& alloc, MInstruction*
return true;
}
bool
StoreUnboxedStringPolicy::adjustInputs(TempAllocator& alloc, MInstruction* ins)
{
if (!ObjectPolicy<0>::staticAdjustInputs(alloc, ins))
return false;
// Change the value input to a ToString instruction if it might be
// a non-null primitive.
if (!ConvertToStringPolicy<2>::staticAdjustInputs(alloc, ins))
return false;
if (!ObjectPolicy<3>::staticAdjustInputs(alloc, ins))
return false;
// Insert a post barrier for the instruction's object and whatever its new
// value is.
MStoreUnboxedString* store = ins->toStoreUnboxedString();
MOZ_ASSERT(store->typedObj()->type() == MIRType::Object);
MDefinition* value = store->value();
MOZ_ASSERT(value->type() == MIRType::String);
MInstruction* barrier = MPostWriteBarrier::New(alloc, store->typedObj(), value);
store->block()->insertBefore(store, barrier);
return true;
}
bool
ClampPolicy::adjustInputs(TempAllocator& alloc, MInstruction* ins)
{
@ -1238,7 +1211,6 @@ FilterTypeSetPolicy::adjustInputs(TempAllocator& alloc, MInstruction* ins)
_(StoreTypedArrayHolePolicy) \
_(StoreUnboxedScalarPolicy) \
_(StoreUnboxedObjectOrNullPolicy) \
_(StoreUnboxedStringPolicy) \
_(TestPolicy) \
_(AllDoublePolicy) \
_(ToDoublePolicy) \

Просмотреть файл

@ -492,13 +492,6 @@ class StoreUnboxedObjectOrNullPolicy final : public TypePolicy
virtual MOZ_MUST_USE bool adjustInputs(TempAllocator& alloc, MInstruction* def) override;
};
class StoreUnboxedStringPolicy final : public TypePolicy
{
public:
EMPTY_DATA_;
virtual MOZ_MUST_USE bool adjustInputs(TempAllocator& alloc, MInstruction* def) override;
};
// Accepts integers and doubles. Everything else is boxed.
class ClampPolicy final : public TypePolicy
{

Просмотреть файл

@ -1335,7 +1335,6 @@ AssertValidObjectPtr(JSContext* cx, JSObject* obj)
// Check what we can, so that we'll hopefully assert/crash if we get a
// bogus object (pointer).
MOZ_ASSERT(obj->compartment() == cx->compartment());
MOZ_ASSERT(obj->zoneFromAnyThread() == cx->zone());
MOZ_ASSERT(obj->runtimeFromActiveCooperatingThread() == cx->runtime());
MOZ_ASSERT_IF(!obj->hasLazyGroup() && obj->maybeShape(),
@ -1345,6 +1344,7 @@ AssertValidObjectPtr(JSContext* cx, JSObject* obj)
MOZ_ASSERT(obj->isAligned());
gc::AllocKind kind = obj->asTenured().getAllocKind();
MOZ_ASSERT(gc::IsObjectAllocKind(kind));
MOZ_ASSERT(obj->asTenured().zone() == cx->zone());
}
#endif
}

Просмотреть файл

@ -4814,66 +4814,33 @@ MacroAssembler::moveValue(const Value& src, const ValueOperand& dest)
// ===============================================================
// Branch functions
void
MacroAssembler::loadStoreBuffer(Register ptr, Register buffer)
{
ma_lsr(Imm32(gc::ChunkShift), ptr, buffer);
ma_lsl(Imm32(gc::ChunkShift), buffer, buffer);
load32(Address(buffer, gc::ChunkStoreBufferOffset), buffer);
}
void
MacroAssembler::branchPtrInNurseryChunk(Condition cond, Register ptr, Register temp,
Label* label)
{
Maybe<SecondScratchRegisterScope> scratch2;
if (temp == Register::Invalid()) {
scratch2.emplace(*this);
temp = scratch2.ref();
}
SecondScratchRegisterScope scratch2(*this);
MOZ_ASSERT(cond == Assembler::Equal || cond == Assembler::NotEqual);
MOZ_ASSERT(ptr != temp);
MOZ_ASSERT(ptr != scratch2);
ma_lsr(Imm32(gc::ChunkShift), ptr, temp);
ma_lsl(Imm32(gc::ChunkShift), temp, temp);
load32(Address(temp, gc::ChunkLocationOffset), temp);
branch32(cond, temp, Imm32(int32_t(gc::ChunkLocation::Nursery)), label);
ma_lsr(Imm32(gc::ChunkShift), ptr, scratch2);
ma_lsl(Imm32(gc::ChunkShift), scratch2, scratch2);
load32(Address(scratch2, gc::ChunkLocationOffset), scratch2);
branch32(cond, scratch2, Imm32(int32_t(gc::ChunkLocation::Nursery)), label);
}
void
MacroAssembler::branchValueIsNurseryCell(Condition cond, const Address& address,
Register temp, Label* label)
MacroAssembler::branchValueIsNurseryObject(Condition cond, const Address& address,
Register temp, Label* label)
{
MOZ_ASSERT(cond == Assembler::Equal || cond == Assembler::NotEqual);
Label done, checkAddress;
Register tag = temp;
extractTag(address, tag);
branchTestObject(Assembler::Equal, tag, &checkAddress);
branchTestString(Assembler::NotEqual, tag, cond == Assembler::Equal ? &done : label);
Label done;
branchTestObject(Assembler::NotEqual, address, cond == Assembler::Equal ? &done : label);
bind(&checkAddress);
loadPtr(ToPayload(address), temp);
SecondScratchRegisterScope scratch2(*this);
branchPtrInNurseryChunk(cond, temp, scratch2, label);
bind(&done);
}
void
MacroAssembler::branchValueIsNurseryCell(Condition cond, ValueOperand value,
Register temp, Label* label)
{
MOZ_ASSERT(cond == Assembler::Equal || cond == Assembler::NotEqual);
Label done, checkAddress;
branchTestObject(Assembler::Equal, value.typeReg(), &checkAddress);
branchTestString(Assembler::NotEqual, value.typeReg(),
cond == Assembler::Equal ? &done : label);
bind(&checkAddress);
branchPtrInNurseryChunk(cond, value.payloadReg(), temp, label);
loadPtr(address, temp);
branchPtrInNurseryChunk(cond, temp, InvalidReg, label);
bind(&done);
}
@ -4883,10 +4850,11 @@ MacroAssembler::branchValueIsNurseryObject(Condition cond, ValueOperand value,
Register temp, Label* label)
{
MOZ_ASSERT(cond == Assembler::Equal || cond == Assembler::NotEqual);
Label done;
Label done;
branchTestObject(Assembler::NotEqual, value, cond == Assembler::Equal ? &done : label);
branchPtrInNurseryChunk(cond, value.payloadReg(), temp, label);
branchPtrInNurseryChunk(cond, value.payloadReg(), InvalidReg, label);
bind(&done);
}

Просмотреть файл

@ -741,15 +741,6 @@ MacroAssembler::moveValue(const Value& src, const ValueOperand& dest)
// ===============================================================
// Branch functions
void
MacroAssembler::loadStoreBuffer(Register ptr, Register buffer)
{
if (ptr != buffer)
movePtr(ptr, buffer);
orPtr(Imm32(gc::ChunkMask), buffer);
loadPtr(Address(buffer, gc::ChunkStoreBufferOffsetFromLastByte), buffer);
}
void
MacroAssembler::branchPtrInNurseryChunk(Condition cond, Register ptr, Register temp,
Label* label)
@ -766,44 +757,23 @@ MacroAssembler::branchPtrInNurseryChunk(Condition cond, Register ptr, Register t
}
void
MacroAssembler::branchValueIsNurseryCell(Condition cond, const Address& address, Register temp,
Label* label)
MacroAssembler::branchValueIsNurseryObject(Condition cond, const Address& address, Register temp,
Label* label)
{
branchValueIsNurseryCellImpl(cond, address, temp, label);
}
void
MacroAssembler::branchValueIsNurseryCell(Condition cond, ValueOperand value, Register temp,
Label* label)
{
branchValueIsNurseryCellImpl(cond, value, temp, label);
}
template <typename T>
void
MacroAssembler::branchValueIsNurseryCellImpl(Condition cond, const T& value, Register temp,
Label* label)
{
MOZ_ASSERT(cond == Assembler::Equal || cond == Assembler::NotEqual);
MOZ_ASSERT(temp != ScratchReg && temp != ScratchReg2); // Both may be used internally.
Label done, checkAddress;
bool testNursery = (cond == Assembler::Equal);
branchTestObject(Assembler::Equal, value, &checkAddress);
branchTestString(Assembler::NotEqual, value, testNursery ? &done : label);
bind(&checkAddress);
unboxNonDouble(value, temp);
orPtr(Imm32(gc::ChunkMask), temp);
branch32(cond, Address(temp, gc::ChunkLocationOffsetFromLastByte),
Imm32(int32_t(gc::ChunkLocation::Nursery)), label);
bind(&done);
branchValueIsNurseryObjectImpl(cond, address, temp, label);
}
void
MacroAssembler::branchValueIsNurseryObject(Condition cond, ValueOperand value, Register temp,
Label* label)
{
branchValueIsNurseryObjectImpl(cond, value, temp, label);
}
template <typename T>
void
MacroAssembler::branchValueIsNurseryObjectImpl(Condition cond, const T& value, Register temp,
Label* label)
{
MOZ_ASSERT(cond == Assembler::Equal || cond == Assembler::NotEqual);
MOZ_ASSERT(temp != ScratchReg && temp != ScratchReg2); // Both may be used internally.

Просмотреть файл

@ -2369,16 +2369,14 @@ MacroAssembler::moveValue(const Value& src, const ValueOperand& dest)
// Branch functions
void
MacroAssembler::branchValueIsNurseryCell(Condition cond, const Address& address,
Register temp, Label* label)
MacroAssembler::branchValueIsNurseryObject(Condition cond, const Address& address,
Register temp, Label* label)
{
MOZ_ASSERT(cond == Assembler::Equal || cond == Assembler::NotEqual);
Label done, checkAddress;
branchTestObject(Assembler::Equal, address, &checkAddress);
branchTestString(Assembler::NotEqual, address, cond == Assembler::Equal ? &done : label);
Label done;
bind(&checkAddress);
branchTestObject(Assembler::NotEqual, address, cond == Assembler::Equal ? &done : label);
loadPtr(address, temp);
branchPtrInNurseryChunk(cond, temp, InvalidReg, label);
@ -2386,16 +2384,14 @@ MacroAssembler::branchValueIsNurseryCell(Condition cond, const Address& address,
}
void
MacroAssembler::branchValueIsNurseryCell(Condition cond, ValueOperand value,
Register temp, Label* label)
MacroAssembler::branchValueIsNurseryObject(Condition cond, ValueOperand value,
Register temp, Label* label)
{
MOZ_ASSERT(cond == Assembler::Equal || cond == Assembler::NotEqual);
Label done, checkAddress;
branchTestObject(Assembler::Equal, value, &checkAddress);
branchTestString(Assembler::NotEqual, value, cond == Assembler::Equal ? &done : label);
Label done;
bind(&checkAddress);
branchTestObject(Assembler::NotEqual, value, cond == Assembler::Equal ? &done : label);
branchPtrInNurseryChunk(cond, value.payloadReg(), temp, label);
bind(&done);

Просмотреть файл

@ -2493,9 +2493,24 @@ MacroAssembler::moveValue(const Value& src, const ValueOperand& dest)
// ===============================================================
// Branch functions
void
MacroAssembler::branchValueIsNurseryObject(Condition cond, const Address& address, Register temp,
Label* label)
{
branchValueIsNurseryObjectImpl(cond, address, temp, label);
}
void
MacroAssembler::branchValueIsNurseryObject(Condition cond, ValueOperand value,
Register temp, Label* label)
{
branchValueIsNurseryObjectImpl(cond, value, temp, label);
}
template <typename T>
void
MacroAssembler::branchValueIsNurseryObjectImpl(Condition cond, const T& value, Register temp,
Label* label)
{
MOZ_ASSERT(cond == Assembler::Equal || cond == Assembler::NotEqual);
@ -2510,40 +2525,6 @@ MacroAssembler::branchValueIsNurseryObject(Condition cond, ValueOperand value,
bind(&done);
}
void
MacroAssembler::branchValueIsNurseryCell(Condition cond, const Address& address, Register temp,
Label* label)
{
branchValueIsNurseryCellImpl(cond, address, temp, label);
}
void
MacroAssembler::branchValueIsNurseryCell(Condition cond, ValueOperand value,
Register temp, Label* label)
{
branchValueIsNurseryCellImpl(cond, value, temp, label);
}
template <typename T>
void
MacroAssembler::branchValueIsNurseryCellImpl(Condition cond, const T& value, Register temp,
Label* label)
{
MOZ_ASSERT(cond == Assembler::Equal || cond == Assembler::NotEqual);
Label done, checkAddress;
branchTestObject(Assembler::Equal, value, &checkAddress);
branchTestString(Assembler::NotEqual, value, cond == Assembler::Equal ? &done : label);
bind(&checkAddress);
extractCell(value, SecondScratchReg);
orPtr(Imm32(gc::ChunkMask), SecondScratchReg);
branch32(cond, Address(SecondScratchReg, gc::ChunkLocationOffsetFromLastByte),
Imm32(int32_t(gc::ChunkLocation::Nursery)), label);
bind(&done);
}
void
MacroAssembler::branchTestValue(Condition cond, const ValueOperand& lhs,
const Value& rhs, Label* label)

Просмотреть файл

@ -405,13 +405,6 @@ class MacroAssemblerMIPS64Compat : public MacroAssemblerMIPS64
unboxObject(value, scratch);
return scratch;
}
Register extractCell(const Address& address, Register scratch) {
return extractObject(address, scratch);
}
Register extractCell(const ValueOperand& value, Register scratch) {
unboxNonDouble(value, scratch);
return scratch;
}
Register extractInt32(const ValueOperand& value, Register scratch) {
unboxInt32(value, scratch);
return scratch;

Просмотреть файл

@ -147,10 +147,6 @@ class CodeGeneratorShared : public LElementVisitor
return gen->isProfilerInstrumentationEnabled();
}
bool stringsCanBeInNursery() const {
return gen->stringsCanBeInNursery();
}
js::Vector<NativeToTrackedOptimizations, 0, SystemAllocPolicy> trackedOptimizations_;
uint8_t* trackedOptimizationsMap_;
uint32_t trackedOptimizationsMapSize_;

Просмотреть файл

@ -4066,15 +4066,14 @@ class LConcat : public LInstructionHelper<1, 2, 5>
};
// Get uint16 character code from a string.
class LCharCodeAt : public LInstructionHelper<1, 2, 1>
class LCharCodeAt : public LInstructionHelper<1, 2, 0>
{
public:
LIR_HEADER(CharCodeAt)
LCharCodeAt(const LAllocation& str, const LAllocation& index, const LDefinition& temp) {
LCharCodeAt(const LAllocation& str, const LAllocation& index) {
setOperand(0, str);
setOperand(1, index);
setTemp(0, temp);
}
const LAllocation* str() {
@ -4083,9 +4082,6 @@ class LCharCodeAt : public LInstructionHelper<1, 2, 1>
const LAllocation* index() {
return this->getOperand(1);
}
const LDefinition* temp() {
return getTemp(0);
}
};
// Convert uint16 character code to a string.
@ -7746,33 +7742,6 @@ class LPostWriteBarrierO : public LInstructionHelper<0, 2, 1>
}
};
// Generational write barrier used when writing a string to an object.
class LPostWriteBarrierS : public LInstructionHelper<0, 2, 1>
{
public:
LIR_HEADER(PostWriteBarrierS)
LPostWriteBarrierS(const LAllocation& obj, const LAllocation& value,
const LDefinition& temp) {
setOperand(0, obj);
setOperand(1, value);
setTemp(0, temp);
}
const MPostWriteBarrier* mir() const {
return mir_->toPostWriteBarrier();
}
const LAllocation* object() {
return getOperand(0);
}
const LAllocation* value() {
return getOperand(1);
}
const LDefinition* temp() {
return getTemp(0);
}
};
// Generational write barrier used when writing a value to another object.
class LPostWriteBarrierV : public LInstructionHelper<0, 1 + BOX_PIECES, 1>
{
@ -7835,42 +7804,6 @@ class LPostWriteElementBarrierO : public LInstructionHelper<0, 3, 1>
}
};
// Generational write barrier used when writing a string to an object's
// elements.
class LPostWriteElementBarrierS : public LInstructionHelper<0, 3, 1>
{
public:
LIR_HEADER(PostWriteElementBarrierS)
LPostWriteElementBarrierS(const LAllocation& obj, const LAllocation& value,
const LAllocation& index, const LDefinition& temp) {
setOperand(0, obj);
setOperand(1, value);
setOperand(2, index);
setTemp(0, temp);
}
const MPostWriteElementBarrier* mir() const {
return mir_->toPostWriteElementBarrier();
}
const LAllocation* object() {
return getOperand(0);
}
const LAllocation* value() {
return getOperand(1);
}
const LAllocation* index() {
return getOperand(2);
}
const LDefinition* temp() {
return getTemp(0);
}
};
// Generational write barrier used when writing a value to another object's
// elements.
class LPostWriteElementBarrierV : public LInstructionHelper<0, 2 + BOX_PIECES, 1>

Просмотреть файл

@ -268,10 +268,8 @@
_(TypeBarrierO) \
_(MonitorTypes) \
_(PostWriteBarrierO) \
_(PostWriteBarrierS) \
_(PostWriteBarrierV) \
_(PostWriteElementBarrierO) \
_(PostWriteElementBarrierS) \
_(PostWriteElementBarrierV) \
_(InitializedLength) \
_(SetInitializedLength) \

Просмотреть файл

@ -616,15 +616,6 @@ MacroAssembler::moveValue(const Value& src, const ValueOperand& dest)
// ===============================================================
// Branch functions
void
MacroAssembler::loadStoreBuffer(Register ptr, Register buffer)
{
if (ptr != buffer)
movePtr(ptr, buffer);
orPtr(Imm32(gc::ChunkMask), buffer);
loadPtr(Address(buffer, gc::ChunkStoreBufferOffsetFromLastByte), buffer);
}
void
MacroAssembler::branchPtrInNurseryChunk(Condition cond, Register ptr, Register temp, Label* label)
{
@ -640,9 +631,24 @@ MacroAssembler::branchPtrInNurseryChunk(Condition cond, Register ptr, Register t
Imm32(int32_t(gc::ChunkLocation::Nursery)), label);
}
void
MacroAssembler::branchValueIsNurseryObject(Condition cond, const Address& address, Register temp,
Label* label)
{
branchValueIsNurseryObjectImpl(cond, address, temp, label);
}
void
MacroAssembler::branchValueIsNurseryObject(Condition cond, ValueOperand value, Register temp,
Label* label)
{
branchValueIsNurseryObjectImpl(cond, value, temp, label);
}
template <typename T>
void
MacroAssembler::branchValueIsNurseryObjectImpl(Condition cond, const T& value, Register temp,
Label* label)
{
MOZ_ASSERT(cond == Assembler::Equal || cond == Assembler::NotEqual);
MOZ_ASSERT(temp != InvalidReg);
@ -658,43 +664,6 @@ MacroAssembler::branchValueIsNurseryObject(Condition cond, ValueOperand value, R
bind(&done);
}
template <typename T>
void
MacroAssembler::branchValueIsNurseryCellImpl(Condition cond, const T& value, Register temp,
Label* label)
{
MOZ_ASSERT(cond == Assembler::Equal || cond == Assembler::NotEqual);
MOZ_ASSERT(temp != InvalidReg);
Label done, checkAddress;
Register tag = temp;
splitTag(value, tag);
branchTestObject(Assembler::Equal, tag, &checkAddress);
branchTestString(Assembler::NotEqual, tag, cond == Assembler::Equal ? &done : label);
bind(&checkAddress);
unboxNonDouble(value, temp);
orPtr(Imm32(gc::ChunkMask), temp);
branch32(cond, Address(temp, gc::ChunkLocationOffsetFromLastByte),
Imm32(int32_t(gc::ChunkLocation::Nursery)), label);
bind(&done);
}
void
MacroAssembler::branchValueIsNurseryCell(Condition cond, const Address& address, Register temp,
Label* label)
{
branchValueIsNurseryCellImpl(cond, address, temp, label);
}
void
MacroAssembler::branchValueIsNurseryCell(Condition cond, ValueOperand value, Register temp,
Label* label)
{
branchValueIsNurseryCellImpl(cond, value, temp, label);
}
void
MacroAssembler::branchTestValue(Condition cond, const ValueOperand& lhs,
const Value& rhs, Label* label)

Просмотреть файл

@ -541,15 +541,6 @@ MacroAssembler::moveValue(const Value& src, const ValueOperand& dest)
// ===============================================================
// Branch functions
void
MacroAssembler::loadStoreBuffer(Register ptr, Register buffer)
{
if (ptr != buffer)
movePtr(ptr, buffer);
orPtr(Imm32(gc::ChunkMask), buffer);
loadPtr(Address(buffer, gc::ChunkStoreBufferOffsetFromLastByte), buffer);
}
void
MacroAssembler::branchPtrInNurseryChunk(Condition cond, Register ptr, Register temp,
Label* label)
@ -579,6 +570,20 @@ MacroAssembler::branchPtrInNurseryChunkImpl(Condition cond, Register ptr, Label*
Imm32(int32_t(gc::ChunkLocation::Nursery)), label);
}
void
MacroAssembler::branchValueIsNurseryObject(Condition cond, const Address& address, Register temp,
Label* label)
{
MOZ_ASSERT(cond == Assembler::Equal || cond == Assembler::NotEqual);
Label done;
branchTestObject(Assembler::NotEqual, address, cond == Assembler::Equal ? &done : label);
branchPtrInNurseryChunk(cond, address, temp, label);
bind(&done);
}
void
MacroAssembler::branchValueIsNurseryObject(Condition cond, ValueOperand value, Register temp,
Label* label)
@ -593,40 +598,6 @@ MacroAssembler::branchValueIsNurseryObject(Condition cond, ValueOperand value, R
bind(&done);
}
void
MacroAssembler::branchValueIsNurseryCell(Condition cond, const Address& address, Register temp,
Label* label)
{
MOZ_ASSERT(cond == Assembler::Equal || cond == Assembler::NotEqual);
Label done, checkAddress;
Register tag = extractTag(address, temp);
MOZ_ASSERT(tag == temp);
branchTestObject(Assembler::Equal, tag, &checkAddress);
branchTestString(Assembler::NotEqual, tag, cond == Assembler::Equal ? &done : label);
bind(&checkAddress);
branchPtrInNurseryChunk(cond, ToPayload(address), temp, label);
bind(&done);
}
void
MacroAssembler::branchValueIsNurseryCell(Condition cond, ValueOperand value, Register temp,
Label* label)
{
MOZ_ASSERT(cond == Assembler::Equal || cond == Assembler::NotEqual);
Label done, checkAddress;
branchTestObject(Assembler::Equal, value, &checkAddress);
branchTestString(Assembler::NotEqual, value, cond == Assembler::Equal ? &done : label);
bind(&checkAddress);
branchPtrInNurseryChunk(cond, value.payloadReg(), temp, label);
bind(&done);
}
void
MacroAssembler::branchTestValue(Condition cond, const ValueOperand& lhs,
const Value& rhs, Label* label)

Просмотреть файл

@ -7713,13 +7713,6 @@ JS::GetObjectZone(JSObject* obj)
return obj->zone();
}
JS_PUBLIC_API(Zone*)
JS::GetNurseryStringZone(JSString* str)
{
MOZ_ASSERT(!str->isTenured());
return str->zone();
}
JS_PUBLIC_API(JS::TraceKind)
JS::GCThingTraceKind(void* thing)
{

Просмотреть файл

@ -826,7 +826,7 @@ JSCompartment::sweepAfterMinorGC(JSTracer* trc)
table.sweepAfterMinorGC();
crossCompartmentWrappers.sweepAfterMinorGC(trc);
dtoaCache.purge();
sweepMapAndSetObjectsAfterMinorGC();
}

Просмотреть файл

@ -226,7 +226,7 @@ class CrossCompartmentKey
using ReturnType = bool;
ReturnType operator()(JSObject** tp) { return !IsInsideNursery(*tp); }
ReturnType operator()(JSScript** tp) { return true; }
ReturnType operator()(JSString** tp) { return !IsInsideNursery(*tp); }
ReturnType operator()(JSString** tp) { return true; }
};
return const_cast<CrossCompartmentKey*>(this)->applyToWrapped(IsTenuredFunctor());
}

Просмотреть файл

@ -20,7 +20,6 @@
#include "js/CallArgs.h"
#include "js/CallNonGenericMethod.h"
#include "js/Class.h"
#include "js/HeapAPI.h"
#include "js/Utility.h"
#if JS_STACK_GROWTH_DIRECTION > 0
@ -613,11 +612,10 @@ struct Function {
struct String
{
static const uint32_t NON_ATOM_BIT = JS_BIT(0);
static const uint32_t INLINE_CHARS_BIT = JS_BIT(3);
static const uint32_t INLINE_CHARS_BIT = JS_BIT(2);
static const uint32_t LATIN1_CHARS_BIT = JS_BIT(6);
static const uint32_t ROPE_FLAGS = NON_ATOM_BIT;
static const uint32_t EXTERNAL_FLAGS = NON_ATOM_BIT | JS_BIT(5);
static const uint32_t ROPE_FLAGS = 0;
static const uint32_t EXTERNAL_FLAGS = JS_BIT(5);
static const uint32_t TYPE_FLAGS_MASK = JS_BIT(6) - 1;
uint32_t flags;
uint32_t length;
@ -628,11 +626,6 @@ struct String
char16_t inlineStorageTwoByte[1];
};
const JSStringFinalizer* externalFinalizer;
static bool nurseryCellIsString(const js::gc::Cell* cell) {
MOZ_ASSERT(IsInsideNursery(cell));
return reinterpret_cast<const String*>(cell)->flags & NON_ATOM_BIT;
}
};
} /* namespace shadow */

Просмотреть файл

@ -3884,19 +3884,6 @@ class MOZ_RAII js::gc::AutoRunParallelTask : public GCParallelTask
}
};
void
GCRuntime::purgeRuntimeForMinorGC()
{
// If external strings become nursery allocable, remember to call
// zone->externalStringCache().purge() (and delete this assert.)
MOZ_ASSERT(!IsNurseryAllocable(AllocKind::EXTERNAL_STRING));
for (ZonesIter zone(rt, SkipAtoms); !zone.done(); zone.next())
zone->functionToStringCache().purge();
rt->caches().purgeForMinorGC(rt);
}
void
GCRuntime::purgeRuntime(AutoLockForExclusiveAccess& lock)
{
@ -3917,7 +3904,12 @@ GCRuntime::purgeRuntime(AutoLockForExclusiveAccess& lock)
target.context()->frontendCollectionPool().purge();
}
rt->caches().purge();
rt->caches().gsnCache.purge();
rt->caches().envCoordinateNameCache.purge();
rt->caches().newObjectCache.purge();
rt->caches().uncompressedSourceCache.purge();
if (rt->caches().evalCache.initialized())
rt->caches().evalCache.clear();
if (auto cache = rt->maybeThisRuntimeSharedImmutableStrings())
cache->purge();
@ -6582,7 +6574,9 @@ GCRuntime::compactPhase(JS::gcreason::Reason reason, SliceBudget& sliceBudget,
releaseRelocatedArenas(relocatedArenas);
// Clear caches that can contain cell pointers.
rt->caches().purgeForCompaction();
rt->caches().newObjectCache.purge();
if (rt->caches().evalCache.initialized())
rt->caches().evalCache.clear();
#ifdef DEBUG
CheckHashTablesAfterMovingGC(rt);
@ -8232,10 +8226,10 @@ JS::AssertGCThingMustBeTenured(JSObject* obj)
}
JS_FRIEND_API(void)
JS::AssertGCThingIsNotNurseryAllocable(Cell* cell)
JS::AssertGCThingIsNotAnObjectSubclass(Cell* cell)
{
MOZ_ASSERT(cell);
MOZ_ASSERT(!cell->is<JSObject>() && !cell->is<JSString>());
MOZ_ASSERT(!cell->is<JSObject>());
}
JS_FRIEND_API(void)
@ -8249,8 +8243,7 @@ js::gc::AssertGCThingHasType(js::gc::Cell* cell, JS::TraceKind kind)
MOZ_ASSERT(IsCellPointerValid(cell));
if (IsInsideNursery(cell)) {
MOZ_ASSERT(kind == (JSString::nurseryCellIsString(cell) ? JS::TraceKind::String
: JS::TraceKind::Object));
MOZ_ASSERT(kind == JS::TraceKind::Object);
return;
}

Просмотреть файл

@ -167,7 +167,7 @@ class ArenaCellIterImpl
template<typename T> T* get() const {
MOZ_ASSERT(!done());
MOZ_ASSERT(JS::MapTypeToTraceKind<T>::kind == traceKind);
return reinterpret_cast<T*>(getCell());
return static_cast<T*>(getCell());
}
void next() {

Просмотреть файл

@ -41,9 +41,6 @@ JITFLAGS = {
'--ion-check-range-analysis', '--ion-extra-checks', '--no-sse3', '--no-threads'],
['--no-asmjs', '--no-wasm', '--no-baseline', '--no-ion'],
],
'baseline': [
['--no-ion'],
],
# Interpreter-only, for tools that cannot handle binary code generation.
'interp': [
['--no-baseline', '--no-asmjs', '--no-wasm', '--no-native-regexp']

Просмотреть файл

@ -64,15 +64,6 @@ struct EvalCacheEntry
JSScript* script;
JSScript* callerScript;
jsbytecode* pc;
// We sweep this cache before a nursery collection to remove entries with
// string keys in the nursery.
//
// The entire cache is purged on a major GC, so we don't need to sweep it
// then.
bool needsSweep() {
return !str->isTenured();
}
};
struct EvalCacheLookup
@ -91,7 +82,7 @@ struct EvalCacheHashPolicy
static bool match(const EvalCacheEntry& entry, const EvalCacheLookup& l);
};
typedef GCHashSet<EvalCacheEntry, EvalCacheHashPolicy, SystemAllocPolicy> EvalCache;
typedef HashSet<EvalCacheEntry, EvalCacheHashPolicy, SystemAllocPolicy> EvalCache;
/*
* Cache for speeding up repetitive creation of objects in the VM.
@ -245,24 +236,6 @@ class RuntimeCaches
js::MathCache* maybeGetMathCache() {
return mathCache_.get();
}
void purgeForMinorGC(JSRuntime* rt) {
newObjectCache.clearNurseryObjects(rt);
evalCache.sweep();
}
void purgeForCompaction() {
newObjectCache.purge();
if (evalCache.initialized())
evalCache.clear();
}
void purge() {
purgeForCompaction();
gsnCache.purge();
envCoordinateNameCache.purge();
uncompressedSourceCache.purge();
}
};
} // namespace js

Просмотреть файл

@ -224,7 +224,6 @@ GetSelectorRuntime(const CompilationSelector& selector)
{
JSRuntime* match(JSScript* script) { return script->runtimeFromActiveCooperatingThread(); }
JSRuntime* match(JSCompartment* comp) { return comp->runtimeFromActiveCooperatingThread(); }
JSRuntime* match(Zone* zone) { return zone->runtimeFromActiveCooperatingThread(); }
JSRuntime* match(ZonesInState zbs) { return zbs.runtime; }
JSRuntime* match(JSRuntime* runtime) { return runtime; }
JSRuntime* match(AllCompilations all) { return nullptr; }
@ -241,7 +240,6 @@ JitDataStructuresExist(const CompilationSelector& selector)
{
bool match(JSScript* script) { return !!script->compartment()->jitCompartment(); }
bool match(JSCompartment* comp) { return !!comp->jitCompartment(); }
bool match(Zone* zone) { return !!zone->jitZone(); }
bool match(ZonesInState zbs) { return zbs.runtime->hasJitRuntime(); }
bool match(JSRuntime* runtime) { return runtime->hasJitRuntime(); }
bool match(AllCompilations all) { return true; }
@ -260,7 +258,6 @@ IonBuilderMatches(const CompilationSelector& selector, jit::IonBuilder* builder)
bool match(JSScript* script) { return script == builder_->script(); }
bool match(JSCompartment* comp) { return comp == builder_->script()->compartment(); }
bool match(Zone* zone) { return zone == builder_->script()->zone(); }
bool match(JSRuntime* runtime) { return runtime == builder_->script()->runtimeFromAnyThread(); }
bool match(AllCompilations all) { return true; }
bool match(ZonesInState zbs) {

Просмотреть файл

@ -502,7 +502,6 @@ struct CompilationsUsingNursery { JSRuntime* runtime; };
using CompilationSelector = mozilla::Variant<JSScript*,
JSCompartment*,
Zone*,
ZonesInState,
JSRuntime*,
CompilationsUsingNursery,
@ -526,12 +525,6 @@ CancelOffThreadIonCompile(JSCompartment* comp)
CancelOffThreadIonCompile(CompilationSelector(comp), true);
}
inline void
CancelOffThreadIonCompile(Zone* zone)
{
CancelOffThreadIonCompile(CompilationSelector(zone), true);
}
inline void
CancelOffThreadIonCompile(JSRuntime* runtime, JS::Zone::GCState state)
{

Просмотреть файл

@ -14,7 +14,6 @@
#include "jsscript.h"
#include "gc/Heap.h"
#include "gc/Nursery.h"
#include "jit/BaselineJIT.h"
#include "jit/Ion.h"
#include "vm/ArrayObject.h"
@ -522,16 +521,13 @@ StatsCellCallback(JSRuntime* rt, void* data, void* thing, JS::TraceKind traceKin
case JS::TraceKind::String: {
JSString* str = static_cast<JSString*>(thing);
size_t size = thingSize;
if (!str->isTenured())
size += Nursery::stringHeaderSize();
JS::StringInfo info;
if (str->hasLatin1Chars()) {
info.gcHeapLatin1 = size;
info.gcHeapLatin1 = thingSize;
info.mallocHeapLatin1 = str->sizeOfExcludingThis(rtStats->mallocSizeOf_);
} else {
info.gcHeapTwoByte = size;
info.gcHeapTwoByte = thingSize;
info.mallocHeapTwoByte = str->sizeOfExcludingThis(rtStats->mallocSizeOf_);
}
info.numCopies = 1;

Просмотреть файл

@ -125,7 +125,7 @@ NativeObject::elementsRangeWriteBarrierPost(uint32_t start, uint32_t count)
{
for (size_t i = 0; i < count; i++) {
const Value& v = elements_[start + i];
if ((v.isObject() || v.isString()) && IsInsideNursery(v.toGCThing())) {
if (v.isObject() && IsInsideNursery(&v.toObject())) {
zone()->group()->storeBuffer().putSlot(this, HeapSlot::Element,
unshiftedIndex(start + i),
count - i);

Просмотреть файл

@ -1391,7 +1391,8 @@ class NativeObject : public ShapedObject
}
void setPrivateGCThing(gc::Cell* cell) {
MOZ_ASSERT_IF(IsMarkedBlack(this), !cell->isMarkedGray());
MOZ_ASSERT_IF(IsMarkedBlack(this),
!JS::GCThingIsMarkedGray(JS::GCCellPtr(cell, cell->getTraceKind())));
void** pprivate = &privateRef(numFixedSlots());
privateWriteBarrierPre(pprivate);
*pprivate = reinterpret_cast<void*>(cell);

Просмотреть файл

@ -224,14 +224,7 @@ class Scope : public js::gc::TenuredCell
friend class GCMarker;
// The kind determines data_.
//
// The memory here must be fully initialized, since otherwise the magic_
// value for gc::RelocationOverlay will land in the padding and may be
// stale.
union {
ScopeKind kind_;
uintptr_t paddedKind_;
};
ScopeKind kind_;
// The enclosing scope or nullptr.
GCPtrScope enclosing_;
@ -244,13 +237,11 @@ class Scope : public js::gc::TenuredCell
uintptr_t data_;
Scope(ScopeKind kind, Scope* enclosing, Shape* environmentShape)
: enclosing_(enclosing),
: kind_(kind),
enclosing_(enclosing),
environmentShape_(environmentShape),
data_(0)
{
paddedKind_ = 0;
kind_ = kind;
}
{ }
static Scope* create(JSContext* cx, ScopeKind kind, HandleScope enclosing,
HandleShape envShape);

Просмотреть файл

@ -82,9 +82,13 @@ NewInlineString(JSContext* cx, HandleLinearString base, size_t start, size_t len
}
static inline void
StringWriteBarrierPost(JSContext* maybecx, JSString** strp, JSString* prev, JSString* next)
StringWriteBarrierPost(JSContext* maybecx, JSString** strp)
{
}
static inline void
StringWriteBarrierPostRemove(JSContext* maybecx, JSString** strp)
{
js::BarrierMethods<JSString*>::postBarrier(strp, prev, next);
}
} /* namespace js */
@ -109,8 +113,8 @@ JSRope::init(JSContext* cx, JSString* left, JSString* right, size_t length)
d.u1.flags |= LATIN1_CHARS_BIT;
d.s.u2.left = left;
d.s.u3.right = right;
js::BarrierMethods<JSString*>::postBarrier(&d.s.u2.left, nullptr, left);
js::BarrierMethods<JSString*>::postBarrier(&d.s.u3.right, nullptr, right);
js::StringWriteBarrierPost(cx, &d.s.u2.left);
js::StringWriteBarrierPost(cx, &d.s.u3.right);
}
template <js::AllowGC allowGC>
@ -122,7 +126,7 @@ JSRope::new_(JSContext* cx,
{
if (!validateLength(cx, length))
return nullptr;
JSRope* str = js::Allocate<JSRope, allowGC>(cx, js::gc::DefaultHeap);
JSRope* str = static_cast<JSRope*>(js::Allocate<JSString, allowGC>(cx));
if (!str)
return nullptr;
str->init(cx, left, right, length);
@ -144,7 +148,7 @@ JSDependentString::init(JSContext* cx, JSLinearString* base, size_t start,
d.s.u2.nonInlineCharsTwoByte = base->twoByteChars(nogc) + start;
}
d.s.u3.base = base;
js::BarrierMethods<JSString*>::postBarrier(reinterpret_cast<JSString**>(&d.s.u3.base), nullptr, base);
js::StringWriteBarrierPost(cx, reinterpret_cast<JSString**>(&d.s.u3.base));
}
MOZ_ALWAYS_INLINE JSLinearString*
@ -182,7 +186,7 @@ JSDependentString::new_(JSContext* cx, JSLinearString* baseArg, size_t start,
if (baseArg->isExternal() && !baseArg->ensureFlat(cx))
return nullptr;
JSDependentString* str = js::Allocate<JSDependentString, js::NoGC>(cx, js::gc::DefaultHeap);
JSDependentString* str = static_cast<JSDependentString*>(js::Allocate<JSString, js::NoGC>(cx));
if (str) {
str->init(cx, baseArg, start, length);
return str;
@ -190,7 +194,7 @@ JSDependentString::new_(JSContext* cx, JSLinearString* baseArg, size_t start,
js::RootedLinearString base(cx, baseArg);
str = js::Allocate<JSDependentString>(cx, js::gc::DefaultHeap);
str = static_cast<JSDependentString*>(js::Allocate<JSString>(cx));
if (!str)
return nullptr;
str->init(cx, base, start, length);
@ -201,7 +205,7 @@ MOZ_ALWAYS_INLINE void
JSFlatString::init(const char16_t* chars, size_t length)
{
d.u1.length = length;
d.u1.flags = FLAT_FLAGS;
d.u1.flags = FLAT_BIT;
d.s.u2.nonInlineCharsTwoByte = chars;
}
@ -209,7 +213,7 @@ MOZ_ALWAYS_INLINE void
JSFlatString::init(const JS::Latin1Char* chars, size_t length)
{
d.u1.length = length;
d.u1.flags = FLAT_FLAGS | LATIN1_CHARS_BIT;
d.u1.flags = FLAT_BIT | LATIN1_CHARS_BIT;
d.s.u2.nonInlineCharsLatin1 = chars;
}
@ -226,24 +230,10 @@ JSFlatString::new_(JSContext* cx, const CharT* chars, size_t length)
if (cx->compartment()->isAtomsCompartment())
str = js::Allocate<js::NormalAtom, allowGC>(cx);
else
str = js::Allocate<JSFlatString, allowGC>(cx, js::gc::DefaultHeap);
str = static_cast<JSFlatString*>(js::Allocate<JSString, allowGC>(cx));
if (!str)
return nullptr;
if (!str->isTenured()) {
// The chars pointer is only considered to be handed over to this
// function on a successful return. If the following registration
// fails, the string is partially initialized and must be made valid,
// or its finalizer may attempt to free uninitialized memory.
void* ptr = const_cast<void*>(static_cast<const void*>(chars));
if (!cx->runtime()->gc.nursery().registerMallocedBuffer(ptr)) {
str->init((JS::Latin1Char*)nullptr, 0);
if (allowGC)
ReportOutOfMemory(cx);
return nullptr;
}
}
str->init(chars, length);
return str;
}
@ -270,7 +260,7 @@ JSThinInlineString::new_(JSContext* cx)
if (cx->compartment()->isAtomsCompartment())
return (JSThinInlineString*)(js::Allocate<js::NormalAtom, allowGC>(cx));
return js::Allocate<JSThinInlineString, allowGC>(cx, js::gc::DefaultHeap);
return static_cast<JSThinInlineString*>(js::Allocate<JSString, allowGC>(cx));
}
template <js::AllowGC allowGC>
@ -280,7 +270,7 @@ JSFatInlineString::new_(JSContext* cx)
if (cx->compartment()->isAtomsCompartment())
return (JSFatInlineString*)(js::Allocate<js::FatInlineAtom, allowGC>(cx));
return js::Allocate<JSFatInlineString, allowGC>(cx, js::gc::DefaultHeap);
return js::Allocate<JSFatInlineString, allowGC>(cx);
}
template<>

Просмотреть файл

@ -14,7 +14,6 @@
#include "mozilla/Unused.h"
#include "gc/Marking.h"
#include "gc/Nursery.h"
#include "js/UbiNode.h"
#include "vm/GeckoProfiler.h"
@ -91,9 +90,9 @@ JS::ubi::Concrete<JSString>::size(mozilla::MallocSizeOf mallocSizeOf) const
else
size = str.isFatInline() ? sizeof(JSFatInlineString) : sizeof(JSString);
if (IsInsideNursery(&str))
size += Nursery::stringHeaderSize();
// We can't use mallocSizeof on things in the nursery. At the moment,
// strings are never in the nursery, but that may change.
MOZ_ASSERT(!IsInsideNursery(&str));
size += str.sizeOfExcludingThis(mallocSizeOf);
return size;
@ -209,11 +208,10 @@ JSString::dumpRepresentationHeader(js::GenericPrinter& out, int indent, const ch
if (flags & FLAT_BIT) out.put(" FLAT");
if (flags & HAS_BASE_BIT) out.put(" HAS_BASE");
if (flags & INLINE_CHARS_BIT) out.put(" INLINE_CHARS");
if (flags & NON_ATOM_BIT) out.put(" NON_ATOM");
if (flags & ATOM_BIT) out.put(" ATOM");
if (isPermanentAtom()) out.put(" PERMANENT");
if (flags & LATIN1_CHARS_BIT) out.put(" LATIN1");
if (flags & INDEX_VALUE_BIT) out.put(" INDEX_VALUE(%u)", getIndexValue());
if (!isTenured()) out.put(" NURSERY");
out.putChar('\n');
}
@ -486,7 +484,6 @@ JSRope::flattenInternal(JSContext* maybecx)
JSString::writeBarrierPre(str->d.s.u3.right);
}
JSString* child = str->d.s.u2.left;
js::BarrierMethods<JSString*>::postBarrier(&str->d.s.u2.left, child, nullptr);
MOZ_ASSERT(child->isRope());
str->setNonInlineChars(left.nonInlineChars<CharT>(nogc));
child->d.u1.flattenData = uintptr_t(str) | Tag_VisitRightChild;
@ -500,17 +497,14 @@ JSRope::flattenInternal(JSContext* maybecx)
wholeCapacity = capacity;
wholeChars = const_cast<CharT*>(left.nonInlineChars<CharT>(nogc));
pos = wholeChars + left.d.u1.length;
static_assert((EXTENSIBLE_FLAGS & DEPENDENT_FLAGS) == NON_ATOM_BIT,
"extensible and dependent flags must only overlap on NON_ATOM_BIT");
left.d.u1.flags ^= (EXTENSIBLE_FLAGS | DEPENDENT_FLAGS) & ~NON_ATOM_BIT;
JS_STATIC_ASSERT(!(EXTENSIBLE_FLAGS & DEPENDENT_FLAGS));
left.d.u1.flags ^= (EXTENSIBLE_FLAGS | DEPENDENT_FLAGS);
left.d.s.u3.base = (JSLinearString*)this; /* will be true on exit */
MOZ_ASSERT(!static_cast<JSString&>(left).isExtensible());
MOZ_ASSERT(left.isDependent());
MOZ_ASSERT(!left.isAtom());
BarrierMethods<JSString*>::postBarrier((JSString**)&left.d.s.u3.base, nullptr, this);
StringWriteBarrierPostRemove(maybecx, &left.d.s.u2.left);
StringWriteBarrierPost(maybecx, (JSString**)&left.d.s.u3.base);
goto visit_right_child;
}
}
}
if (!AllocChars(this, wholeLength, &wholeChars, &wholeCapacity)) {
if (maybecx)
@ -518,15 +512,6 @@ JSRope::flattenInternal(JSContext* maybecx)
return nullptr;
}
if (!isTenured() && maybecx) {
JSRuntime* rt = maybecx->runtime();
if (!rt->gc.nursery().registerMallocedBuffer(wholeChars)) {
js_free(wholeChars);
ReportOutOfMemory(maybecx);
return nullptr;
}
}
pos = wholeChars;
first_visit_node: {
if (b == WithIncrementalBarrier) {
@ -535,8 +520,8 @@ JSRope::flattenInternal(JSContext* maybecx)
}
JSString& left = *str->d.s.u2.left;
js::BarrierMethods<JSString*>::postBarrier(&str->d.s.u2.left, &left, nullptr);
str->setNonInlineChars(pos);
StringWriteBarrierPostRemove(maybecx, &str->d.s.u2.left);
if (left.isRope()) {
/* Return to this node when 'left' done, then goto visit_right_child. */
left.d.u1.flattenData = uintptr_t(str) | Tag_VisitRightChild;
@ -548,7 +533,6 @@ JSRope::flattenInternal(JSContext* maybecx)
}
visit_right_child: {
JSString& right = *str->d.s.u3.right;
BarrierMethods<JSString*>::postBarrier(&str->d.s.u3.right, &right, nullptr);
if (right.isRope()) {
/* Return to this node when 'right' done, then goto finish_node. */
right.d.u1.flattenData = uintptr_t(str) | Tag_FinishNode;
@ -558,7 +542,6 @@ JSRope::flattenInternal(JSContext* maybecx)
CopyChars(pos, right.asLinear());
pos += right.length();
}
finish_node: {
if (str == this) {
MOZ_ASSERT(pos == wholeChars + wholeLength);
@ -570,6 +553,8 @@ JSRope::flattenInternal(JSContext* maybecx)
str->d.u1.flags = EXTENSIBLE_FLAGS | LATIN1_CHARS_BIT;
str->setNonInlineChars(wholeChars);
str->d.s.u3.capacity = wholeCapacity;
StringWriteBarrierPostRemove(maybecx, &str->d.s.u2.left);
StringWriteBarrierPostRemove(maybecx, &str->d.s.u3.right);
return &this->asFlat();
}
uintptr_t flattenData = str->d.u1.flattenData;
@ -579,7 +564,7 @@ JSRope::flattenInternal(JSContext* maybecx)
str->d.u1.flags = DEPENDENT_FLAGS | LATIN1_CHARS_BIT;
str->d.u1.length = pos - str->asLinear().nonInlineChars<CharT>(nogc);
str->d.s.u3.base = (JSLinearString*)this; /* will be true on exit */
BarrierMethods<JSString*>::postBarrier((JSString**)&str->d.s.u3.base, nullptr, this);
StringWriteBarrierPost(maybecx, (JSString**)&str->d.s.u3.base);
str = (JSString*)(flattenData & ~Tag_Mask);
if ((flattenData & Tag_Mask) == Tag_VisitRightChild)
goto visit_right_child;
@ -1131,11 +1116,9 @@ JSExternalString::ensureFlat(JSContext* cx)
// Release the external chars.
finalize(cx->runtime()->defaultFreeOp());
// Transform the string into a non-external, flat string. Note that the
// resulting string will still be in an AllocKind::EXTERNAL_STRING arena,
// but will no longer be an external string.
// Transform the string into a non-external, flat string.
setNonInlineChars<char16_t>(s);
d.u1.flags = FLAT_FLAGS;
d.u1.flags = FLAT_BIT;
return &this->asFlat();
}

Просмотреть файл

@ -16,9 +16,7 @@
#include "jsstr.h"
#include "gc/Barrier.h"
#include "gc/Cell.h"
#include "gc/Heap.h"
#include "gc/Nursery.h"
#include "gc/Rooting.h"
#include "js/CharacterEncoding.h"
#include "js/RootingAPI.h"
@ -155,7 +153,7 @@ static const size_t UINT32_CHAR_BUFFER_LENGTH = sizeof("4294967295") - 1;
* at least X (e.g., ensureLinear will change a JSRope to be a JSFlatString).
*/
class JSString : public js::gc::Cell
class JSString : public js::gc::TenuredCell
{
protected:
static const size_t NUM_INLINE_CHARS_LATIN1 = 2 * sizeof(void*) / sizeof(JS::Latin1Char);
@ -222,28 +220,28 @@ class JSString : public js::gc::Cell
* String Instance Subtype
* type encoding predicate
* ------------------------------------
* Rope 000001 000001
* Linear - !000001
* HasBase - xxx1xx
* Dependent 000101 000101
* External 100001 100001
* Flat - xxxx1x
* Undepended 000111 000111
* Extensible 010011 010011
* Inline 001011 xx1xxx
* FatInline 011011 x11xxx
* Atom 000000 xxxxx0
* PermanentAtom 100000 1xxxx0
* InlineAtom - xx1xx0
* FatInlineAtom - x11xx0
* Rope 000000 000000
* Linear - !000000
* HasBase - xxxx1x
* Dependent 000010 000010
* External 100000 100000
* Flat - xxxxx1
* Undepended 000011 000011
* Extensible 010001 010001
* Inline 000101 xxx1xx
* FatInline 010101 x1x1xx
* Atom 001001 xx1xxx
* PermanentAtom 101001 1x1xxx
* InlineAtom - xx11xx
* FatInlineAtom - x111xx
*
* Note that the first 4 flag bits (from right to left in the previous table)
* have the following meaning and can be used for some hot queries:
*
* Bit 0: !IsAtom (Atom, PermanentAtom)
* Bit 1: IsFlat
* Bit 2: HasBase (Dependent, Undepended)
* Bit 3: IsInline (Inline, FatInline)
* Bit 0: IsFlat
* Bit 1: HasBase (Dependent, Undepended)
* Bit 2: IsInline (Inline, FatInline)
* Bit 3: IsAtom (Atom, PermanentAtom)
*
* "HasBase" here refers to the two string types that have a 'base' field:
* JSDependentString and JSUndependedString.
@ -251,37 +249,27 @@ class JSString : public js::gc::Cell
* to be null-terminated. In such cases, the string must keep marking its base since
* there may be any number of *other* JSDependentStrings transitively depending on it.
*
* The atom bit (NON_ATOM_BIT) is inverted so that objects and strings can
* be differentiated in the nursery: atoms are never in the nursery, so
* this bit is always 1 for a nursery string. For an object on a
* little-endian architecture, this is the low-order bit of the ObjectGroup
* pointer in a JSObject, which will always be zero. A 64-bit big-endian
* architecture will need to do something else (the ObjectGroup* is in the
* same place as a string's struct { uint32_t flags; uint32_t length; }).
*
* If the INDEX_VALUE_BIT is set the upper 16 bits of the flag word hold the integer
* index.
*/
static const uint32_t NON_ATOM_BIT = JS_BIT(0);
static const uint32_t FLAT_BIT = JS_BIT(1);
static const uint32_t HAS_BASE_BIT = JS_BIT(2);
static const uint32_t INLINE_CHARS_BIT = JS_BIT(3);
static const uint32_t FLAT_BIT = JS_BIT(0);
static const uint32_t HAS_BASE_BIT = JS_BIT(1);
static const uint32_t INLINE_CHARS_BIT = JS_BIT(2);
static const uint32_t ATOM_BIT = JS_BIT(3);
static const uint32_t ROPE_FLAGS = NON_ATOM_BIT;
static const uint32_t DEPENDENT_FLAGS = NON_ATOM_BIT | HAS_BASE_BIT;
static const uint32_t FLAT_FLAGS = NON_ATOM_BIT | FLAT_BIT;
static const uint32_t UNDEPENDED_FLAGS = NON_ATOM_BIT | FLAT_BIT | HAS_BASE_BIT;
static const uint32_t EXTENSIBLE_FLAGS = NON_ATOM_BIT | FLAT_BIT | JS_BIT(4);
static const uint32_t EXTERNAL_FLAGS = NON_ATOM_BIT | JS_BIT(5);
static const uint32_t ROPE_FLAGS = 0;
static const uint32_t DEPENDENT_FLAGS = HAS_BASE_BIT;
static const uint32_t UNDEPENDED_FLAGS = FLAT_BIT | HAS_BASE_BIT;
static const uint32_t EXTENSIBLE_FLAGS = FLAT_BIT | JS_BIT(4);
static const uint32_t EXTERNAL_FLAGS = JS_BIT(5);
static const uint32_t FAT_INLINE_MASK = INLINE_CHARS_BIT | JS_BIT(4);
static const uint32_t PERMANENT_ATOM_MASK = NON_ATOM_BIT | JS_BIT(5);
static const uint32_t PERMANENT_ATOM = JS_BIT(5);
static const uint32_t PERMANENT_ATOM_MASK = ATOM_BIT | JS_BIT(5);
/* Initial flags for thin inline and fat inline strings. */
static const uint32_t INIT_THIN_INLINE_FLAGS = NON_ATOM_BIT | FLAT_BIT | INLINE_CHARS_BIT;
static const uint32_t INIT_FAT_INLINE_FLAGS = NON_ATOM_BIT | FLAT_BIT | FAT_INLINE_MASK;
static const uint32_t INIT_THIN_INLINE_FLAGS = FLAT_BIT | INLINE_CHARS_BIT;
static const uint32_t INIT_FAT_INLINE_FLAGS = FLAT_BIT | FAT_INLINE_MASK;
static const uint32_t TYPE_FLAGS_MASK = JS_BIT(6) - 1;
@ -483,12 +471,12 @@ class JSString : public js::gc::Cell
MOZ_ALWAYS_INLINE
bool isAtom() const {
return !(d.u1.flags & NON_ATOM_BIT);
return d.u1.flags & ATOM_BIT;
}
MOZ_ALWAYS_INLINE
bool isPermanentAtom() const {
return (d.u1.flags & PERMANENT_ATOM_MASK) == PERMANENT_ATOM;
return (d.u1.flags & PERMANENT_ATOM_MASK) == PERMANENT_ATOM_MASK;
}
MOZ_ALWAYS_INLINE
@ -497,14 +485,6 @@ class JSString : public js::gc::Cell
return *(JSAtom*)this;
}
// Used for distinguishing strings from objects in the nursery. The caller
// must ensure that cell is in the nursery (and not forwarded).
MOZ_ALWAYS_INLINE
static bool nurseryCellIsString(js::gc::Cell* cell) {
MOZ_ASSERT(!cell->isTenured());
return !static_cast<JSString*>(cell)->isAtom();
}
// Fills |array| with various strings that represent the different string
// kinds and character encodings.
static bool fillWithRepresentatives(JSContext* cx, js::HandleArrayObject array);
@ -545,54 +525,6 @@ class JSString : public js::gc::Cell
static const JS::TraceKind TraceKind = JS::TraceKind::String;
JS::Zone* zone() const {
if (isTenured()) {
// Allow permanent atoms to be accessed across zones and runtimes.
if (isPermanentAtom())
return zoneFromAnyThread();
return asTenured().zone();
}
return js::Nursery::getStringZone(this);
}
// Implement TenuredZone members needed for template instantiations.
JS::Zone* zoneFromAnyThread() const {
if (isTenured())
return asTenured().zoneFromAnyThread();
return js::Nursery::getStringZone(this);
}
void fixupAfterMovingGC() {}
js::gc::AllocKind getAllocKind() const {
using js::gc::AllocKind;
AllocKind kind;
if (isAtom())
if (isFatInline())
kind = AllocKind::FAT_INLINE_ATOM;
else
kind = AllocKind::ATOM;
else if (isFatInline())
kind = AllocKind::FAT_INLINE_STRING;
else if (isExternal())
kind = AllocKind::EXTERNAL_STRING;
else
kind = AllocKind::STRING;
#if DEBUG
if (isTenured()) {
// Normally, the kinds should match, but an EXTERNAL_STRING arena
// may contain strings that have been flattened (see
// JSExternalString::ensureFlat).
AllocKind tenuredKind = asTenured().getAllocKind();
MOZ_ASSERT(kind == tenuredKind ||
(tenuredKind == AllocKind::EXTERNAL_STRING && kind == AllocKind::STRING));
}
#endif
return kind;
}
#ifdef DEBUG
void dump(); // Debugger-friendly stderr dump.
void dump(js::GenericPrinter& out);
@ -610,42 +542,17 @@ class JSString : public js::gc::Cell
void traceChildren(JSTracer* trc);
static MOZ_ALWAYS_INLINE void readBarrier(JSString* thing) {
if (thing->isPermanentAtom() || js::gc::IsInsideNursery(thing))
if (thing->isPermanentAtom())
return;
js::gc::TenuredCell::readBarrier(&thing->asTenured());
TenuredCell::readBarrier(thing);
}
static MOZ_ALWAYS_INLINE void writeBarrierPre(JSString* thing) {
if (!thing || thing->isPermanentAtom() || js::gc::IsInsideNursery(thing))
if (!thing || thing->isPermanentAtom())
return;
js::gc::TenuredCell::writeBarrierPre(&thing->asTenured());
}
static void addCellAddressToStoreBuffer(js::gc::StoreBuffer* buffer, js::gc::Cell** cellp)
{
buffer->putCell(cellp);
}
static void removeCellAddressFromStoreBuffer(js::gc::StoreBuffer* buffer, js::gc::Cell** cellp)
{
buffer->unputCell(cellp);
}
static void writeBarrierPost(void* cellp, JSString* prev, JSString* next) {
// See JSObject::writeBarrierPost for a description of the logic here.
MOZ_ASSERT(cellp);
js::gc::StoreBuffer* buffer;
if (next && (buffer = next->storeBuffer())) {
if (prev && prev->storeBuffer())
return;
buffer->putCell(static_cast<js::gc::Cell**>(cellp));
return;
}
if (prev && (buffer = prev->storeBuffer()))
buffer->unputCell(static_cast<js::gc::Cell**>(cellp));
TenuredCell::writeBarrierPre(thing);
}
private:
@ -722,7 +629,6 @@ class JSLinearString : public JSString
{
friend class JSString;
friend class js::AutoStableStringChars;
friend class js::TenuringTracer;
/* Vacuous and therefore unimplemented. */
JSLinearString* ensureLinear(JSContext* cx) = delete;
@ -1090,11 +996,6 @@ class JSExternalString : public JSLinearString
inline void finalize(js::FreeOp* fop);
/*
* Free the external chars and allocate a new buffer, converting this to a
* flat string (which still lives in an AllocKind::EXTERNAL_STRING
* arena).
*/
JSFlatString* ensureFlat(JSContext* cx);
#ifdef DEBUG
@ -1137,8 +1038,7 @@ class JSAtom : public JSFlatString
// Transform this atom into a permanent atom. This is only done during
// initialization of the runtime.
MOZ_ALWAYS_INLINE void morphIntoPermanentAtom() {
MOZ_ASSERT(static_cast<JSString*>(this)->isAtom());
d.u1.flags = (d.u1.flags & ~PERMANENT_ATOM_MASK) | PERMANENT_ATOM;
d.u1.flags |= PERMANENT_ATOM_MASK;
}
inline js::HashNumber hash() const;
@ -1215,8 +1115,7 @@ JSAtom::initHash(js::HashNumber hash)
MOZ_ALWAYS_INLINE JSAtom*
JSFlatString::morphAtomizedStringIntoAtom(js::HashNumber hash)
{
MOZ_ASSERT(!isAtom());
d.u1.flags &= ~NON_ATOM_BIT;
d.u1.flags |= ATOM_BIT;
JSAtom* atom = &asAtom();
atom->initHash(hash);
return atom;
@ -1225,8 +1124,7 @@ JSFlatString::morphAtomizedStringIntoAtom(js::HashNumber hash)
MOZ_ALWAYS_INLINE JSAtom*
JSFlatString::morphAtomizedStringIntoPermanentAtom(js::HashNumber hash)
{
MOZ_ASSERT(!isAtom());
d.u1.flags = (d.u1.flags & ~PERMANENT_ATOM_MASK) | PERMANENT_ATOM;
d.u1.flags |= PERMANENT_ATOM_MASK;
JSAtom* atom = &asAtom();
atom->initHash(hash);
return atom;
@ -1669,22 +1567,4 @@ JSAtom::asPropertyName()
return static_cast<js::PropertyName*>(this);
}
namespace js {
namespace gc {
template<>
inline JSString*
Cell::as<JSString>() {
MOZ_ASSERT(is<JSString>());
return reinterpret_cast<JSString*>(this);
}
template<>
inline JSString*
TenuredCell::as<JSString>() {
MOZ_ASSERT(is<JSString>());
return reinterpret_cast<JSString*>(this);
}
}
}
#endif /* vm_String_h */

Просмотреть файл

@ -123,10 +123,8 @@ SetUnboxedValue(JSContext* cx, JSObject* unboxedObject, jsid id,
case JSVAL_TYPE_STRING:
if (v.isString()) {
MOZ_ASSERT(!IsInsideNursery(v.toString()));
JSString** np = reinterpret_cast<JSString**>(p);
if (IsInsideNursery(v.toString()) && !IsInsideNursery(unboxedObject))
unboxedObject->zone()->group()->storeBuffer().putWholeCell(unboxedObject);
if (preBarrier)
JSString::writeBarrierPre(*np);
*np = v.toString();

Просмотреть файл

@ -137,23 +137,13 @@ UnboxedLayout::makeConstructorCode(JSContext* cx, HandleObjectGroup group)
Label postBarrier;
for (size_t i = 0; i < layout.properties().length(); i++) {
const UnboxedLayout::Property& property = layout.properties()[i];
if (!UnboxedTypeNeedsPostBarrier(property.type))
continue;
Address valueAddress(propertiesReg, i * sizeof(IdValuePair) + offsetof(IdValuePair, value));
if (property.type == JSVAL_TYPE_OBJECT) {
Address valueAddress(propertiesReg, i * sizeof(IdValuePair) + offsetof(IdValuePair, value));
Label notObject;
masm.branchTestObject(Assembler::NotEqual, valueAddress, &notObject);
Register valueObject = masm.extractObject(valueAddress, scratch1);
masm.branchPtrInNurseryChunk(Assembler::Equal, valueObject, scratch2, &postBarrier);
masm.bind(&notObject);
} else {
MOZ_ASSERT(property.type == JSVAL_TYPE_STRING);
Label notString;
masm.branchTestString(Assembler::NotEqual, valueAddress, &notString);
Register valueString = masm.extractString(valueAddress, scratch1);
masm.branchPtrInNurseryChunk(Assembler::Equal, valueString, scratch2, &postBarrier);
masm.bind(&notString);
}
}

Просмотреть файл

@ -40,7 +40,7 @@ UnboxedTypeNeedsPreBarrier(JSValueType type)
static inline bool
UnboxedTypeNeedsPostBarrier(JSValueType type)
{
return type == JSVAL_TYPE_STRING || type == JSVAL_TYPE_OBJECT;
return type == JSVAL_TYPE_OBJECT;
}
// Class tracking information specific to unboxed objects.

Просмотреть файл

@ -73,10 +73,6 @@
shortStrings.push(str);
}
// Strings in the nursery are not reported, so make sure the above test
// strings are tenured.
Components.utils.forceGC();
let mySandbox = Components.utils.Sandbox(document.nodePrincipal,
{ sandboxName: "this-is-a-sandbox-name" });