Bug 1251833 - Part 1: Move allocation into FreeSpan and move firstFreeSpan to the top of Arenas. r=terrence

Emanuel Hoogeveen 2016-02-29 11:21:00 -05:00
Parent 40bd6a6b1e
Commit 75b7110778
7 changed files: 88 additions and 62 deletions
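The gist of the change: the FreeSpan describing an arena's free cells moves to offset 0 of the arena, so a FreeSpan* doubles as a pointer to its arena; the allocation fast path moves from ArenaHeader into FreeSpan; and ArenaLists::freeLists caches FreeSpan* instead of ArenaHeader*. A minimal layout sketch for orientation, with illustrative field widths and only the members relevant here (not the real SpiderMonkey definitions):

    #include <cstdint>

    namespace JS { class Zone; }

    struct FreeSpan {
        uint16_t first;  // arena offset of the first free thing; 0 means the span is empty
        uint16_t last;   // arena offset of the last free thing in this span
    };

    struct ArenaHeader {
        FreeSpan firstFreeSpan;  // now the first member, i.e. at offset 0 of the arena
        JS::Zone* zone;          // pointer alignment places this at sizeof(size_t) on both
                                 // 32- and 64-bit, matching the new ArenaZoneOffset below
        // ... next, allocKind, and so on

        FreeSpan* getFirstFreeSpan() { return &firstFreeSpan; }
    };

    // Because firstFreeSpan sits at offset 0, a FreeSpan* taken from an arena and that
    // arena's ArenaHeader* are the same address; the new FreeSpan::getArenaUnchecked()
    // is just reinterpret_cast<ArenaHeader*>(this).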

View file

@@ -56,7 +56,7 @@ const size_t ChunkMarkBitmapBits = 129024;
const size_t ChunkRuntimeOffset = ChunkSize - sizeof(void*);
const size_t ChunkTrailerSize = 2 * sizeof(uintptr_t) + sizeof(uint64_t);
const size_t ChunkLocationOffset = ChunkSize - ChunkTrailerSize;
-const size_t ArenaZoneOffset = 0;
+const size_t ArenaZoneOffset = sizeof(size_t);
/*
* Live objects are marked black. How many other additional colors are available

View file

@@ -358,11 +358,11 @@ ArenaLists::allocateFromArenaInner(JS::Zone* zone, ArenaHeader* aheader, AllocKi
{
size_t thingSize = Arena::thingSize(kind);
-freeLists[kind] = aheader;
+freeLists[kind] = aheader->getFirstFreeSpan();
if (MOZ_UNLIKELY(zone->wasGCStarted()))
zone->runtimeFromAnyThread()->gc.arenaAllocatedDuringGC(zone, aheader);
-TenuredCell* thing = aheader->allocate(thingSize);
+TenuredCell* thing = freeLists[kind]->allocate(thingSize);
MOZ_ASSERT(thing); // This allocation is infallible.
return thing;
}

View file

@@ -10,6 +10,7 @@
#include "mozilla/ArrayUtils.h"
#include "mozilla/Atomics.h"
#include "mozilla/Attributes.h"
#include "mozilla/DebugOnly.h"
#include "mozilla/EnumeratedArray.h"
#include "mozilla/EnumeratedRange.h"
#include "mozilla/PodOperations.h"
@@ -352,22 +353,55 @@ class FreeSpan
checkSpan(aheader);
}
-bool isEmpty(const ArenaHeader* aheader) const {
-checkSpan(aheader);
+bool isEmpty() const {
return !first;
}
+ArenaHeader* getArenaUnchecked() { return reinterpret_cast<ArenaHeader*>(this); }
+inline ArenaHeader* getArena();
+static size_t offsetOfFirst() {
+return offsetof(FreeSpan, first);
+}
+static size_t offsetOfLast() {
+return offsetof(FreeSpan, last);
+}
// Like nextSpan(), but no checking of the following span is done.
FreeSpan* nextSpanUnchecked(const ArenaHeader* aheader) const {
-MOZ_ASSERT(aheader && first);
+MOZ_ASSERT(aheader && !isEmpty());
return reinterpret_cast<FreeSpan*>(uintptr_t(aheader) + last);
}
const FreeSpan* nextSpan(const ArenaHeader* aheader) const {
-MOZ_ASSERT(!isEmpty(aheader));
+checkSpan(aheader);
return nextSpanUnchecked(aheader);
}
+MOZ_ALWAYS_INLINE TenuredCell* allocate(size_t thingSize) {
+// Eschew the usual checks, because this might be the placeholder span.
+// If this is somehow an invalid, non-empty span, checkSpan() will catch it.
+ArenaHeader* arena = getArenaUnchecked();
+checkSpan(arena);
+uintptr_t thing = uintptr_t(arena) + first;
+if (first < last) {
+// We have space for at least two more things, so do a simple bump-allocate.
+first += thingSize;
+} else if (MOZ_LIKELY(first)) {
+// The last space points to the next free span (which may be empty).
+const FreeSpan* next = nextSpan(arena);
+first = next->first;
+last = next->last;
+} else {
+return nullptr; // The span is empty.
+}
+checkSpan(arena);
+JS_EXTRA_POISON(reinterpret_cast<void*>(thing), JS_ALLOCATED_TENURED_PATTERN, thingSize);
+MemProfiler::SampleTenured(reinterpret_cast<void*>(thing), thingSize);
+return reinterpret_cast<TenuredCell*>(thing);
+}
inline void checkSpan(const ArenaHeader* aheader) const;
inline void checkRange(uintptr_t first, uintptr_t last, const ArenaHeader*) const;
};
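Callers now allocate through the cached span rather than through the arena header; an empty span (including the static placeholder introduced further down) simply yields nullptr and the slow path refills the free list. A rough sketch of the calling pattern, using the simplified declarations above rather than the exact ArenaLists code:

    FreeSpan* span = aheader->getFirstFreeSpan();
    freeLists[kind] = span;                        // cache the FreeSpan*, not the ArenaHeader*
    TenuredCell* thing = span->allocate(Arena::thingSize(kind));
    if (!thing) {
        // The span was empty (or was the placeholder): refill the free list instead.
    }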
@@ -376,8 +410,14 @@ class FreeSpan
struct ArenaHeader
{
friend struct Arena;
friend class ArenaCellIterImpl;
+private:
+/*
+* The first span of free things in the arena.
+*/
+FreeSpan firstFreeSpan;
+public:
JS::Zone* zone;
/*
@@ -388,11 +428,6 @@ struct ArenaHeader
ArenaHeader* next;
private:
-/*
-* The first span of free things in the arena.
-*/
-FreeSpan firstFreeSpan;
/*
* One of AllocKind constants or AllocKind::LIMIT when the arena does not
* contain any GC things and is on the list of empty arenas in the GC
@@ -436,7 +471,13 @@ struct ArenaHeader
ArenaHeader() { setAsNotAllocated(); }
-inline uintptr_t address() const;
+uintptr_t address() const {
+checkAddress();
+return uintptr_t(this);
+}
+inline void checkAddress() const;
inline Chunk* chunk() const;
bool allocated() const {
@@ -472,6 +513,7 @@ struct ArenaHeader
}
Arena* getArena() { return reinterpret_cast<Arena*>(address()); }
+FreeSpan* getFirstFreeSpan() { return &firstFreeSpan; }
AllocKind getAllocKind() const {
MOZ_ASSERT(allocated());
@@ -481,32 +523,24 @@ struct ArenaHeader
inline size_t getThingSize() const;
bool hasFreeThings() const {
-return !firstFreeSpan.isEmpty(this);
+return !firstFreeSpan.isEmpty();
}
size_t numFreeThings(size_t thingSize) const {
firstFreeSpan.checkSpan(this);
size_t numFree = 0;
const FreeSpan* span = &firstFreeSpan;
-for (; !span->isEmpty(this); span = span->nextSpan(this))
+for (; !span->isEmpty(); span = span->nextSpan(this))
numFree += (span->last - span->first) / thingSize + 1;
return numFree;
}
inline bool isEmpty() const;
-static size_t offsetOfFreeSpanFirst() {
-return offsetof(ArenaHeader, firstFreeSpan) + offsetof(FreeSpan, first);
-}
-static size_t offsetOfFreeSpanLast() {
-return offsetof(ArenaHeader, firstFreeSpan) + offsetof(FreeSpan, last);
-}
bool inFreeList(uintptr_t thing) {
uintptr_t base = address();
const FreeSpan* span = &firstFreeSpan;
-for (; !span->isEmpty(this); span = span->nextSpan(this)) {
+for (; !span->isEmpty(); span = span->nextSpan(this)) {
/* If the thing comes before the current span, it's not free. */
if (thing < base + span->first)
return false;
@@ -518,24 +552,6 @@ struct ArenaHeader
return false;
}
-MOZ_ALWAYS_INLINE TenuredCell* allocate(size_t thingSize) {
-firstFreeSpan.checkSpan(this);
-uintptr_t thing = uintptr_t(this) + firstFreeSpan.first;
-if (firstFreeSpan.first < firstFreeSpan.last) {
-// We have space for at least two more things, so do a simple bump-allocate.
-firstFreeSpan.first += thingSize;
-} else if (MOZ_LIKELY(firstFreeSpan.first)) {
-// The last space points to the next free span (which may be empty).
-firstFreeSpan = *firstFreeSpan.nextSpan(this);
-} else {
-return nullptr; // The span is empty.
-}
-firstFreeSpan.checkSpan(this);
-JS_EXTRA_POISON(reinterpret_cast<void*>(thing), JS_ALLOCATED_TENURED_PATTERN, thingSize);
-MemProfiler::SampleTenured(reinterpret_cast<void*>(thing), thingSize);
-return reinterpret_cast<TenuredCell*>(thing);
-}
inline ArenaHeader* getNextDelayedMarking() const;
inline void setNextDelayedMarking(ArenaHeader* aheader);
inline void unsetDelayedMarking();
@@ -631,6 +647,8 @@ FreeSpan::checkSpan(const ArenaHeader* aheader) const
return;
}
+aheader->checkAddress();
checkRange(first, last, aheader);
// If there's a following span, it must have a higher address,
@@ -1006,14 +1024,21 @@ class HeapUsage
}
};
-inline uintptr_t
-ArenaHeader::address() const
+inline ArenaHeader*
+FreeSpan::getArena()
{
-uintptr_t addr = reinterpret_cast<uintptr_t>(this);
+ArenaHeader* arena = getArenaUnchecked();
+arena->checkAddress();
+return arena;
+}
+inline void
+ArenaHeader::checkAddress() const
+{
+mozilla::DebugOnly<uintptr_t> addr = uintptr_t(this);
MOZ_ASSERT(addr);
MOZ_ASSERT(!(addr & ArenaMask));
MOZ_ASSERT(Chunk::withinValidRange(addr));
-return addr;
}
inline Chunk*

View file

@@ -777,7 +777,7 @@ MacroAssembler::nurseryAllocate(Register result, Register temp, gc::AllocKind al
}
}
-// Inlined version of ArenaHeader::allocate. This does not fill in slots_.
+// Inlined version of FreeSpan::allocate. This does not fill in slots_.
void
MacroAssembler::freeListAllocate(Register result, Register temp, gc::AllocKind allocKind, Label* fail)
{
@@ -790,14 +790,14 @@ MacroAssembler::freeListAllocate(Register result, Register temp, gc::AllocKind a
// Load the first and last offsets of |zone|'s free list for |allocKind|.
// If there is no room remaining in the span, fall back to get the next one.
loadPtr(AbsoluteAddress(zone->addressOfFreeList(allocKind)), temp);
-load16ZeroExtend(Address(temp, js::gc::ArenaHeader::offsetOfFreeSpanFirst()), result);
-load16ZeroExtend(Address(temp, js::gc::ArenaHeader::offsetOfFreeSpanLast()), temp);
+load16ZeroExtend(Address(temp, js::gc::FreeSpan::offsetOfFirst()), result);
+load16ZeroExtend(Address(temp, js::gc::FreeSpan::offsetOfLast()), temp);
branch32(Assembler::AboveOrEqual, result, temp, &fallback);
// Bump the offset for the next allocation.
add32(Imm32(thingSize), result);
loadPtr(AbsoluteAddress(zone->addressOfFreeList(allocKind)), temp);
-store16(result, Address(temp, js::gc::ArenaHeader::offsetOfFreeSpanFirst()));
+store16(result, Address(temp, js::gc::FreeSpan::offsetOfFirst()));
sub32(Imm32(thingSize), result);
addPtr(temp, result); // Turn the offset into a pointer.
jump(&success);
@@ -812,7 +812,7 @@ MacroAssembler::freeListAllocate(Register result, Register temp, gc::AllocKind a
Push(result);
// Update the free list to point to the next span (which may be empty).
load32(Address(result, 0), result);
-store32(result, Address(temp, js::gc::ArenaHeader::offsetOfFreeSpanFirst()));
+store32(result, Address(temp, js::gc::FreeSpan::offsetOfFirst()));
Pop(result);
bind(&success);
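In C++ terms, the jitted fast path above is roughly equivalent to the following (a sketch of the emitted logic, not code that exists in the tree):

    // span is the FreeSpan* stored in the zone's free list for this AllocKind.
    uint16_t first = span->first;
    uint16_t last = span->last;
    if (first >= last) {
        // No room for two more things: take the fallback path, which installs the
        // next span (stored in the last free cell) into the free list or bails to |fail|.
    } else {
        span->first = first + thingSize;                  // bump the offset
        result = reinterpret_cast<char*>(span) + first;   // span address == arena address
    }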

View file

@@ -302,7 +302,7 @@ const uint32_t Arena::ThingSizes[] = CHECK_THING_SIZE(
sizeof(jit::JitCode), /* AllocKind::JITCODE */
);
-ArenaHeader ArenaLists::placeholder;
+FreeSpan ArenaLists::placeholder;
#undef CHECK_THING_SIZE_INNER
#undef CHECK_THING_SIZE
@@ -1951,9 +1951,10 @@ inline void
ArenaLists::prepareForIncrementalGC(JSRuntime* rt)
{
for (auto i : AllAllocKinds()) {
-ArenaHeader* aheader = freeLists[i];
-if (aheader != &placeholder) {
-if (aheader->hasFreeThings()) {
+FreeSpan* span = freeLists[i];
+if (span != &placeholder) {
+if (!span->isEmpty()) {
+ArenaHeader* aheader = span->getArena();
aheader->allocatedDuringIncremental = true;
rt->gc.marker.delayMarkingArena(aheader);
} else {

View file

@@ -594,12 +594,12 @@ class ArenaLists
* GC we only move the head of the list of spans back to the arena
* only for the arena that was not fully allocated.
*/
-AllAllocKindArray<ArenaHeader*> freeLists;
+AllAllocKindArray<FreeSpan*> freeLists;
// Because the JITs can allocate from the free lists, they cannot be null.
-// We use a placeholder ArenaHeader with an empty span (and no associated
+// We use a placeholder FreeSpan that is empty (and without an associated
// Arena) so the JITs can fall back gracefully.
-static ArenaHeader placeholder;
+static FreeSpan placeholder;
AllAllocKindArray<ArenaList> arenaLists;
@@ -716,7 +716,7 @@ class ArenaLists
/* Check if |aheader|'s arena is in use. */
bool arenaIsInUse(ArenaHeader* aheader, AllocKind kind) const {
MOZ_ASSERT(aheader);
-return aheader == freeLists[kind];
+return aheader == freeLists[kind]->getArenaUnchecked();
}
MOZ_ALWAYS_INLINE TenuredCell* allocateFromFreeList(AllocKind thingKind, size_t thingSize) {
@@ -739,7 +739,7 @@ class ArenaLists
}
void checkEmptyFreeList(AllocKind kind) {
-MOZ_ASSERT(!freeLists[kind]->hasFreeThings());
+MOZ_ASSERT(freeLists[kind]->isEmpty());
}
bool relocateArenas(Zone* zone, ArenaHeader*& relocatedListOut, JS::gcreason::Reason reason,
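The placeholder keeps freeLists entries non-null for the JIT even when no arena is bound: it is never given a span, so its first offset stays 0 ("empty") and both FreeSpan::allocate() and the jitted fast path fall through to their slow paths. A hypothetical illustration of the pattern (clearFreeList is an invented name, not an actual ArenaLists method):

    FreeSpan ArenaLists::placeholder;  // static and left empty forever

    void ArenaLists::clearFreeList(AllocKind kind) {
        freeLists[kind] = &placeholder;  // never null, so jitted code can always read it
    }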

View file

@@ -145,7 +145,7 @@ class ArenaCellIterImpl
void reset(ArenaHeader* aheader) {
MOZ_ASSERT(isInited);
arenaAddr = aheader;
-span = aheader->firstFreeSpan;
+span = *aheader->getFirstFreeSpan();
thing = firstThingOffset;
moveForwardIfFree();
}