Mirror of https://github.com/mozilla/pjs.git
Bug 751003 - Move Cell, Chunk, Arena, ArenaHeader, FreeSpan, ChunkInfo, ChunkBitmap, your mom into a new gc/Heap.h header that doesn't have any used-but-not-defined traps for the unwary #include-r. r=billm
--HG-- extra : rebase_source : 5ef81fb9032ac5c13fb4cf2a6107a0f63343caa1
This commit is contained in:
Parent: b1ed75de19
Commit: f1b63963d7
js/src/Makefile.in

@@ -196,7 +196,6 @@ INSTALLED_HEADERS = \
        jsdhash.h \
        jsfriendapi.h \
        jsgc.h \
        jscell.h \
        jshash.h \
        jslock.h \
        json.h \
@@ -223,9 +222,10 @@ EXPORTS_ds = \
        $(NULL)

EXPORTS_gc = \
        Statistics.h \
        Barrier.h \
        Heap.h \
        Root.h \
        Statistics.h \
        $(NULL)

######################################################
js/src/gc/Barrier.h

@@ -41,8 +41,8 @@
#define jsgc_barrier_h___

#include "jsapi.h"
#include "jscell.h"

#include "gc/Heap.h"
#include "js/HashTable.h"

/*
js/src/gc/Heap.h

@@ -0,0 +1,984 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 */
/* This Source Code Form is subject to the terms of the Mozilla Public
 * License, v. 2.0. If a copy of the MPL was not distributed with this file,
 * You can obtain one at http://mozilla.org/MPL/2.0/. */

#ifndef gc_heap_h___
#define gc_heap_h___

#include "mozilla/Attributes.h"
#include "mozilla/StandardInteger.h"

#include <stddef.h>

#include "jstypes.h"
#include "jsutil.h"

#include "ds/BitArray.h"

struct JSCompartment;

extern "C" {
struct JSRuntime;
}

namespace js {

class FreeOp;

namespace gc {

struct Arena;
struct ArenaHeader;
struct Chunk;

/*
 * Live objects are marked black. How many additional colors are available
 * depends on the size of the GCThing. Objects marked gray are eligible for
 * cycle collection.
 */
static const uint32_t BLACK = 0;
static const uint32_t GRAY = 1;

/* The GC allocation kinds. */
enum AllocKind {
    FINALIZE_OBJECT0,
    FINALIZE_OBJECT0_BACKGROUND,
    FINALIZE_OBJECT2,
    FINALIZE_OBJECT2_BACKGROUND,
    FINALIZE_OBJECT4,
    FINALIZE_OBJECT4_BACKGROUND,
    FINALIZE_OBJECT8,
    FINALIZE_OBJECT8_BACKGROUND,
    FINALIZE_OBJECT12,
    FINALIZE_OBJECT12_BACKGROUND,
    FINALIZE_OBJECT16,
    FINALIZE_OBJECT16_BACKGROUND,
    FINALIZE_OBJECT_LAST = FINALIZE_OBJECT16_BACKGROUND,
    FINALIZE_SCRIPT,
    FINALIZE_SHAPE,
    FINALIZE_BASE_SHAPE,
    FINALIZE_TYPE_OBJECT,
#if JS_HAS_XML_SUPPORT
    FINALIZE_XML,
#endif
    FINALIZE_SHORT_STRING,
    FINALIZE_STRING,
    FINALIZE_EXTERNAL_STRING,
    FINALIZE_LAST = FINALIZE_EXTERNAL_STRING
};

static const unsigned FINALIZE_LIMIT = FINALIZE_LAST + 1;
static const unsigned FINALIZE_OBJECT_LIMIT = FINALIZE_OBJECT_LAST + 1;

/*
 * This must be an upper bound, but we do not need the least upper bound, so
 * we just exclude non-background objects.
 */
static const size_t MAX_BACKGROUND_FINALIZE_KINDS = FINALIZE_LIMIT - FINALIZE_OBJECT_LIMIT / 2;
/*
 * A GC cell is the base class for all GC things.
 */
struct Cell
{
    static const size_t CellShift = 3;
    static const size_t CellSize = size_t(1) << CellShift;
    static const size_t CellMask = CellSize - 1;

    inline uintptr_t address() const;
    inline ArenaHeader *arenaHeader() const;
    inline Chunk *chunk() const;
    inline AllocKind getAllocKind() const;
    MOZ_ALWAYS_INLINE bool isMarked(uint32_t color = BLACK) const;
    MOZ_ALWAYS_INLINE bool markIfUnmarked(uint32_t color = BLACK) const;
    MOZ_ALWAYS_INLINE void unmark(uint32_t color) const;

    inline JSCompartment *compartment() const;

#ifdef DEBUG
    inline bool isAligned() const;
#endif
};
/*
 * Page size is 4096 by default, except for SPARC, where it is 8192.
 * Note: Do not use JS_CPU_SPARC here, this header is used outside JS.
 * Bug 692267: Move page size definition to gc/Memory.h and include it
 * directly once jsgc.h is no longer an installed header.
 */
#if defined(SOLARIS) && (defined(__sparc) || defined(__sparcv9))
const size_t PageShift = 13;
#else
const size_t PageShift = 12;
#endif
const size_t PageSize = size_t(1) << PageShift;

const size_t ChunkShift = 20;
const size_t ChunkSize = size_t(1) << ChunkShift;
const size_t ChunkMask = ChunkSize - 1;

const size_t ArenaShift = PageShift;
const size_t ArenaSize = PageSize;
const size_t ArenaMask = ArenaSize - 1;

/*
 * This is the maximum number of arenas we allow in the FreeCommitted state
 * before we trigger a GC_SHRINK to release free arenas to the OS.
 */
const static uint32_t FreeCommittedArenasThreshold = (32 << 20) / ArenaSize;

/*
 * The mark bitmap has one bit per GC cell. For multi-cell GC things this
 * wastes space but allows us to avoid expensive divisions by the thing's
 * size when accessing the bitmap. In addition it lets us use some of the
 * bits for colored marking during the cycle GC.
 */
const size_t ArenaCellCount = size_t(1) << (ArenaShift - Cell::CellShift);
const size_t ArenaBitmapBits = ArenaCellCount;
const size_t ArenaBitmapBytes = ArenaBitmapBits / 8;
const size_t ArenaBitmapWords = ArenaBitmapBits / JS_BITS_PER_WORD;
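With PageShift = 12 (the non-SPARC case) the definitions above pin down every size in the allocator. A minimal standalone check of the arithmetic, assuming a 64-bit word; this sketch is illustrative and not part of the patch:

#include <cassert>
#include <cstddef>

int main() {
    // Mirror the definitions above, assuming PageShift == 12 and 64-bit words.
    const size_t PageSize = size_t(1) << 12;     // 4096
    const size_t ArenaSize = PageSize;           // one arena per page
    const size_t ChunkSize = size_t(1) << 20;    // 1 MiB
    const size_t CellSize = size_t(1) << 3;      // 8-byte cells

    assert(ArenaSize / CellSize == 512);         // ArenaCellCount == ArenaBitmapBits
    assert(512 / 8 == 64);                       // ArenaBitmapBytes per arena
    assert(512 / 64 == 8);                       // ArenaBitmapWords on a 64-bit build
    assert(ChunkSize / ArenaSize == 256);        // raw arena slots before header overhead
    return 0;
}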
/*
 * A FreeSpan represents a contiguous sequence of free cells in an Arena.
 * |first| is the address of the first free cell in the span. |last| is the
 * address of the last free cell in the span. This last cell holds a FreeSpan
 * data structure for the next span unless this is the last span on the list
 * of spans in the arena. For this last span |last| points to the last byte of
 * the last thing in the arena and no linkage is stored there, so
 * |last| == arenaStart + ArenaSize - 1. If the space at the arena end is
 * fully used this last span is empty and |first| == |last| + 1.
 *
 * Thus |first| < |last| implies that we have either the last span with at
 * least one element or that the span is not the last and contains at least 2
 * elements. In both cases to allocate a thing from this span we need simply
 * to increment |first| by the allocation size.
 *
 * |first| == |last| implies that we have a one element span that records the
 * next span. So to allocate from it we need to update the span list head
 * with a copy of the span stored at |last| address so the following
 * allocations will use that span.
 *
 * |first| > |last| implies that we have an empty last span and the arena is
 * fully used.
 *
 * Also only for the last span (|last| & 1) != 0 as all allocation sizes are
 * multiples of Cell::CellSize.
 */
struct FreeSpan
{
    uintptr_t first;
    uintptr_t last;

  public:
    FreeSpan() {}

    FreeSpan(uintptr_t first, uintptr_t last)
      : first(first), last(last) {
        checkSpan();
    }

    /*
     * To minimize the size of the arena header the first span is encoded
     * there as offsets from the arena start.
     */
    static size_t encodeOffsets(size_t firstOffset, size_t lastOffset) {
        /* Check that we can pack the offsets into uint16. */
        JS_STATIC_ASSERT(ArenaShift < 16);
        JS_ASSERT(firstOffset <= ArenaSize);
        JS_ASSERT(lastOffset < ArenaSize);
        JS_ASSERT(firstOffset <= ((lastOffset + 1) & ~size_t(1)));
        return firstOffset | (lastOffset << 16);
    }

    /*
     * Encoded offsets for a full arena when its first span is the last one
     * and empty.
     */
    static const size_t FullArenaOffsets = ArenaSize | ((ArenaSize - 1) << 16);

    static FreeSpan decodeOffsets(uintptr_t arenaAddr, size_t offsets) {
        JS_ASSERT(!(arenaAddr & ArenaMask));

        size_t firstOffset = offsets & 0xFFFF;
        size_t lastOffset = offsets >> 16;
        JS_ASSERT(firstOffset <= ArenaSize);
        JS_ASSERT(lastOffset < ArenaSize);

        /*
         * We must not use | when calculating first as firstOffset is
         * ArenaMask + 1 for the empty span.
         */
        return FreeSpan(arenaAddr + firstOffset, arenaAddr | lastOffset);
    }

    void initAsEmpty(uintptr_t arenaAddr = 0) {
        JS_ASSERT(!(arenaAddr & ArenaMask));
        first = arenaAddr + ArenaSize;
        last = arenaAddr | (ArenaSize - 1);
        JS_ASSERT(isEmpty());
    }

    bool isEmpty() const {
        checkSpan();
        return first > last;
    }

    bool hasNext() const {
        checkSpan();
        return !(last & uintptr_t(1));
    }

    const FreeSpan *nextSpan() const {
        JS_ASSERT(hasNext());
        return reinterpret_cast<FreeSpan *>(last);
    }

    FreeSpan *nextSpanUnchecked(size_t thingSize) const {
#ifdef DEBUG
        uintptr_t lastOffset = last & ArenaMask;
        JS_ASSERT(!(lastOffset & 1));
        JS_ASSERT((ArenaSize - lastOffset) % thingSize == 0);
#endif
        return reinterpret_cast<FreeSpan *>(last);
    }

    uintptr_t arenaAddressUnchecked() const {
        return last & ~ArenaMask;
    }

    uintptr_t arenaAddress() const {
        checkSpan();
        return arenaAddressUnchecked();
    }

    ArenaHeader *arenaHeader() const {
        return reinterpret_cast<ArenaHeader *>(arenaAddress());
    }

    bool isSameNonEmptySpan(const FreeSpan *another) const {
        JS_ASSERT(!isEmpty());
        JS_ASSERT(!another->isEmpty());
        return first == another->first && last == another->last;
    }

    bool isWithinArena(uintptr_t arenaAddr) const {
        JS_ASSERT(!(arenaAddr & ArenaMask));

        /* Return true for the last empty span as well. */
        return arenaAddress() == arenaAddr;
    }

    size_t encodeAsOffsets() const {
        /*
         * We must use first - arenaAddress(), not first & ArenaMask as
         * first == ArenaMask + 1 for an empty span.
         */
        uintptr_t arenaAddr = arenaAddress();
        return encodeOffsets(first - arenaAddr, last & ArenaMask);
    }

    /* See comments before FreeSpan for details. */
    MOZ_ALWAYS_INLINE void *allocate(size_t thingSize) {
        JS_ASSERT(thingSize % Cell::CellSize == 0);
        checkSpan();
        uintptr_t thing = first;
        if (thing < last) {
            /* Bump-allocate from the current span. */
            first = thing + thingSize;
        } else if (JS_LIKELY(thing == last)) {
            /*
             * Move to the next span. We use JS_LIKELY as without PGO
             * compilers mis-predict == here as unlikely to succeed.
             */
            *this = *reinterpret_cast<FreeSpan *>(thing);
        } else {
            return NULL;
        }
        checkSpan();
        return reinterpret_cast<void *>(thing);
    }

    /* A version of allocate when we know that the span is not empty. */
    MOZ_ALWAYS_INLINE void *infallibleAllocate(size_t thingSize) {
        JS_ASSERT(thingSize % Cell::CellSize == 0);
        checkSpan();
        uintptr_t thing = first;
        if (thing < last) {
            first = thing + thingSize;
        } else {
            JS_ASSERT(thing == last);
            *this = *reinterpret_cast<FreeSpan *>(thing);
        }
        checkSpan();
        return reinterpret_cast<void *>(thing);
    }

    /*
     * Allocate from a newly allocated arena. We do not move the free list
     * from the arena. Rather we set the arena up as fully used during the
     * initialization so to allocate we simply return the first thing in the
     * arena and set the free list to point to the second.
     */
    MOZ_ALWAYS_INLINE void *allocateFromNewArena(uintptr_t arenaAddr, size_t firstThingOffset,
                                                 size_t thingSize) {
        JS_ASSERT(!(arenaAddr & ArenaMask));
        uintptr_t thing = arenaAddr | firstThingOffset;
        first = thing + thingSize;
        last = arenaAddr | ArenaMask;
        checkSpan();
        return reinterpret_cast<void *>(thing);
    }

    void checkSpan() const {
#ifdef DEBUG
        /* We do not allow spans at the end of the address space. */
        JS_ASSERT(last != uintptr_t(-1));
        JS_ASSERT(first);
        JS_ASSERT(last);
        JS_ASSERT(first - 1 <= last);
        uintptr_t arenaAddr = arenaAddressUnchecked();
        if (last & 1) {
            /* The span is the last. */
            JS_ASSERT((last & ArenaMask) == ArenaMask);

            if (first - 1 == last) {
                /*
                 * The span is last and empty. The above first != 0 check
                 * implies that we are not at the end of the address space.
                 */
                return;
            }
            size_t spanLength = last - first + 1;
            JS_ASSERT(spanLength % Cell::CellSize == 0);

            /* Start and end must belong to the same arena. */
            JS_ASSERT((first & ~ArenaMask) == arenaAddr);
            return;
        }

        /* The span is not the last and we have more spans to follow. */
        JS_ASSERT(first <= last);
        size_t spanLengthWithoutOneThing = last - first;
        JS_ASSERT(spanLengthWithoutOneThing % Cell::CellSize == 0);

        JS_ASSERT((first & ~ArenaMask) == arenaAddr);

        /*
         * If there is not enough space before the arena end to allocate one
         * more thing, then the span must be marked as the last one to avoid
         * storing a useless empty span reference.
         */
        size_t beforeTail = ArenaSize - (last & ArenaMask);
        JS_ASSERT(beforeTail >= sizeof(FreeSpan) + Cell::CellSize);

        FreeSpan *next = reinterpret_cast<FreeSpan *>(last);

        /*
         * The GC things on the list of free spans come from one arena
         * and the spans are linked in ascending address order with
         * at least one non-free thing between spans.
         */
        JS_ASSERT(last < next->first);
        JS_ASSERT(arenaAddr == next->arenaAddressUnchecked());

        if (next->first > next->last) {
            /*
             * The next span is the empty span that terminates the list for
             * arenas that do not have any free things at the end.
             */
            JS_ASSERT(next->first - 1 == next->last);
            JS_ASSERT(arenaAddr + ArenaSize == next->first);
        }
#endif
    }
};
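The encodeOffsets/decodeOffsets pair above is the subtlest part of FreeSpan. The same 16-bit packing can be checked in isolation; a standalone sketch assuming ArenaSize = 4096, illustrative only and not part of the patch:

#include <cassert>
#include <cstddef>
#include <cstdio>

int main() {
    const size_t ArenaSize = 4096;

    // Pack a span covering offsets [64, 4095]: the low 16 bits hold
    // firstOffset, the high bits hold lastOffset, as encodeOffsets does.
    size_t packed = 64 | ((ArenaSize - 1) << 16);
    assert((packed & 0xFFFF) == 64);             // decodeOffsets: firstOffset
    assert((packed >> 16) == ArenaSize - 1);     // decodeOffsets: lastOffset

    // A fully used arena stores the canonical empty last span, where
    // first == last + 1 (this is FreeSpan::FullArenaOffsets).
    size_t full = ArenaSize | ((ArenaSize - 1) << 16);
    assert((full & 0xFFFF) == (full >> 16) + 1);
    printf("packed=%#zx full=%#zx\n", packed, full);
    return 0;
}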
/* Every arena has a header. */
struct ArenaHeader
{
    friend struct FreeLists;

    JSCompartment *compartment;

    /*
     * ArenaHeader::next has two purposes: when unallocated, it points to the
     * next available Arena's header. When allocated, it points to the next
     * arena of the same size class and compartment.
     */
    ArenaHeader *next;

  private:
    /*
     * The first span of free things in the arena. We encode it as the start
     * and end offsets within the arena, not as FreeSpan structure, to
     * minimize the header size.
     */
    size_t firstFreeSpanOffsets;

    /*
     * One of AllocKind constants or FINALIZE_LIMIT when the arena does not
     * contain any GC things and is on the list of empty arenas in the GC
     * chunk. The latter allows us to quickly check if the arena is allocated
     * during the conservative GC scanning without searching the arena in the
     * list.
     */
    size_t allocKind : 8;

    /*
     * When recursive marking uses too much stack the marking is delayed and
     * the corresponding arenas are put into a stack using the following field
     * as a linkage. To distinguish the bottom of the stack from the arenas
     * not present in the stack we use an extra flag to tag arenas on the
     * stack.
     *
     * Delayed marking is also used for arenas that we allocate into during an
     * incremental GC. In this case, we intend to mark all the objects in the
     * arena, and it's faster to do this marking in bulk.
     *
     * To minimize the ArenaHeader size we record the next delayed marking
     * linkage as arenaAddress() >> ArenaShift and pack it with the allocKind
     * field and hasDelayedMarking flag. We use 8 bits for the allocKind, not
     * ArenaShift - 1, so the compiler can use byte-level memory instructions
     * to access it.
     */
  public:
    size_t hasDelayedMarking : 1;
    size_t allocatedDuringIncremental : 1;
    size_t markOverflow : 1;
    size_t nextDelayedMarking : JS_BITS_PER_WORD - 8 - 1 - 1 - 1;

    static void staticAsserts() {
        /* We must be able to fit the allocKind into uint8_t. */
        JS_STATIC_ASSERT(FINALIZE_LIMIT <= 255);

        /*
         * The nextDelayedMarking packing assumes that ArenaShift has enough
         * bits to cover allocKind and hasDelayedMarking.
         */
        JS_STATIC_ASSERT(ArenaShift >= 8 + 1 + 1 + 1);
    }

    inline uintptr_t address() const;
    inline Chunk *chunk() const;

    bool allocated() const {
        JS_ASSERT(allocKind <= size_t(FINALIZE_LIMIT));
        return allocKind < size_t(FINALIZE_LIMIT);
    }

    void init(JSCompartment *comp, AllocKind kind) {
        JS_ASSERT(!allocated());
        JS_ASSERT(!markOverflow);
        JS_ASSERT(!allocatedDuringIncremental);
        JS_ASSERT(!hasDelayedMarking);
        compartment = comp;

        JS_STATIC_ASSERT(FINALIZE_LIMIT <= 255);
        allocKind = size_t(kind);

        /* See comments in FreeSpan::allocateFromNewArena. */
        firstFreeSpanOffsets = FreeSpan::FullArenaOffsets;
    }

    void setAsNotAllocated() {
        allocKind = size_t(FINALIZE_LIMIT);
        markOverflow = 0;
        allocatedDuringIncremental = 0;
        hasDelayedMarking = 0;
        nextDelayedMarking = 0;
    }

    inline uintptr_t arenaAddress() const;
    inline Arena *getArena();

    AllocKind getAllocKind() const {
        JS_ASSERT(allocated());
        return AllocKind(allocKind);
    }

    inline size_t getThingSize() const;

    bool hasFreeThings() const {
        return firstFreeSpanOffsets != FreeSpan::FullArenaOffsets;
    }

    inline bool isEmpty() const;

    void setAsFullyUsed() {
        firstFreeSpanOffsets = FreeSpan::FullArenaOffsets;
    }

    inline FreeSpan getFirstFreeSpan() const;
    inline void setFirstFreeSpan(const FreeSpan *span);

#ifdef DEBUG
    void checkSynchronizedWithFreeList() const;
#endif

    inline ArenaHeader *getNextDelayedMarking() const;
    inline void setNextDelayedMarking(ArenaHeader *aheader);
};
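The packing described in the comment above can also be checked in isolation. A sketch with a hypothetical stand-in for ArenaHeader's flag word, assuming ArenaShift = 12 and conventional bitfield layout (GCC, Clang, and MSVC all pack this into one word); this is not the real ArenaHeader:

#include <cassert>
#include <cstdint>

// Hypothetical header word: 8 bits of allocKind, three 1-bit flags, and the
// rest of the word for the shifted address of the next delayed-marking arena.
struct PackedHeader {
    uintptr_t allocKind : 8;
    uintptr_t hasDelayedMarking : 1;
    uintptr_t allocatedDuringIncremental : 1;
    uintptr_t markOverflow : 1;
    uintptr_t nextDelayedMarking : sizeof(uintptr_t) * 8 - 8 - 1 - 1 - 1;
};

int main() {
    const uintptr_t ArenaShift = 12;
    uintptr_t arenaAddr = 0x40012000;            // 4096-aligned by construction

    PackedHeader h = PackedHeader();
    h.hasDelayedMarking = 1;
    h.nextDelayedMarking = arenaAddr >> ArenaShift;   // low 12 bits are always 0

    // getNextDelayedMarking() recovers the address by shifting back.
    assert((uintptr_t(h.nextDelayedMarking) << ArenaShift) == arenaAddr);
    assert(sizeof(PackedHeader) == sizeof(uintptr_t)); // all fields share one word
    return 0;
}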
struct Arena
{
    /*
     * Layout of an arena:
     * An arena is 4K in size and 4K-aligned. It starts with the ArenaHeader
     * descriptor followed by some pad bytes. The remainder of the arena is
     * filled with the array of T things. The pad bytes ensure that the thing
     * array ends exactly at the end of the arena.
     *
     * +-------------+-----+----+----+-----+----+
     * | ArenaHeader | pad | T0 | T1 | ... | Tn |
     * +-------------+-----+----+----+-----+----+
     *
     * <----------------------------------------> = ArenaSize bytes
     * <-------------------> = first thing offset
     */
    ArenaHeader aheader;
    uint8_t data[ArenaSize - sizeof(ArenaHeader)];

  private:
    static JS_FRIEND_DATA(const uint32_t) ThingSizes[];
    static JS_FRIEND_DATA(const uint32_t) FirstThingOffsets[];

  public:
    static void staticAsserts();

    static size_t thingSize(AllocKind kind) {
        return ThingSizes[kind];
    }

    static size_t firstThingOffset(AllocKind kind) {
        return FirstThingOffsets[kind];
    }

    static size_t thingsPerArena(size_t thingSize) {
        JS_ASSERT(thingSize % Cell::CellSize == 0);

        /* We should be able to fit FreeSpan in any GC thing. */
        JS_ASSERT(thingSize >= sizeof(FreeSpan));

        return (ArenaSize - sizeof(ArenaHeader)) / thingSize;
    }

    static size_t thingsSpan(size_t thingSize) {
        return thingsPerArena(thingSize) * thingSize;
    }

    static bool isAligned(uintptr_t thing, size_t thingSize) {
        /* Things end at the arena end. */
        uintptr_t tailOffset = (ArenaSize - thing) & ArenaMask;
        return tailOffset % thingSize == 0;
    }

    uintptr_t address() const {
        return aheader.address();
    }

    uintptr_t thingsStart(AllocKind thingKind) {
        return address() | firstThingOffset(thingKind);
    }

    uintptr_t thingsEnd() {
        return address() + ArenaSize;
    }

    template <typename T>
    bool finalize(FreeOp *fop, AllocKind thingKind, size_t thingSize);
};
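The layout comment above determines both thingsPerArena and the first thing offset. A worked example, assuming a 4096-byte arena, 64-byte things, and a hypothetical 32-byte ArenaHeader (the real header size depends on the platform); not part of the patch:

#include <cassert>
#include <cstddef>

int main() {
    const size_t ArenaSize = 4096;
    const size_t headerSize = 32;                // illustrative; real size varies
    const size_t thingSize = 64;

    // Things fill whatever is left after the header; the remainder becomes pad.
    size_t thingsPerArena = (ArenaSize - headerSize) / thingSize;      // 63 things
    size_t firstThingOffset = ArenaSize - thingsPerArena * thingSize;  // header + pad
    assert(thingsPerArena == 63);
    assert(firstThingOffset == 64);              // 32 header bytes + 32 pad bytes
    // The pad guarantees the thing array ends exactly at the arena end.
    assert(firstThingOffset + thingsPerArena * thingSize == ArenaSize);
    return 0;
}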
inline size_t
ArenaHeader::getThingSize() const
{
    JS_ASSERT(allocated());
    return Arena::thingSize(getAllocKind());
}

/* The chunk header (located at the end of the chunk to preserve arena alignment). */
struct ChunkInfo
{
    Chunk *next;
    Chunk **prevp;

    /* Free arenas are linked together with aheader.next. */
    ArenaHeader *freeArenasHead;

    /*
     * Decommitted arenas are tracked by a bitmap in the chunk header. We use
     * this offset to start our search iteration close to a decommitted arena
     * that we can allocate.
     */
    uint32_t lastDecommittedArenaOffset;

    /* Number of free arenas, either committed or decommitted. */
    uint32_t numArenasFree;

    /* Number of free, committed arenas. */
    uint32_t numArenasFreeCommitted;

    /* Number of GC cycles this chunk has survived. */
    uint32_t age;
};

/*
 * Calculating ArenasPerChunk:
 *
 * In order to figure out how many Arenas will fit in a chunk, we need to know
 * how much extra space is available after we allocate the header data. This
 * is a problem because the header size depends on the number of arenas in the
 * chunk. The two dependent fields are bitmap and decommittedArenas.
 *
 * For the mark bitmap, we know that each arena will use a fixed number of full
 * bytes: ArenaBitmapBytes. The full size of the header data is this number
 * multiplied by the eventual number of arenas we have in the header. We,
 * conceptually, distribute this header data among the individual arenas and do
 * not include it in the header. This way we do not have to worry about its
 * variable size: it gets attached to the variable number we are computing.
 *
 * For the decommitted arena bitmap, we only have 1 bit per arena, so this
 * technique will not work. Instead, we observe that we do not have enough
 * header info to fill 8 full arenas: it is currently 4 on 64bit, less on
 * 32bit. Thus, with current numbers, we need 64 bytes for decommittedArenas.
 * This will not become 63 bytes unless we double the data required in the
 * header. Therefore, we just compute the number of bytes required to track
 * every possible arena and do not worry about slop bits, since there are too
 * few to usefully allocate.
 *
 * To actually compute the number of arenas we can allocate in a chunk, we
 * divide the amount of available space less the header info (not including
 * the mark bitmap which is distributed into the arena size) by the size of
 * the arena (with the mark bitmap bytes it uses).
 */
const size_t BytesPerArenaWithHeader = ArenaSize + ArenaBitmapBytes;
const size_t ChunkDecommitBitmapBytes = ChunkSize / ArenaSize / JS_BITS_PER_BYTE;
const size_t ChunkBytesAvailable = ChunkSize - sizeof(ChunkInfo) - ChunkDecommitBitmapBytes;
const size_t ArenasPerChunk = ChunkBytesAvailable / BytesPerArenaWithHeader;
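Plugging in concrete numbers makes the division above less abstract. A standalone walk through the arithmetic, assuming a 64-bit build and an illustrative 40-byte ChunkInfo (the real sizeof(ChunkInfo) depends on the platform); not part of the patch:

#include <cassert>
#include <cstddef>

int main() {
    const size_t ChunkSize = size_t(1) << 20;    // 1 MiB
    const size_t ArenaSize = 4096;
    const size_t ArenaBitmapBytes = 512 / 8;     // 64 mark-bitmap bytes per arena

    const size_t ChunkDecommitBitmapBytes = ChunkSize / ArenaSize / 8;  // 32 bytes
    const size_t ChunkInfoSize = 40;             // illustrative only
    const size_t ChunkBytesAvailable = ChunkSize - ChunkInfoSize - ChunkDecommitBitmapBytes;

    // Each arena "costs" its 4096 bytes plus its 64 bytes of mark bitmap.
    const size_t BytesPerArenaWithHeader = ArenaSize + ArenaBitmapBytes;  // 4160
    size_t ArenasPerChunk = ChunkBytesAvailable / BytesPerArenaWithHeader;
    assert(ArenasPerChunk == 252);
    return 0;
}

Under these assumptions the chunk loses 4 of its raw 256 arena slots to the mark bitmap and header bookkeeping, which matches the "4 on 64bit" figure in the comment.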
/* A chunk bitmap contains enough mark bits for all the cells in a chunk. */
struct ChunkBitmap
{
    uintptr_t bitmap[ArenaBitmapWords * ArenasPerChunk];

    MOZ_ALWAYS_INLINE void getMarkWordAndMask(const Cell *cell, uint32_t color,
                                              uintptr_t **wordp, uintptr_t *maskp);

    MOZ_ALWAYS_INLINE bool isMarked(const Cell *cell, uint32_t color) {
        uintptr_t *word, mask;
        getMarkWordAndMask(cell, color, &word, &mask);
        return *word & mask;
    }

    MOZ_ALWAYS_INLINE bool markIfUnmarked(const Cell *cell, uint32_t color) {
        uintptr_t *word, mask;
        getMarkWordAndMask(cell, BLACK, &word, &mask);
        if (*word & mask)
            return false;
        *word |= mask;
        if (color != BLACK) {
            /*
             * We use getMarkWordAndMask to recalculate both mask and word as
             * doing just mask << color may overflow the mask.
             */
            getMarkWordAndMask(cell, color, &word, &mask);
            if (*word & mask)
                return false;
            *word |= mask;
        }
        return true;
    }

    MOZ_ALWAYS_INLINE void unmark(const Cell *cell, uint32_t color) {
        uintptr_t *word, mask;
        getMarkWordAndMask(cell, color, &word, &mask);
        *word &= ~mask;
    }

    void clear() {
        PodArrayZero(bitmap);
    }

#ifdef DEBUG
    bool noBitsSet(ArenaHeader *aheader) {
        /*
         * We assume that the part of the bitmap corresponding to the arena
         * has the exact number of words so we do not need to deal with a word
         * that covers bits from two arenas.
         */
        JS_STATIC_ASSERT(ArenaBitmapBits == ArenaBitmapWords * JS_BITS_PER_WORD);

        uintptr_t *word, unused;
        getMarkWordAndMask(reinterpret_cast<Cell *>(aheader->address()), BLACK, &word, &unused);
        for (size_t i = 0; i != ArenaBitmapWords; i++) {
            if (word[i])
                return false;
        }
        return true;
    }
#endif
};

JS_STATIC_ASSERT(ArenaBitmapBytes * ArenasPerChunk == sizeof(ChunkBitmap));
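The getMarkWordAndMask helper (defined near the end of the header) maps a cell to its bit. The indexing can be sketched independently, assuming a 64-bit build, 8-byte cells, and 1 MiB chunks; this is illustrative and not the real method:

#include <cassert>
#include <cstddef>
#include <cstdint>

int main() {
    const uintptr_t ChunkMask = (uintptr_t(1) << 20) - 1;
    const uintptr_t CellSize = 8;
    const unsigned bitsPerWord = sizeof(uintptr_t) * 8;   // 64 assumed below

    uintptr_t chunkBase = uintptr_t(1) << 24;    // any chunk-aligned base
    uintptr_t cellAddr = chunkBase + 0x1040;     // a cell 4160 bytes into the chunk
    uint32_t color = 1;                          // GRAY: one bit past the BLACK bit

    // The cell's chunk offset, in cell units, selects the bit; color offsets it.
    size_t bit = (cellAddr & ChunkMask) / CellSize + color;   // 520 + 1
    size_t word = bit / bitsPerWord;
    uintptr_t mask = uintptr_t(1) << (bit % bitsPerWord);

    assert(bit == 521);
    assert(word == 8);                           // bit 9 of bitmap word 8
    assert(mask == uintptr_t(1) << 9);
    return 0;
}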
typedef BitArray<ArenasPerChunk> PerArenaBitmap;

const size_t ChunkPadSize = ChunkSize
                            - (sizeof(Arena) * ArenasPerChunk)
                            - sizeof(ChunkBitmap)
                            - sizeof(PerArenaBitmap)
                            - sizeof(ChunkInfo);
JS_STATIC_ASSERT(ChunkPadSize < BytesPerArenaWithHeader);

/*
 * Chunks contain arenas and associated data structures (mark bitmap, delayed
 * marking state).
 */
struct Chunk
{
    Arena arenas[ArenasPerChunk];

    /* Pad to full size to ensure cache alignment of ChunkInfo. */
    uint8_t padding[ChunkPadSize];

    ChunkBitmap bitmap;
    PerArenaBitmap decommittedArenas;
    ChunkInfo info;

    static Chunk *fromAddress(uintptr_t addr) {
        addr &= ~ChunkMask;
        return reinterpret_cast<Chunk *>(addr);
    }

    static bool withinArenasRange(uintptr_t addr) {
        uintptr_t offset = addr & ChunkMask;
        return offset < ArenasPerChunk * ArenaSize;
    }

    static size_t arenaIndex(uintptr_t addr) {
        JS_ASSERT(withinArenasRange(addr));
        return (addr & ChunkMask) >> ArenaShift;
    }

    uintptr_t address() const {
        uintptr_t addr = reinterpret_cast<uintptr_t>(this);
        JS_ASSERT(!(addr & ChunkMask));
        return addr;
    }

    bool unused() const {
        return info.numArenasFree == ArenasPerChunk;
    }

    bool hasAvailableArenas() const {
        return info.numArenasFree != 0;
    }

    inline void addToAvailableList(JSCompartment *compartment);
    inline void insertToAvailableList(Chunk **insertPoint);
    inline void removeFromAvailableList();

    ArenaHeader *allocateArena(JSCompartment *comp, AllocKind kind);

    void releaseArena(ArenaHeader *aheader);

    static Chunk *allocate(JSRuntime *rt);

    /* Must be called with the GC lock taken. */
    static inline void release(JSRuntime *rt, Chunk *chunk);
    static inline void releaseList(JSRuntime *rt, Chunk *chunkListHead);

    /* Must be called with the GC lock taken. */
    inline void prepareToBeFreed(JSRuntime *rt);

    /*
     * Assuming that info.prevp points to the next field of the previous
     * chunk in a doubly-linked list, get that chunk.
     */
    Chunk *getPrevious() {
        JS_ASSERT(info.prevp);
        return fromPointerToNext(info.prevp);
    }

    /* Get the chunk from a pointer to its info.next field. */
    static Chunk *fromPointerToNext(Chunk **nextFieldPtr) {
        uintptr_t addr = reinterpret_cast<uintptr_t>(nextFieldPtr);
        JS_ASSERT((addr & ChunkMask) == offsetof(Chunk, info.next));
        return reinterpret_cast<Chunk *>(addr - offsetof(Chunk, info.next));
    }

  private:
    inline void init();

    /* Search for a decommitted arena to allocate. */
    unsigned findDecommittedArenaOffset();
    ArenaHeader* fetchNextDecommittedArena();

  public:
    /* Unlink and return the freeArenasHead. */
    inline ArenaHeader* fetchNextFreeArena(JSRuntime *rt);

    inline void addArenaToFreeList(JSRuntime *rt, ArenaHeader *aheader);
};

JS_STATIC_ASSERT(sizeof(Chunk) == ChunkSize);
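getPrevious and fromPointerToNext above rely on a classic offsetof trick: a Chunk** that points at some chunk's info.next field can be turned back into the owning Chunk. A self-contained sketch of the same idea with hypothetical types (not the real Chunk); not part of the patch:

#include <cassert>
#include <cstddef>
#include <cstdint>

// Hypothetical standard-layout types mirroring Chunk's next/info arrangement.
struct Info { int *next; int age; };
struct Block { char payload[64]; Info info; };

// Recover the enclosing Block from a pointer to its interior info.next field,
// exactly as Chunk::fromPointerToNext does with offsetof(Chunk, info.next).
static Block *fromPointerToNext(int **nextFieldPtr) {
    uintptr_t addr = reinterpret_cast<uintptr_t>(nextFieldPtr);
    return reinterpret_cast<Block *>(addr - offsetof(Block, info.next));
}

int main() {
    Block b = Block();
    assert(fromPointerToNext(&b.info.next) == &b);   // round-trips to the owner
    return 0;
}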
inline uintptr_t
Cell::address() const
{
    uintptr_t addr = uintptr_t(this);
    JS_ASSERT(addr % Cell::CellSize == 0);
    JS_ASSERT(Chunk::withinArenasRange(addr));
    return addr;
}

inline uintptr_t
ArenaHeader::address() const
{
    uintptr_t addr = reinterpret_cast<uintptr_t>(this);
    JS_ASSERT(!(addr & ArenaMask));
    JS_ASSERT(Chunk::withinArenasRange(addr));
    return addr;
}

inline Chunk *
ArenaHeader::chunk() const
{
    return Chunk::fromAddress(address());
}

inline uintptr_t
ArenaHeader::arenaAddress() const
{
    return address();
}

inline Arena *
ArenaHeader::getArena()
{
    return reinterpret_cast<Arena *>(arenaAddress());
}

inline bool
ArenaHeader::isEmpty() const
{
    /* Arena is empty if its first span covers the whole arena. */
    JS_ASSERT(allocated());
    size_t firstThingOffset = Arena::firstThingOffset(getAllocKind());
    return firstFreeSpanOffsets == FreeSpan::encodeOffsets(firstThingOffset, ArenaMask);
}

FreeSpan
ArenaHeader::getFirstFreeSpan() const
{
#ifdef DEBUG
    checkSynchronizedWithFreeList();
#endif
    return FreeSpan::decodeOffsets(arenaAddress(), firstFreeSpanOffsets);
}

void
ArenaHeader::setFirstFreeSpan(const FreeSpan *span)
{
    JS_ASSERT(span->isWithinArena(arenaAddress()));
    firstFreeSpanOffsets = span->encodeAsOffsets();
}

inline ArenaHeader *
ArenaHeader::getNextDelayedMarking() const
{
    return &reinterpret_cast<Arena *>(nextDelayedMarking << ArenaShift)->aheader;
}

inline void
ArenaHeader::setNextDelayedMarking(ArenaHeader *aheader)
{
    JS_ASSERT(!(uintptr_t(aheader) & ArenaMask));
    hasDelayedMarking = 1;
    nextDelayedMarking = aheader->arenaAddress() >> ArenaShift;
}

JS_ALWAYS_INLINE void
ChunkBitmap::getMarkWordAndMask(const Cell *cell, uint32_t color,
                                uintptr_t **wordp, uintptr_t *maskp)
{
    size_t bit = (cell->address() & ChunkMask) / Cell::CellSize + color;
    JS_ASSERT(bit < ArenaBitmapBits * ArenasPerChunk);
    *maskp = uintptr_t(1) << (bit % JS_BITS_PER_WORD);
    *wordp = &bitmap[bit / JS_BITS_PER_WORD];
}

static void
AssertValidColor(const void *thing, uint32_t color)
{
#ifdef DEBUG
    ArenaHeader *aheader = reinterpret_cast<const Cell *>(thing)->arenaHeader();
    JS_ASSERT_IF(color, color < aheader->getThingSize() / Cell::CellSize);
#endif
}

inline ArenaHeader *
Cell::arenaHeader() const
{
    uintptr_t addr = address();
    addr &= ~ArenaMask;
    return reinterpret_cast<ArenaHeader *>(addr);
}

Chunk *
Cell::chunk() const
{
    uintptr_t addr = uintptr_t(this);
    JS_ASSERT(addr % Cell::CellSize == 0);
    addr &= ~(ChunkSize - 1);
    return reinterpret_cast<Chunk *>(addr);
}

AllocKind
Cell::getAllocKind() const
{
    return arenaHeader()->getAllocKind();
}

bool
Cell::isMarked(uint32_t color /* = BLACK */) const
{
    AssertValidColor(this, color);
    return chunk()->bitmap.isMarked(this, color);
}

bool
Cell::markIfUnmarked(uint32_t color /* = BLACK */) const
{
    AssertValidColor(this, color);
    return chunk()->bitmap.markIfUnmarked(this, color);
}

void
Cell::unmark(uint32_t color) const
{
    JS_ASSERT(color != BLACK);
    AssertValidColor(this, color);
    chunk()->bitmap.unmark(this, color);
}

JSCompartment *
Cell::compartment() const
{
    return arenaHeader()->compartment;
}

#ifdef DEBUG
bool
Cell::isAligned() const
{
    return Arena::isAligned(address(), arenaHeader()->getThingSize());
}
#endif

} /* namespace gc */

} /* namespace js */

#endif /* gc_heap_h___ */
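The inline Cell methods above all reduce to address masking: clearing the low bits of a cell address yields the owning arena and chunk. A compact standalone check, assuming 4 KiB arenas inside 1 MiB chunks; illustrative only, not part of the patch:

#include <cassert>
#include <cstdint>

int main() {
    const uintptr_t ArenaMask = 4096 - 1;
    const uintptr_t ChunkMask = (uintptr_t(1) << 20) - 1;

    uintptr_t chunk = uintptr_t(1) << 24;        // hypothetical chunk-aligned base
    uintptr_t arena = chunk + 37 * 4096;         // the 38th arena in that chunk
    uintptr_t cell = arena + 0x140;              // a cell inside that arena

    assert((cell & ~ArenaMask) == arena);        // what Cell::arenaHeader() computes
    assert((cell & ~ChunkMask) == chunk);        // what Cell::chunk() computes
    return 0;
}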
js/src/jscell.h
@@ -1,119 +0,0 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 *
 * ***** BEGIN LICENSE BLOCK *****
 * Version: MPL 1.1/GPL 2.0/LGPL 2.1
 *
 * The contents of this file are subject to the Mozilla Public License Version
 * 1.1 (the "License"); you may not use this file except in compliance with
 * the License. You may obtain a copy of the License at
 * http://www.mozilla.org/MPL/
 *
 * Software distributed under the License is distributed on an "AS IS" basis,
 * WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
 * for the specific language governing rights and limitations under the
 * License.
 *
 * The Original Code is SpiderMonkey code.
 *
 * The Initial Developer of the Original Code is
 * Mozilla Corporation.
 * Portions created by the Initial Developer are Copyright (C) 2010
 * the Initial Developer. All Rights Reserved.
 *
 * Contributor(s):
 *  Gregor Wagner <anygregor@gmail.com>
 *
 * Alternatively, the contents of this file may be used under the terms of
 * either of the GNU General Public License Version 2 or later (the "GPL"),
 * or the GNU Lesser General Public License Version 2.1 or later (the "LGPL"),
 * in which case the provisions of the GPL or the LGPL are applicable instead
 * of those above. If you wish to allow use of your version of this file only
 * under the terms of either the GPL or the LGPL, and not to allow others to
 * use your version of this file under the terms of the MPL, indicate your
 * decision by deleting the provisions above and replace them with the notice
 * and other provisions required by the GPL or the LGPL. If you do not delete
 * the provisions above, a recipient may use your version of this file under
 * the terms of any one of the MPL, the GPL or the LGPL.
 *
 * ***** END LICENSE BLOCK ***** */

#ifndef jscell_h___
#define jscell_h___

#include "jspubtd.h"

struct JSCompartment;

namespace js {
namespace gc {

struct ArenaHeader;
struct Chunk;

/* The GC allocation kinds. */
enum AllocKind {
    FINALIZE_OBJECT0,
    FINALIZE_OBJECT0_BACKGROUND,
    FINALIZE_OBJECT2,
    FINALIZE_OBJECT2_BACKGROUND,
    FINALIZE_OBJECT4,
    FINALIZE_OBJECT4_BACKGROUND,
    FINALIZE_OBJECT8,
    FINALIZE_OBJECT8_BACKGROUND,
    FINALIZE_OBJECT12,
    FINALIZE_OBJECT12_BACKGROUND,
    FINALIZE_OBJECT16,
    FINALIZE_OBJECT16_BACKGROUND,
    FINALIZE_OBJECT_LAST = FINALIZE_OBJECT16_BACKGROUND,
    FINALIZE_SCRIPT,
    FINALIZE_SHAPE,
    FINALIZE_BASE_SHAPE,
    FINALIZE_TYPE_OBJECT,
#if JS_HAS_XML_SUPPORT
    FINALIZE_XML,
#endif
    FINALIZE_SHORT_STRING,
    FINALIZE_STRING,
    FINALIZE_EXTERNAL_STRING,
    FINALIZE_LAST = FINALIZE_EXTERNAL_STRING
};

static const unsigned FINALIZE_LIMIT = FINALIZE_LAST + 1;
static const unsigned FINALIZE_OBJECT_LIMIT = FINALIZE_OBJECT_LAST + 1;

/*
 * Live objects are marked black. How many additional colors are available
 * depends on the size of the GCThing. Objects marked gray are eligible for
 * cycle collection.
 */
static const uint32_t BLACK = 0;
static const uint32_t GRAY = 1;

/*
 * A GC cell is the base class for all GC things.
 */
struct Cell {
    static const size_t CellShift = 3;
    static const size_t CellSize = size_t(1) << CellShift;
    static const size_t CellMask = CellSize - 1;

    inline uintptr_t address() const;
    inline ArenaHeader *arenaHeader() const;
    inline Chunk *chunk() const;
    inline AllocKind getAllocKind() const;

    JS_ALWAYS_INLINE bool isMarked(uint32_t color = BLACK) const;
    JS_ALWAYS_INLINE bool markIfUnmarked(uint32_t color = BLACK) const;
    JS_ALWAYS_INLINE void unmark(uint32_t color) const;

    inline JSCompartment *compartment() const;

#ifdef DEBUG
    inline bool isAligned() const;
#endif
};

} /* namespace gc */
} /* namespace js */

#endif /* jscell_h___ */
js/src/jsgc.h
@@ -54,9 +54,9 @@
#include "jslock.h"
#include "jsutil.h"
#include "jsversion.h"
#include "jscell.h"

#include "ds/BitArray.h"
#include "gc/Heap.h"
#include "gc/Statistics.h"
#include "js/HashTable.h"
#include "js/Vector.h"
@ -87,737 +87,6 @@ enum State {
|
|||
INVALID
|
||||
};
|
||||
|
||||
struct Arena;
|
||||
|
||||
/*
|
||||
* This must be an upper bound, but we do not need the least upper bound, so
|
||||
* we just exclude non-background objects.
|
||||
*/
|
||||
const size_t MAX_BACKGROUND_FINALIZE_KINDS = FINALIZE_LIMIT - FINALIZE_OBJECT_LIMIT / 2;
|
||||
|
||||
/*
|
||||
* Page size is 4096 by default, except for SPARC, where it is 8192.
|
||||
* Note: Do not use JS_CPU_SPARC here, this header is used outside JS.
|
||||
* Bug 692267: Move page size definition to gc/Memory.h and include it
|
||||
* directly once jsgc.h is no longer an installed header.
|
||||
*/
|
||||
#if defined(SOLARIS) && (defined(__sparc) || defined(__sparcv9))
|
||||
const size_t PageShift = 13;
|
||||
#else
|
||||
const size_t PageShift = 12;
|
||||
#endif
|
||||
const size_t PageSize = size_t(1) << PageShift;
|
||||
|
||||
const size_t ChunkShift = 20;
|
||||
const size_t ChunkSize = size_t(1) << ChunkShift;
|
||||
const size_t ChunkMask = ChunkSize - 1;
|
||||
|
||||
const size_t ArenaShift = PageShift;
|
||||
const size_t ArenaSize = PageSize;
|
||||
const size_t ArenaMask = ArenaSize - 1;
|
||||
|
||||
/*
|
||||
* This is the maximum number of arenas we allow in the FreeCommitted state
|
||||
* before we trigger a GC_SHRINK to release free arenas to the OS.
|
||||
*/
|
||||
const static uint32_t FreeCommittedArenasThreshold = (32 << 20) / ArenaSize;
|
||||
|
||||
/*
|
||||
* The mark bitmap has one bit per each GC cell. For multi-cell GC things this
|
||||
* wastes space but allows to avoid expensive devisions by thing's size when
|
||||
* accessing the bitmap. In addition this allows to use some bits for colored
|
||||
* marking during the cycle GC.
|
||||
*/
|
||||
const size_t ArenaCellCount = size_t(1) << (ArenaShift - Cell::CellShift);
|
||||
const size_t ArenaBitmapBits = ArenaCellCount;
|
||||
const size_t ArenaBitmapBytes = ArenaBitmapBits / 8;
|
||||
const size_t ArenaBitmapWords = ArenaBitmapBits / JS_BITS_PER_WORD;
|
||||
|
||||
/*
|
||||
* A FreeSpan represents a contiguous sequence of free cells in an Arena.
|
||||
* |first| is the address of the first free cell in the span. |last| is the
|
||||
* address of the last free cell in the span. This last cell holds a FreeSpan
|
||||
* data structure for the next span unless this is the last span on the list
|
||||
* of spans in the arena. For this last span |last| points to the last byte of
|
||||
* the last thing in the arena and no linkage is stored there, so
|
||||
* |last| == arenaStart + ArenaSize - 1. If the space at the arena end is
|
||||
* fully used this last span is empty and |first| == |last + 1|.
|
||||
*
|
||||
* Thus |first| < |last| implies that we have either the last span with at least
|
||||
* one element or that the span is not the last and contains at least 2
|
||||
* elements. In both cases to allocate a thing from this span we need simply
|
||||
* to increment |first| by the allocation size.
|
||||
*
|
||||
* |first| == |last| implies that we have a one element span that records the
|
||||
* next span. So to allocate from it we need to update the span list head
|
||||
* with a copy of the span stored at |last| address so the following
|
||||
* allocations will use that span.
|
||||
*
|
||||
* |first| > |last| implies that we have an empty last span and the arena is
|
||||
* fully used.
|
||||
*
|
||||
* Also only for the last span (|last| & 1)! = 0 as all allocation sizes are
|
||||
* multiples of Cell::CellSize.
|
||||
*/
|
||||
struct FreeSpan {
|
||||
uintptr_t first;
|
||||
uintptr_t last;
|
||||
|
||||
public:
|
||||
FreeSpan() {}
|
||||
|
||||
FreeSpan(uintptr_t first, uintptr_t last)
|
||||
: first(first), last(last) {
|
||||
checkSpan();
|
||||
}
|
||||
|
||||
/*
|
||||
* To minimize the size of the arena header the first span is encoded
|
||||
* there as offsets from the arena start.
|
||||
*/
|
||||
static size_t encodeOffsets(size_t firstOffset, size_t lastOffset) {
|
||||
/* Check that we can pack the offsets into uint16. */
|
||||
JS_STATIC_ASSERT(ArenaShift < 16);
|
||||
JS_ASSERT(firstOffset <= ArenaSize);
|
||||
JS_ASSERT(lastOffset < ArenaSize);
|
||||
JS_ASSERT(firstOffset <= ((lastOffset + 1) & ~size_t(1)));
|
||||
return firstOffset | (lastOffset << 16);
|
||||
}
|
||||
|
||||
/*
|
||||
* Encoded offsets for a full arena when its first span is the last one
|
||||
* and empty.
|
||||
*/
|
||||
static const size_t FullArenaOffsets = ArenaSize | ((ArenaSize - 1) << 16);
|
||||
|
||||
static FreeSpan decodeOffsets(uintptr_t arenaAddr, size_t offsets) {
|
||||
JS_ASSERT(!(arenaAddr & ArenaMask));
|
||||
|
||||
size_t firstOffset = offsets & 0xFFFF;
|
||||
size_t lastOffset = offsets >> 16;
|
||||
JS_ASSERT(firstOffset <= ArenaSize);
|
||||
JS_ASSERT(lastOffset < ArenaSize);
|
||||
|
||||
/*
|
||||
* We must not use | when calculating first as firstOffset is
|
||||
* ArenaMask + 1 for the empty span.
|
||||
*/
|
||||
return FreeSpan(arenaAddr + firstOffset, arenaAddr | lastOffset);
|
||||
}
|
||||
|
||||
void initAsEmpty(uintptr_t arenaAddr = 0) {
|
||||
JS_ASSERT(!(arenaAddr & ArenaMask));
|
||||
first = arenaAddr + ArenaSize;
|
||||
last = arenaAddr | (ArenaSize - 1);
|
||||
JS_ASSERT(isEmpty());
|
||||
}
|
||||
|
||||
bool isEmpty() const {
|
||||
checkSpan();
|
||||
return first > last;
|
||||
}
|
||||
|
||||
bool hasNext() const {
|
||||
checkSpan();
|
||||
return !(last & uintptr_t(1));
|
||||
}
|
||||
|
||||
const FreeSpan *nextSpan() const {
|
||||
JS_ASSERT(hasNext());
|
||||
return reinterpret_cast<FreeSpan *>(last);
|
||||
}
|
||||
|
||||
FreeSpan *nextSpanUnchecked(size_t thingSize) const {
|
||||
#ifdef DEBUG
|
||||
uintptr_t lastOffset = last & ArenaMask;
|
||||
JS_ASSERT(!(lastOffset & 1));
|
||||
JS_ASSERT((ArenaSize - lastOffset) % thingSize == 0);
|
||||
#endif
|
||||
return reinterpret_cast<FreeSpan *>(last);
|
||||
}
|
||||
|
||||
uintptr_t arenaAddressUnchecked() const {
|
||||
return last & ~ArenaMask;
|
||||
}
|
||||
|
||||
uintptr_t arenaAddress() const {
|
||||
checkSpan();
|
||||
return arenaAddressUnchecked();
|
||||
}
|
||||
|
||||
ArenaHeader *arenaHeader() const {
|
||||
return reinterpret_cast<ArenaHeader *>(arenaAddress());
|
||||
}
|
||||
|
||||
bool isSameNonEmptySpan(const FreeSpan *another) const {
|
||||
JS_ASSERT(!isEmpty());
|
||||
JS_ASSERT(!another->isEmpty());
|
||||
return first == another->first && last == another->last;
|
||||
}
|
||||
|
||||
bool isWithinArena(uintptr_t arenaAddr) const {
|
||||
JS_ASSERT(!(arenaAddr & ArenaMask));
|
||||
|
||||
/* Return true for the last empty span as well. */
|
||||
return arenaAddress() == arenaAddr;
|
||||
}
|
||||
|
||||
size_t encodeAsOffsets() const {
|
||||
/*
|
||||
* We must use first - arenaAddress(), not first & ArenaMask as
|
||||
* first == ArenaMask + 1 for an empty span.
|
||||
*/
|
||||
uintptr_t arenaAddr = arenaAddress();
|
||||
return encodeOffsets(first - arenaAddr, last & ArenaMask);
|
||||
}
|
||||
|
||||
/* See comments before FreeSpan for details. */
|
||||
JS_ALWAYS_INLINE void *allocate(size_t thingSize) {
|
||||
JS_ASSERT(thingSize % Cell::CellSize == 0);
|
||||
checkSpan();
|
||||
uintptr_t thing = first;
|
||||
if (thing < last) {
|
||||
/* Bump-allocate from the current span. */
|
||||
first = thing + thingSize;
|
||||
} else if (JS_LIKELY(thing == last)) {
|
||||
/*
|
||||
* Move to the next span. We use JS_LIKELY as without PGO
|
||||
* compilers mis-predict == here as unlikely to succeed.
|
||||
*/
|
||||
*this = *reinterpret_cast<FreeSpan *>(thing);
|
||||
} else {
|
||||
return NULL;
|
||||
}
|
||||
checkSpan();
|
||||
return reinterpret_cast<void *>(thing);
|
||||
}
|
||||
|
||||
/* A version of allocate when we know that the span is not empty. */
|
||||
JS_ALWAYS_INLINE void *infallibleAllocate(size_t thingSize) {
|
||||
JS_ASSERT(thingSize % Cell::CellSize == 0);
|
||||
checkSpan();
|
||||
uintptr_t thing = first;
|
||||
if (thing < last) {
|
||||
first = thing + thingSize;
|
||||
} else {
|
||||
JS_ASSERT(thing == last);
|
||||
*this = *reinterpret_cast<FreeSpan *>(thing);
|
||||
}
|
||||
checkSpan();
|
||||
return reinterpret_cast<void *>(thing);
|
||||
}
|
||||
|
||||
/*
|
||||
* Allocate from a newly allocated arena. We do not move the free list
|
||||
* from the arena. Rather we set the arena up as fully used during the
|
||||
* initialization so to allocate we simply return the first thing in the
|
||||
* arena and set the free list to point to the second.
|
||||
*/
|
||||
JS_ALWAYS_INLINE void *allocateFromNewArena(uintptr_t arenaAddr, size_t firstThingOffset,
|
||||
size_t thingSize) {
|
||||
JS_ASSERT(!(arenaAddr & ArenaMask));
|
||||
uintptr_t thing = arenaAddr | firstThingOffset;
|
||||
first = thing + thingSize;
|
||||
last = arenaAddr | ArenaMask;
|
||||
checkSpan();
|
||||
return reinterpret_cast<void *>(thing);
|
||||
}
|
||||
|
||||
void checkSpan() const {
|
||||
#ifdef DEBUG
|
||||
/* We do not allow spans at the end of the address space. */
|
||||
JS_ASSERT(last != uintptr_t(-1));
|
||||
JS_ASSERT(first);
|
||||
JS_ASSERT(last);
|
||||
JS_ASSERT(first - 1 <= last);
|
||||
uintptr_t arenaAddr = arenaAddressUnchecked();
|
||||
if (last & 1) {
|
||||
/* The span is the last. */
|
||||
JS_ASSERT((last & ArenaMask) == ArenaMask);
|
||||
|
||||
if (first - 1 == last) {
|
||||
/* The span is last and empty. The above start != 0 check
|
||||
* implies that we are not at the end of the address space.
|
||||
*/
|
||||
return;
|
||||
}
|
||||
size_t spanLength = last - first + 1;
|
||||
JS_ASSERT(spanLength % Cell::CellSize == 0);
|
||||
|
||||
/* Start and end must belong to the same arena. */
|
||||
JS_ASSERT((first & ~ArenaMask) == arenaAddr);
|
||||
return;
|
||||
}
|
||||
|
||||
/* The span is not the last and we have more spans to follow. */
|
||||
JS_ASSERT(first <= last);
|
||||
size_t spanLengthWithoutOneThing = last - first;
|
||||
JS_ASSERT(spanLengthWithoutOneThing % Cell::CellSize == 0);
|
||||
|
||||
JS_ASSERT((first & ~ArenaMask) == arenaAddr);
|
||||
|
||||
/*
|
||||
* If there is not enough space before the arena end to allocate one
|
||||
* more thing, then the span must be marked as the last one to avoid
|
||||
* storing useless empty span reference.
|
||||
*/
|
||||
size_t beforeTail = ArenaSize - (last & ArenaMask);
|
||||
JS_ASSERT(beforeTail >= sizeof(FreeSpan) + Cell::CellSize);
|
||||
|
||||
FreeSpan *next = reinterpret_cast<FreeSpan *>(last);
|
||||
|
||||
/*
|
||||
* The GC things on the list of free spans come from one arena
|
||||
* and the spans are linked in ascending address order with
|
||||
* at least one non-free thing between spans.
|
||||
*/
|
||||
JS_ASSERT(last < next->first);
|
||||
JS_ASSERT(arenaAddr == next->arenaAddressUnchecked());
|
||||
|
||||
if (next->first > next->last) {
|
||||
/*
|
||||
* The next span is the empty span that terminates the list for
|
||||
* arenas that do not have any free things at the end.
|
||||
*/
|
||||
JS_ASSERT(next->first - 1 == next->last);
|
||||
JS_ASSERT(arenaAddr + ArenaSize == next->first);
|
||||
}
|
||||
#endif
|
||||
}
|
||||
|
||||
};
|
||||
|
||||
/* Every arena has a header. */
|
||||
struct ArenaHeader {
|
||||
friend struct FreeLists;
|
||||
|
||||
JSCompartment *compartment;
|
||||
|
||||
/*
|
||||
* ArenaHeader::next has two purposes: when unallocated, it points to the
|
||||
* next available Arena's header. When allocated, it points to the next
|
||||
* arena of the same size class and compartment.
|
||||
*/
|
||||
ArenaHeader *next;
|
||||
|
||||
private:
|
||||
/*
|
||||
* The first span of free things in the arena. We encode it as the start
|
||||
* and end offsets within the arena, not as FreeSpan structure, to
|
||||
* minimize the header size.
|
||||
*/
|
||||
size_t firstFreeSpanOffsets;
|
||||
|
||||
/*
|
||||
* One of AllocKind constants or FINALIZE_LIMIT when the arena does not
|
||||
* contain any GC things and is on the list of empty arenas in the GC
|
||||
* chunk. The latter allows to quickly check if the arena is allocated
|
||||
* during the conservative GC scanning without searching the arena in the
|
||||
* list.
|
||||
*/
|
||||
size_t allocKind : 8;
|
||||
|
||||
/*
|
||||
* When recursive marking uses too much stack the marking is delayed and
|
||||
* the corresponding arenas are put into a stack using the following field
|
||||
* as a linkage. To distinguish the bottom of the stack from the arenas
|
||||
* not present in the stack we use an extra flag to tag arenas on the
|
||||
* stack.
|
||||
*
|
||||
* Delayed marking is also used for arenas that we allocate into during an
|
||||
* incremental GC. In this case, we intend to mark all the objects in the
|
||||
* arena, and it's faster to do this marking in bulk.
|
||||
*
|
||||
* To minimize the ArenaHeader size we record the next delayed marking
|
||||
* linkage as arenaAddress() >> ArenaShift and pack it with the allocKind
|
||||
* field and hasDelayedMarking flag. We use 8 bits for the allocKind, not
|
||||
* ArenaShift - 1, so the compiler can use byte-level memory instructions
|
||||
* to access it.
|
||||
*/
|
||||
public:
|
||||
size_t hasDelayedMarking : 1;
|
||||
size_t allocatedDuringIncremental : 1;
|
||||
size_t markOverflow : 1;
|
||||
size_t nextDelayedMarking : JS_BITS_PER_WORD - 8 - 1 - 1 - 1;
|
||||
|
||||
static void staticAsserts() {
|
||||
/* We must be able to fit the allockind into uint8_t. */
|
||||
JS_STATIC_ASSERT(FINALIZE_LIMIT <= 255);
|
||||
|
||||
/*
|
||||
* nextDelayedMarkingpacking assumes that ArenaShift has enough bits
|
||||
* to cover allocKind and hasDelayedMarking.
|
||||
*/
|
||||
JS_STATIC_ASSERT(ArenaShift >= 8 + 1 + 1 + 1);
|
||||
}
|
||||
|
||||
inline uintptr_t address() const;
|
||||
inline Chunk *chunk() const;
|
||||
|
||||
bool allocated() const {
|
||||
JS_ASSERT(allocKind <= size_t(FINALIZE_LIMIT));
|
||||
return allocKind < size_t(FINALIZE_LIMIT);
|
||||
}
|
||||
|
||||
void init(JSCompartment *comp, AllocKind kind) {
|
||||
JS_ASSERT(!allocated());
|
||||
JS_ASSERT(!markOverflow);
|
||||
JS_ASSERT(!allocatedDuringIncremental);
|
||||
JS_ASSERT(!hasDelayedMarking);
|
||||
compartment = comp;
|
||||
|
||||
JS_STATIC_ASSERT(FINALIZE_LIMIT <= 255);
|
||||
allocKind = size_t(kind);
|
||||
|
||||
/* See comments in FreeSpan::allocateFromNewArena. */
|
||||
firstFreeSpanOffsets = FreeSpan::FullArenaOffsets;
|
||||
}
|
||||
|
||||
void setAsNotAllocated() {
|
||||
allocKind = size_t(FINALIZE_LIMIT);
|
||||
markOverflow = 0;
|
||||
allocatedDuringIncremental = 0;
|
||||
hasDelayedMarking = 0;
|
||||
nextDelayedMarking = 0;
|
||||
}
|
||||
|
||||
uintptr_t arenaAddress() const {
|
||||
return address();
|
||||
}
|
||||
|
||||
Arena *getArena() {
|
||||
return reinterpret_cast<Arena *>(arenaAddress());
|
||||
}
|
||||
|
||||
AllocKind getAllocKind() const {
|
||||
JS_ASSERT(allocated());
|
||||
return AllocKind(allocKind);
|
||||
}
|
||||
|
||||
inline size_t getThingSize() const;
|
||||
|
||||
bool hasFreeThings() const {
|
||||
return firstFreeSpanOffsets != FreeSpan::FullArenaOffsets;
|
||||
}
|
||||
|
||||
inline bool isEmpty() const;
|
||||
|
||||
void setAsFullyUsed() {
|
||||
firstFreeSpanOffsets = FreeSpan::FullArenaOffsets;
|
||||
}
|
||||
|
||||
FreeSpan getFirstFreeSpan() const {
|
||||
#ifdef DEBUG
|
||||
checkSynchronizedWithFreeList();
|
||||
#endif
|
||||
return FreeSpan::decodeOffsets(arenaAddress(), firstFreeSpanOffsets);
|
||||
}
|
||||
|
||||
void setFirstFreeSpan(const FreeSpan *span) {
|
||||
JS_ASSERT(span->isWithinArena(arenaAddress()));
|
||||
firstFreeSpanOffsets = span->encodeAsOffsets();
|
||||
}
|
||||
|
||||
#ifdef DEBUG
|
||||
void checkSynchronizedWithFreeList() const;
|
||||
#endif
|
||||
|
||||
inline ArenaHeader *getNextDelayedMarking() const;
|
||||
inline void setNextDelayedMarking(ArenaHeader *aheader);
|
||||
};

struct Arena {
    /*
     * Layout of an arena:
     * An arena is 4K in size and 4K-aligned. It starts with the ArenaHeader
     * descriptor followed by some pad bytes. The remainder of the arena is
     * filled with the array of T things. The pad bytes ensure that the thing
     * array ends exactly at the end of the arena.
     *
     * +-------------+-----+----+----+-----+----+
     * | ArenaHeader | pad | T0 | T1 | ... | Tn |
     * +-------------+-----+----+----+-----+----+
     *
     * <----------------------------------------> = ArenaSize bytes
     * <-------------------> = first thing offset
     */
    ArenaHeader aheader;
    uint8_t     data[ArenaSize - sizeof(ArenaHeader)];

  private:
    static JS_FRIEND_DATA(const uint32_t) ThingSizes[];
    static JS_FRIEND_DATA(const uint32_t) FirstThingOffsets[];

  public:
    static void staticAsserts();

    static size_t thingSize(AllocKind kind) {
        return ThingSizes[kind];
    }

    static size_t firstThingOffset(AllocKind kind) {
        return FirstThingOffsets[kind];
    }

    static size_t thingsPerArena(size_t thingSize) {
        JS_ASSERT(thingSize % Cell::CellSize == 0);

        /* We should be able to fit FreeSpan in any GC thing. */
        JS_ASSERT(thingSize >= sizeof(FreeSpan));

        return (ArenaSize - sizeof(ArenaHeader)) / thingSize;
    }

    static size_t thingsSpan(size_t thingSize) {
        return thingsPerArena(thingSize) * thingSize;
    }

    static bool isAligned(uintptr_t thing, size_t thingSize) {
        /* Things end at the arena end. */
        uintptr_t tailOffset = (ArenaSize - thing) & ArenaMask;
        return tailOffset % thingSize == 0;
    }

    uintptr_t address() const {
        return aheader.address();
    }

    uintptr_t thingsStart(AllocKind thingKind) {
        return address() | firstThingOffset(thingKind);
    }

    uintptr_t thingsEnd() {
        return address() + ArenaSize;
    }

    template <typename T>
    bool finalize(FreeOp *fop, AllocKind thingKind, size_t thingSize);
};
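
The layout invariant in the diagram above can be restated as a tiny helper
(an illustrative sketch only; ArenaPadBytes is not part of the header):

static size_t
ArenaPadBytes(AllocKind kind)
{
    /* The thing array is pinned to the arena's end, so the first thing
     * begins at ArenaSize - thingsSpan(thingSize); everything between the
     * ArenaHeader and T0 is the pad shown in the diagram. */
    return Arena::firstThingOffset(kind) - sizeof(ArenaHeader);
}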

/* The chunk header (located at the end of the chunk to preserve arena alignment). */
struct ChunkInfo {
    Chunk           *next;
    Chunk           **prevp;

    /* Free arenas are linked together with aheader.next. */
    ArenaHeader     *freeArenasHead;

    /*
     * Decommitted arenas are tracked by a bitmap in the chunk header. We use
     * this offset to start our search iteration close to a decommitted arena
     * that we can allocate.
     */
    uint32_t        lastDecommittedArenaOffset;

    /* Number of free arenas, either committed or decommitted. */
    uint32_t        numArenasFree;

    /* Number of free, committed arenas. */
    uint32_t        numArenasFreeCommitted;

    /* Number of GC cycles this chunk has survived. */
    uint32_t        age;
};

/*
 * Calculating ArenasPerChunk:
 *
 * In order to figure out how many Arenas will fit in a chunk, we need to know
 * how much extra space is available after we allocate the header data. This
 * is a problem because the header size depends on the number of arenas in the
 * chunk. The two dependent fields are bitmap and decommittedArenas.
 *
 * For the mark bitmap, we know that each arena will use a fixed number of full
 * bytes: ArenaBitmapBytes. The full size of the header data is this number
 * multiplied by the eventual number of arenas we have in the header. We,
 * conceptually, distribute this header data among the individual arenas and do
 * not include it in the header. This way we do not have to worry about its
 * variable size: it gets attached to the variable number we are computing.
 *
 * For the decommitted arena bitmap, we only have 1 bit per arena, so this
 * technique will not work. Instead, we observe that we do not have enough
 * header info to fill 8 full arenas: it is currently 4 on 64bit, less on
 * 32bit. Thus, with current numbers, we need 64 bytes for decommittedArenas.
 * This will not become 63 bytes unless we double the data required in the
 * header. Therefore, we just compute the number of bytes required to track
 * every possible arena and do not worry about slop bits, since there are too
 * few to usefully allocate.
 *
 * To actually compute the number of arenas we can allocate in a chunk, we
 * divide the amount of available space less the header info (not including
 * the mark bitmap which is distributed into the arena size) by the size of
 * the arena (with the mark bitmap bytes it uses).
 */
const size_t BytesPerArenaWithHeader = ArenaSize + ArenaBitmapBytes;
const size_t ChunkDecommitBitmapBytes = ChunkSize / ArenaSize / JS_BITS_PER_BYTE;
const size_t ChunkBytesAvailable = ChunkSize - sizeof(ChunkInfo) - ChunkDecommitBitmapBytes;
const size_t ArenasPerChunk = ChunkBytesAvailable / BytesPerArenaWithHeader;
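
To make the division concrete, here is the arithmetic for one plausible
configuration (a sketch under assumed constants: ChunkSize = 1 MiB,
ArenaSize = 4 KiB, CellSize = 8, and a 40-byte ChunkInfo on a 64-bit build;
none of these exact values is guaranteed by this header):

static size_t
ArenasPerChunkByHand()
{
    size_t bytesPerArenaWithHeader = 4096 + 64;        /* arena plus its 64
                                                          mark-bitmap bytes */
    size_t decommitBitmapBytes = (1 << 20) / 4096 / 8; /* one bit per arena */
    size_t available = (1 << 20) - 40 - decommitBitmapBytes;
    return available / bytesPerArenaWithHeader;        /* truncates to 252 */
}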

/* A chunk bitmap contains enough mark bits for all the cells in a chunk. */
struct ChunkBitmap {
    uintptr_t bitmap[ArenaBitmapWords * ArenasPerChunk];

    JS_ALWAYS_INLINE void getMarkWordAndMask(const Cell *cell, uint32_t color,
                                             uintptr_t **wordp, uintptr_t *maskp);

    JS_ALWAYS_INLINE bool isMarked(const Cell *cell, uint32_t color) {
        uintptr_t *word, mask;
        getMarkWordAndMask(cell, color, &word, &mask);
        return *word & mask;
    }

    JS_ALWAYS_INLINE bool markIfUnmarked(const Cell *cell, uint32_t color) {
        uintptr_t *word, mask;
        getMarkWordAndMask(cell, BLACK, &word, &mask);
        if (*word & mask)
            return false;
        *word |= mask;
        if (color != BLACK) {
            /*
             * We use getMarkWordAndMask to recalculate both mask and word as
             * doing just mask << color may overflow the mask.
             */
            getMarkWordAndMask(cell, color, &word, &mask);
            if (*word & mask)
                return false;
            *word |= mask;
        }
        return true;
    }

    JS_ALWAYS_INLINE void unmark(const Cell *cell, uint32_t color) {
        uintptr_t *word, mask;
        getMarkWordAndMask(cell, color, &word, &mask);
        *word &= ~mask;
    }

    void clear() {
        PodArrayZero(bitmap);
    }

#ifdef DEBUG
    bool noBitsSet(ArenaHeader *aheader) {
        /*
         * We assume that the part of the bitmap corresponding to the arena
         * has the exact number of words so we do not need to deal with a word
         * that covers bits from two arenas.
         */
        JS_STATIC_ASSERT(ArenaBitmapBits == ArenaBitmapWords * JS_BITS_PER_WORD);

        uintptr_t *word, unused;
        getMarkWordAndMask(reinterpret_cast<Cell *>(aheader->address()), BLACK, &word, &unused);
        for (size_t i = 0; i != ArenaBitmapWords; i++) {
            if (word[i])
                return false;
        }
        return true;
    }
#endif
};

JS_STATIC_ASSERT(ArenaBitmapBytes * ArenasPerChunk == sizeof(ChunkBitmap));
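
One subtlety worth spelling out (an illustrative wrapper, not part of the
header): markIfUnmarked first tests and sets the BLACK bit even when asked
for GRAY, so a cell that is already black can never be downgraded to gray.

static bool
MarkCellGray(ChunkBitmap &bitmap, const Cell *cell)
{
    /* Fails if the cell is already marked black; otherwise sets the black
     * bit and then the gray bit one position further along. */
    return bitmap.markIfUnmarked(cell, GRAY);
}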

typedef BitArray<ArenasPerChunk> PerArenaBitmap;

const size_t ChunkPadSize = ChunkSize
                            - (sizeof(Arena) * ArenasPerChunk)
                            - sizeof(ChunkBitmap)
                            - sizeof(PerArenaBitmap)
                            - sizeof(ChunkInfo);
JS_STATIC_ASSERT(ChunkPadSize < BytesPerArenaWithHeader);

/*
 * Chunks contain arenas and associated data structures (mark bitmap, delayed
 * marking state).
 */
struct Chunk {
    Arena           arenas[ArenasPerChunk];

    /* Pad to full size to ensure cache alignment of ChunkInfo. */
    uint8_t         padding[ChunkPadSize];

    ChunkBitmap     bitmap;
    PerArenaBitmap  decommittedArenas;
    ChunkInfo       info;

    static Chunk *fromAddress(uintptr_t addr) {
        addr &= ~ChunkMask;
        return reinterpret_cast<Chunk *>(addr);
    }

    static bool withinArenasRange(uintptr_t addr) {
        uintptr_t offset = addr & ChunkMask;
        return offset < ArenasPerChunk * ArenaSize;
    }

    static size_t arenaIndex(uintptr_t addr) {
        JS_ASSERT(withinArenasRange(addr));
        return (addr & ChunkMask) >> ArenaShift;
    }

    uintptr_t address() const {
        uintptr_t addr = reinterpret_cast<uintptr_t>(this);
        JS_ASSERT(!(addr & ChunkMask));
        return addr;
    }

    bool unused() const {
        return info.numArenasFree == ArenasPerChunk;
    }

    bool hasAvailableArenas() const {
        return info.numArenasFree != 0;
    }

    inline void addToAvailableList(JSCompartment *compartment);
    inline void insertToAvailableList(Chunk **insertPoint);
    inline void removeFromAvailableList();

    ArenaHeader *allocateArena(JSCompartment *comp, AllocKind kind);

    void releaseArena(ArenaHeader *aheader);

    static Chunk *allocate(JSRuntime *rt);

    /* Must be called with the GC lock taken. */
    static inline void release(JSRuntime *rt, Chunk *chunk);
    static inline void releaseList(JSRuntime *rt, Chunk *chunkListHead);

    /* Must be called with the GC lock taken. */
    inline void prepareToBeFreed(JSRuntime *rt);

    /*
     * Assuming that the info.prevp points to the next field of the previous
     * chunk in a doubly-linked list, get that chunk.
     */
    Chunk *getPrevious() {
        JS_ASSERT(info.prevp);
        return fromPointerToNext(info.prevp);
    }

    /* Get the chunk from a pointer to its info.next field. */
    static Chunk *fromPointerToNext(Chunk **nextFieldPtr) {
        uintptr_t addr = reinterpret_cast<uintptr_t>(nextFieldPtr);
        JS_ASSERT((addr & ChunkMask) == offsetof(Chunk, info.next));
        return reinterpret_cast<Chunk *>(addr - offsetof(Chunk, info.next));
    }

  private:
    inline void init();

    /* Search for a decommitted arena to allocate. */
    unsigned findDecommittedArenaOffset();
    ArenaHeader* fetchNextDecommittedArena();

  public:
    /* Unlink and return the freeArenasHead. */
    inline ArenaHeader* fetchNextFreeArena(JSRuntime *rt);

    inline void addArenaToFreeList(JSRuntime *rt, ArenaHeader *aheader);
};
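
The next/prevp fields of ChunkInfo form an intrusive doubly-linked list;
walking it looks like this (a hypothetical helper; the real list heads live
on the runtime, outside this header):

static size_t
CountChunks(Chunk *head)
{
    size_t n = 0;
    for (Chunk *c = head; c; c = c->info.next)
        n++;
    return n;
}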

JS_STATIC_ASSERT(sizeof(Chunk) == ChunkSize);

class ChunkPool {
    Chunk   *emptyChunkListHead;
    size_t  emptyCount;

@ -852,138 +121,6 @@ class ChunkPool {
    JS_FRIEND_API(int64_t) countCleanDecommittedArenas(JSRuntime *rt);
};

inline uintptr_t
Cell::address() const
{
    uintptr_t addr = uintptr_t(this);
    JS_ASSERT(addr % Cell::CellSize == 0);
    JS_ASSERT(Chunk::withinArenasRange(addr));
    return addr;
}

inline ArenaHeader *
Cell::arenaHeader() const
{
    uintptr_t addr = address();
    addr &= ~ArenaMask;
    return reinterpret_cast<ArenaHeader *>(addr);
}

Chunk *
Cell::chunk() const
{
    uintptr_t addr = uintptr_t(this);
    JS_ASSERT(addr % Cell::CellSize == 0);
    addr &= ~(ChunkSize - 1);
    return reinterpret_cast<Chunk *>(addr);
}

AllocKind
Cell::getAllocKind() const
{
    return arenaHeader()->getAllocKind();
}

#ifdef DEBUG
inline bool
Cell::isAligned() const
{
    return Arena::isAligned(address(), arenaHeader()->getThingSize());
}
#endif

inline uintptr_t
ArenaHeader::address() const
{
    uintptr_t addr = reinterpret_cast<uintptr_t>(this);
    JS_ASSERT(!(addr & ArenaMask));
    JS_ASSERT(Chunk::withinArenasRange(addr));
    return addr;
}

inline Chunk *
ArenaHeader::chunk() const
{
    return Chunk::fromAddress(address());
}

inline bool
ArenaHeader::isEmpty() const
{
    /* Arena is empty if its first span covers the whole arena. */
    JS_ASSERT(allocated());
    size_t firstThingOffset = Arena::firstThingOffset(getAllocKind());
    return firstFreeSpanOffsets == FreeSpan::encodeOffsets(firstThingOffset, ArenaMask);
}

inline size_t
ArenaHeader::getThingSize() const
{
    JS_ASSERT(allocated());
    return Arena::thingSize(getAllocKind());
}

inline ArenaHeader *
ArenaHeader::getNextDelayedMarking() const
{
    return &reinterpret_cast<Arena *>(nextDelayedMarking << ArenaShift)->aheader;
}

inline void
ArenaHeader::setNextDelayedMarking(ArenaHeader *aheader)
{
    JS_ASSERT(!(uintptr_t(aheader) & ArenaMask));
    hasDelayedMarking = 1;
    nextDelayedMarking = aheader->arenaAddress() >> ArenaShift;
}
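
Because arenas are ArenaSize-aligned, the low ArenaShift bits of an
ArenaHeader address are always zero, which is what lets nextDelayedMarking
hold a full pointer in a narrow bitfield. A round-trip check (illustrative
only, not part of the header):

static void
CheckDelayedMarkingRoundTrip(ArenaHeader *a, ArenaHeader *b)
{
    /* Compress b into a's bitfield, then expand it back. */
    a->setNextDelayedMarking(b);
    JS_ASSERT(a->getNextDelayedMarking() == b);
}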

JS_ALWAYS_INLINE void
ChunkBitmap::getMarkWordAndMask(const Cell *cell, uint32_t color,
                                uintptr_t **wordp, uintptr_t *maskp)
{
    size_t bit = (cell->address() & ChunkMask) / Cell::CellSize + color;
    JS_ASSERT(bit < ArenaBitmapBits * ArenasPerChunk);
    *maskp = uintptr_t(1) << (bit % JS_BITS_PER_WORD);
    *wordp = &bitmap[bit / JS_BITS_PER_WORD];
}
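
Concretely, for a hypothetical cell at byte offset 0x12340 within its chunk
(assuming 64-bit words and CellSize = 8):

static void
MarkBitExample()
{
    size_t bit = 0x12340 / 8;        /* 9320: one mark bit per cell       */
    size_t word = bit / 64;          /* 145: index into bitmap[]          */
    size_t shift = bit % 64;         /* 40: bit position within that word */
    /* The GRAY bit for the same cell is simply bit + 1, i.e. bit 41. */
    (void) word;
    (void) shift;
}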

static void
AssertValidColor(const void *thing, uint32_t color)
{
#ifdef DEBUG
    ArenaHeader *aheader = reinterpret_cast<const js::gc::Cell *>(thing)->arenaHeader();
    JS_ASSERT_IF(color, color < aheader->getThingSize() / Cell::CellSize);
#endif
}

inline bool
Cell::isMarked(uint32_t color) const
{
    AssertValidColor(this, color);
    return chunk()->bitmap.isMarked(this, color);
}

bool
Cell::markIfUnmarked(uint32_t color) const
{
    AssertValidColor(this, color);
    return chunk()->bitmap.markIfUnmarked(this, color);
}

void
Cell::unmark(uint32_t color) const
{
    JS_ASSERT(color != BLACK);
    AssertValidColor(this, color);
    chunk()->bitmap.unmark(this, color);
}

JSCompartment *
Cell::compartment() const
{
    return arenaHeader()->compartment;
}

static inline JSGCTraceKind
MapAllocToTraceKind(AllocKind thingKind)
{
@ -43,12 +43,12 @@
#define jsinfer_h___

#include "jsalloc.h"
#include "jscell.h"
#include "jsfriendapi.h"
#include "jsprvtd.h"

#include "ds/LifoAlloc.h"
#include "gc/Barrier.h"
#include "gc/Heap.h"
#include "js/HashTable.h"

namespace JS {

@ -58,9 +58,9 @@
#include "jspubtd.h"
#include "jsprvtd.h"
#include "jslock.h"
#include "jscell.h"

#include "gc/Barrier.h"
#include "gc/Heap.h"

#include "vm/ObjectImpl.h"
#include "vm/String.h"

@ -45,7 +45,6 @@
#include "jsatom.h"
#include "jsprvtd.h"
#include "jslock.h"
#include "jscell.h"
#include "jsutil.h"

#include "js/HashTable.h"

@ -41,9 +41,9 @@

#include "jspubtd.h"
#include "jsobj.h"
#include "jscell.h"

#include "gc/Barrier.h"
#include "gc/Heap.h"

extern const char js_AnyName_str[];
extern const char js_AttributeName_str[];

@ -10,11 +10,11 @@

#include "mozilla/Assertions.h"

#include "jscell.h"
#include "jscompartment.h"
#include "jsgc.h"
#include "jsinterp.h"

#include "gc/Heap.h"
#include "gc/Marking.h"
#include "js/TemplateLib.h"

@ -44,10 +44,10 @@
#include "mozilla/Attributes.h"

#include "jsapi.h"
#include "jscell.h"
#include "jsfriendapi.h"

#include "gc/Barrier.h"
#include "gc/Heap.h"

class JSString;
class JSDependentString;