Bug 889682 - Use LifoAlloc to back the StoreBuffer's allocation; r=billm

--HG--
extra : rebase_source : 467b43c3dae41691e6ed9c7bef996b71120b6369
Terrence Cole 2013-07-05 16:54:39 -07:00
Parent f34f4b2799
Commit dd62f810d2
10 changed files with 240 additions and 315 deletions

View file

@@ -240,7 +240,7 @@ MinorGC(JSContext *cx, unsigned argc, jsval *vp)
CallArgs args = CallArgsFromVp(argc, vp);
if (args.get(0) == BooleanValue(true))
cx->runtime()->gcStoreBuffer.setOverflowed();
cx->runtime()->gcStoreBuffer.setAboutToOverflow();
MinorGC(cx->runtime(), gcreason::API);
#endif
@@ -993,9 +993,9 @@ static JSFunctionSpecWithHelp TestingFunctions[] = {
" GC via schedulegc."),
JS_FN_HELP("minorgc", ::MinorGC, 0, 0,
"minorgc([overflow])",
" Run a minor collector on the Nursery. When overflow is true, marks the\n"
" store buffer as overflowed before collecting."),
"minorgc([aboutToOverflow])",
" Run a minor collector on the Nursery. When aboutToOverflow is true, marks\n"
" the store buffer as about-to-overflow before collecting."),
JS_FN_HELP("gcparam", GCParameter, 2, 0,
"gcparam(name [, value])",

View file

@@ -90,6 +90,9 @@ class BumpChunk
size_t used() const { return bump - bumpBase(); }
void *start() const { return bumpBase(); }
void *end() const { return limit; }
size_t sizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf) {
return mallocSizeOf(this);
}
@@ -347,6 +350,19 @@ class LifoAlloc
return accum;
}
// Return true if the LifoAlloc does not currently contain any allocations.
bool isEmpty() const {
return !latest || !latest->used();
}
// Return the number of bytes remaining to allocate in the current chunk.
// e.g. How many bytes we can allocate before needing a new block.
size_t availableInCurrentChunk() const {
if (!latest)
return 0;
return latest->unused();
}
// Get the total size of the arena chunks (including unused space).
size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf) const {
size_t n = 0;
@@ -372,6 +388,76 @@ class LifoAlloc
}
JS_DECLARE_NEW_METHODS(new_, alloc, JS_ALWAYS_INLINE)
// A mutable enumeration of the allocated data.
class Enum
{
friend class LifoAlloc;
friend class detail::BumpChunk;
LifoAlloc *alloc_; // The LifoAlloc being traversed.
BumpChunk *chunk_; // The current chunk.
char *position_; // The current position (must be within chunk_).
// If there is not enough room in the remaining block for |size|,
// advance to the next block and update the position.
void ensureSpaceAndAlignment(size_t size) {
JS_ASSERT(!empty());
char *aligned = detail::AlignPtr(position_);
if (aligned + size > chunk_->end()) {
chunk_ = chunk_->next();
position_ = static_cast<char *>(chunk_->start());
} else {
position_ = aligned;
}
JS_ASSERT(uintptr_t(position_) + size <= uintptr_t(chunk_->end()));
}
public:
Enum(LifoAlloc &alloc)
: alloc_(&alloc),
chunk_(alloc.first),
position_(static_cast<char *>(alloc.first ? alloc.first->start() : NULL))
{}
// Return true if there are no more bytes to enumerate.
bool empty() {
return !chunk_ || (chunk_ == alloc_->latest && position_ >= chunk_->mark());
}
// Move the read position forward by the size of one T.
template <typename T>
void popFront() {
popFront(sizeof(T));
}
// Move the read position forward by |size| bytes.
void popFront(size_t size) {
ensureSpaceAndAlignment(size);
position_ = detail::AlignPtr(position_ + size);
}
// Update the bytes at the current position with a new value.
template <typename T>
void updateFront(const T &t) {
ensureSpaceAndAlignment(sizeof(T));
memmove(position_, &t, sizeof(T));
}
// Return a pointer to the item at the current position. This
// returns a pointer to the inline storage, not a copy.
template <typename T>
T *get(size_t size = sizeof(T)) {
ensureSpaceAndAlignment(size);
return reinterpret_cast<T *>(position_);
}
// Return a Mark at the current position of the Enum.
Mark mark() {
alloc_->markCount++;
return Mark(chunk_, position_);
}
};
};
class LifoAllocScope
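
The Enum added above is what the rewritten store buffers rely on to walk their entries in allocation order without caring about chunk boundaries. A minimal sketch of the read-only pattern, not part of the patch: it assumes this tree's ds/LifoAlloc.h, and sumEntries plus the use of size_t entries are made up for illustration.

#include "ds/LifoAlloc.h"

static size_t
sumEntries(js::LifoAlloc &alloc)
{
    // Visit every size_t that was allocated into |alloc| with new_<size_t>(),
    // oldest first. get<T>() returns a pointer into the chunk's inline
    // storage; popFront<T>() advances past the entry, re-aligning as needed.
    size_t total = 0;
    for (js::LifoAlloc::Enum e(alloc); !e.empty(); e.popFront<size_t>()) {
        size_t *entry = e.get<size_t>();
        total += *entry;
    }
    return total;
}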

View file

@@ -536,87 +536,6 @@ js::Nursery::MinorGCCallback(JSTracer *jstrc, void **thingp, JSGCTraceKind kind)
*thingp = trc->nursery->moveToTenured(trc, static_cast<JSObject *>(*thingp));
}
void
js::Nursery::markFallback(Cell *cell)
{
JS_ASSERT(uintptr_t(cell) >= start());
size_t offset = uintptr_t(cell) - start();
JS_ASSERT(offset < heapEnd() - start());
JS_ASSERT(offset % ThingAlignment == 0);
fallbackBitmap.set(offset / ThingAlignment);
}
void
js::Nursery::moveFallbackToTenured(gc::MinorCollectionTracer *trc)
{
for (size_t i = 0; i < FallbackBitmapBits; ++i) {
if (fallbackBitmap.get(i)) {
JSObject *src = reinterpret_cast<JSObject *>(start() + i * ThingAlignment);
moveToTenured(trc, src);
}
}
fallbackBitmap.clear(false);
}
/* static */ void
js::Nursery::MinorFallbackMarkingCallback(JSTracer *jstrc, void **thingp, JSGCTraceKind kind)
{
MinorCollectionTracer *trc = static_cast<MinorCollectionTracer *>(jstrc);
if (ShouldMoveToTenured(trc, thingp))
trc->nursery->markFallback(static_cast<JSObject *>(*thingp));
}
/* static */ void
js::Nursery::MinorFallbackFixupCallback(JSTracer *jstrc, void **thingp, JSGCTraceKind kind)
{
MinorCollectionTracer *trc = static_cast<MinorCollectionTracer *>(jstrc);
if (trc->nursery->isInside(*thingp))
trc->nursery->getForwardedPointer(thingp);
}
static void
TraceHeapWithCallback(JSTracer *trc, JSTraceCallback callback)
{
JSTraceCallback prior = trc->callback;
AutoCopyFreeListToArenas copy(trc->runtime);
trc->callback = callback;
for (ZonesIter zone(trc->runtime); !zone.done(); zone.next()) {
for (size_t i = 0; i < FINALIZE_LIMIT; ++i) {
AllocKind kind = AllocKind(i);
for (CellIterUnderGC cells(zone, kind); !cells.done(); cells.next())
JS_TraceChildren(trc, cells.getCell(), MapAllocToTraceKind(kind));
}
}
trc->callback = prior;
}
void
js::Nursery::markStoreBuffer(MinorCollectionTracer *trc)
{
JSRuntime *rt = trc->runtime;
if (!rt->gcStoreBuffer.hasOverflowed()) {
rt->gcStoreBuffer.mark(trc);
return;
}
/*
* If the store buffer has overflowed, we need to walk the full heap to
* discover cross-generation edges. Since we cannot easily walk the heap
* while simultaneously allocating, we use a three pass algorithm:
* 1) Walk the major heap and mark live things in the nursery in a
* pre-allocated bitmap.
* 2) Use the bitmap to move all live nursery things to the tenured
* heap.
* 3) Walk the heap a second time to find and update all of the moved
* references in the tenured heap.
*/
TraceHeapWithCallback(trc, MinorFallbackMarkingCallback);
moveFallbackToTenured(trc);
TraceHeapWithCallback(trc, MinorFallbackFixupCallback);
}
void
js::Nursery::collect(JSRuntime *rt, JS::gcreason::Reason reason)
{
@@ -643,7 +562,7 @@ js::Nursery::collect(JSRuntime *rt, JS::gcreason::Reason reason)
comp->markAllCrossCompartmentWrappers(&trc);
comp->markAllInitialShapeTableEntries(&trc);
}
markStoreBuffer(&trc);
rt->gcStoreBuffer.mark(&trc);
rt->newObjectCache.clearNurseryObjects(rt);
/*

View file

@@ -221,12 +221,6 @@ class Nursery
void setElementsForwardingPointer(ObjectElements *oldHeader, ObjectElements *newHeader,
uint32_t nelems);
/* Handle fallback marking. See the comment in MarkStoreBuffer. */
void markFallback(gc::Cell *cell);
void moveFallbackToTenured(gc::MinorCollectionTracer *trc);
void markStoreBuffer(gc::MinorCollectionTracer *trc);
/*
* Frees all non-live nursery-allocated things at the end of a minor
* collection. This operation takes time proportional to the number of
@@ -239,8 +233,6 @@ class Nursery
void shrinkAllocableSpace();
static void MinorGCCallback(JSTracer *trc, void **thingp, JSGCTraceKind kind);
static void MinorFallbackMarkingCallback(JSTracer *trc, void **thingp, JSGCTraceKind kind);
static void MinorFallbackFixupCallback(JSTracer *trc, void **thingp, JSGCTraceKind kind);
friend class gc::MinorCollectionTracer;
friend class ion::CodeGenerator;

View file

@@ -76,51 +76,26 @@ StoreBuffer::WholeCellEdges::mark(JSTracer *trc)
/*** MonoTypeBuffer ***/
/* How full we allow a store buffer to become before we request a MinorGC. */
const static double HighwaterRatio = 7.0 / 8.0;
template <typename T>
bool
StoreBuffer::MonoTypeBuffer<T>::enable(uint8_t *region, size_t len)
{
JS_ASSERT(len % sizeof(T) == 0);
base = pos = reinterpret_cast<T *>(region);
top = reinterpret_cast<T *>(region + len);
highwater = reinterpret_cast<T *>(region + size_t(double(len) * HighwaterRatio));
JS_ASSERT(highwater > base);
JS_ASSERT(highwater < top);
return true;
}
template <typename T>
void
StoreBuffer::MonoTypeBuffer<T>::disable()
{
base = pos = top = highwater = NULL;
}
template <typename T>
void
StoreBuffer::MonoTypeBuffer<T>::clear()
{
pos = base;
}
template <typename T>
void
StoreBuffer::MonoTypeBuffer<T>::compactRemoveDuplicates()
{
EdgeSet &duplicates = owner->edgeSet;
JS_ASSERT(duplicates.empty());
T *insert = base;
for (T *v = base; v != pos; ++v) {
if (!duplicates.has(v->location())) {
*insert++ = *v;
LifoAlloc::Enum insert(storage_);
for (LifoAlloc::Enum e(storage_); !e.empty(); e.popFront<T>()) {
T *edge = e.get<T>();
if (!duplicates.has(edge->location())) {
insert.updateFront<T>(*edge);
insert.popFront<T>();
/* Failure to insert will leave the set with duplicates. Oh well. */
duplicates.put(v->location());
duplicates.put(edge->location());
}
}
pos = insert;
storage_.release(insert.mark());
duplicates.clear();
}
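The two-cursor idiom used just above, reduced to its core (illustrative only, not part of the patch; keepNonZero is a made-up name and it assumes |alloc| only ever held size_t entries allocated with new_<size_t>()):

static void
keepNonZero(js::LifoAlloc &alloc)
{
    // |insert| trails the read cursor |e|: kept entries are copied down over
    // the front of the buffer, then release() discards everything after the
    // last kept entry, exactly as compactRemoveDuplicates() does with edges.
    js::LifoAlloc::Enum insert(alloc);
    for (js::LifoAlloc::Enum e(alloc); !e.empty(); e.popFront<size_t>()) {
        size_t *entry = e.get<size_t>();
        if (*entry != 0) {
            insert.updateFront<size_t>(*entry);
            insert.popFront<size_t>();
        }
    }
    alloc.release(insert.mark());
}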
@@ -136,15 +111,14 @@ void
StoreBuffer::MonoTypeBuffer<T>::mark(JSTracer *trc)
{
ReentrancyGuard g(*this);
compact();
T *cursor = base;
while (cursor != pos) {
T edge = *cursor++;
if (edge.isNullEdge())
for (LifoAlloc::Enum e(storage_); !e.empty(); e.popFront<T>()) {
T *edge = e.get<T>();
if (edge->isNullEdge())
continue;
edge->mark(trc);
edge.mark(trc);
}
}
@@ -173,26 +147,37 @@ template <typename T>
void
StoreBuffer::RelocatableMonoTypeBuffer<T>::compactMoved()
{
for (T *v = this->base; v != this->pos; ++v) {
if (v->isTagged()) {
T match = v->untagged();
for (T *r = this->base; r != v; ++r) {
T check = r->untagged();
if (check == match)
*r = T(NULL);
}
*v = T(NULL);
LifoAlloc &storage = this->storage_;
EdgeSet &invalidated = this->owner->edgeSet;
JS_ASSERT(invalidated.empty());
/* Collect the set of entries which are currently invalid. */
for (LifoAlloc::Enum e(storage); !e.empty(); e.popFront<T>()) {
T *edge = e.get<T>();
if (edge->isTagged()) {
if (!invalidated.put(edge->location()))
MOZ_CRASH("RelocatableMonoTypeBuffer::compactMoved: Failed to put removal.");
} else {
invalidated.remove(edge->location());
}
}
T *insert = this->base;
for (T *cursor = this->base; cursor != this->pos; ++cursor) {
if (*cursor != T(NULL))
*insert++ = *cursor;
/* Remove all entries which are in the invalidated set. */
LifoAlloc::Enum insert(storage);
for (LifoAlloc::Enum e(storage); !e.empty(); e.popFront<T>()) {
T *edge = e.get<T>();
if (!edge->isTagged() && !invalidated.has(edge->location())) {
insert.updateFront<T>(*edge);
insert.popFront<T>();
}
}
this->pos = insert;
storage.release(insert.mark());
invalidated.clear();
#ifdef DEBUG
for (T *cursor = this->base; cursor != this->pos; ++cursor)
JS_ASSERT(!cursor->isTagged());
for (LifoAlloc::Enum e(storage); !e.empty(); e.popFront<T>())
JS_ASSERT(!e.get<T>()->isTagged());
#endif
}
@@ -206,40 +191,17 @@ StoreBuffer::RelocatableMonoTypeBuffer<T>::compact()
/*** GenericBuffer ***/
bool
StoreBuffer::GenericBuffer::enable(uint8_t *region, size_t len)
{
base = pos = region;
top = region + len;
return true;
}
void
StoreBuffer::GenericBuffer::disable()
{
base = pos = top = NULL;
}
void
StoreBuffer::GenericBuffer::clear()
{
pos = base;
}
void
StoreBuffer::GenericBuffer::mark(JSTracer *trc)
{
ReentrancyGuard g(*this);
uint8_t *p = base;
while (p < pos) {
unsigned size = *((unsigned *)p);
p += sizeof(unsigned);
BufferableRef *edge = reinterpret_cast<BufferableRef *>(p);
for (LifoAlloc::Enum e(storage_); !e.empty();) {
unsigned size = *e.get<unsigned>();
e.popFront<unsigned>();
BufferableRef *edge = e.get<BufferableRef>(size);
edge->mark(trc);
p += size;
e.popFront(size);
}
}
@@ -274,43 +236,13 @@ StoreBuffer::enable()
if (enabled)
return true;
buffer = js_malloc(TotalSize);
if (!buffer)
return false;
/* Initialize the individual edge buffers in sub-regions. */
uint8_t *asBytes = static_cast<uint8_t *>(buffer);
size_t offset = 0;
if (!bufferVal.enable(&asBytes[offset], ValueBufferSize))
return false;
offset += ValueBufferSize;
if (!bufferCell.enable(&asBytes[offset], CellBufferSize))
return false;
offset += CellBufferSize;
if (!bufferSlot.enable(&asBytes[offset], SlotBufferSize))
return false;
offset += SlotBufferSize;
if (!bufferWholeCell.enable(&asBytes[offset], WholeCellBufferSize))
return false;
offset += WholeCellBufferSize;
if (!bufferRelocVal.enable(&asBytes[offset], RelocValueBufferSize))
return false;
offset += RelocValueBufferSize;
if (!bufferRelocCell.enable(&asBytes[offset], RelocCellBufferSize))
return false;
offset += RelocCellBufferSize;
if (!bufferGeneric.enable(&asBytes[offset], GenericBufferSize))
return false;
offset += GenericBufferSize;
JS_ASSERT(offset == TotalSize);
bufferVal.enable();
bufferCell.enable();
bufferSlot.enable();
bufferWholeCell.enable();
bufferRelocVal.enable();
bufferRelocCell.enable();
bufferGeneric.enable();
enabled = true;
return true;
@@ -332,9 +264,7 @@ StoreBuffer::disable()
bufferRelocCell.disable();
bufferGeneric.disable();
js_free(buffer);
enabled = false;
overflowed = false;
}
bool
@@ -360,7 +290,6 @@ void
StoreBuffer::mark(JSTracer *trc)
{
JS_ASSERT(isEnabled());
JS_ASSERT(!overflowed);
bufferVal.mark(trc);
bufferCell.mark(trc);
@@ -378,13 +307,6 @@ StoreBuffer::setAboutToOverflow()
runtime->triggerOperationCallback();
}
void
StoreBuffer::setOverflowed()
{
JS_ASSERT(enabled);
overflowed = true;
}
bool
StoreBuffer::inParallelSection() const
{

View file

@@ -20,6 +20,7 @@
#include "jsgc.h"
#include "jsobj.h"
#include "ds/LifoAlloc.h"
#include "gc/Nursery.h"
namespace js {
@@ -71,6 +72,12 @@ typedef HashSet<void *, PointerHasher<void *, 3>, SystemAllocPolicy> EdgeSet;
*/
class StoreBuffer
{
/* The size of a single block of store buffer storage space. */
const static size_t ChunkSize = 1 << 16; /* 64KiB */
/* The size at which a block is about to overflow. */
const static size_t MinAvailableSize = (size_t)(ChunkSize * 1.0 / 8.0);
/*
* This buffer holds only a single type of edge. Using this buffer is more
* efficient than the generic buffer when many writes will be to the same
@@ -84,40 +91,23 @@ class StoreBuffer
StoreBuffer *owner;
T *base; /* Pointer to the start of the buffer. */
T *pos; /* Pointer to the current insertion position. */
T *top; /* Pointer to one element after the end. */
/*
* If the buffer's insertion position goes over the high-water-mark,
* we trigger a minor GC at the next operation callback.
*/
T *highwater;
/*
* This set stores duplicates found when compacting. We create the set
* here, rather than local to the algorithm to avoid malloc overhead in
* the common case.
*/
EdgeSet duplicates;
LifoAlloc storage_;
bool enabled_;
mozilla::DebugOnly<bool> entered;
explicit MonoTypeBuffer(StoreBuffer *owner)
: owner(owner), base(NULL), pos(NULL), top(NULL), entered(false)
{
duplicates.init();
}
: owner(owner), storage_(ChunkSize), enabled_(false), entered(false)
{}
MonoTypeBuffer &operator=(const MonoTypeBuffer& other) MOZ_DELETE;
bool enable(uint8_t *region, size_t len);
void disable();
void clear();
void enable() { enabled_ = true; }
void disable() { enabled_ = false; clear(); }
void clear() { storage_.used() ? storage_.releaseAll() : storage_.freeAll(); }
bool isEmpty() const { return pos == base; }
bool isFull() const { JS_ASSERT(pos <= top); return pos == top; }
bool isAboutToOverflow() const { return pos >= highwater; }
bool isAboutToOverflow() const {
return !storage_.isEmpty() && storage_.availableInCurrentChunk() < MinAvailableSize;
}
/* Compaction algorithms. */
void compactRemoveDuplicates();
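For scale, the arithmetic behind the new about-to-overflow heuristic (illustrative only; the per-edge estimate assumes a 64-bit build and is not part of the patch):

// Each buffer now grows in 64 KiB LifoAlloc chunks and reports itself as
// about to overflow once the current chunk has under 1/8 of a chunk left.
static const size_t ChunkSize        = 1 << 16;        // 65536 bytes
static const size_t MinAvailableSize = ChunkSize / 8;  //  8192 bytes
// e.g. a MonoTypeBuffer<CellPtrEdge> stores one pointer per edge, so a fresh
// chunk takes roughly (65536 - 8192) / 8 = 7168 edges on a 64-bit build
// before put() first tries compact() and, failing that, setAboutToOverflow().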
@@ -129,30 +119,21 @@ class StoreBuffer
virtual void compact();
/* Add one item to the buffer. */
void put(const T &v) {
void put(const T &t) {
mozilla::ReentrancyGuard g(*this);
JS_ASSERT(!owner->inParallelSection());
/* Check if we have been enabled. */
if (!pos)
if (!enabled_)
return;
/*
* Note: it is sometimes valid for a put to happen in the middle of a GC,
* e.g. a rekey of a Relocatable may end up here. In general, we do not
* care about these new entries or any overflows they cause.
*/
*pos++ = v;
T *tp = storage_.new_<T>(t);
if (!tp)
MOZ_CRASH();
if (isAboutToOverflow()) {
compact();
if (isAboutToOverflow())
owner->setAboutToOverflow();
if (isFull()) {
owner->setOverflowed();
clear();
}
}
}
}
@@ -190,22 +171,23 @@ class StoreBuffer
friend class mozilla::ReentrancyGuard;
StoreBuffer *owner;
uint8_t *base; /* Pointer to start of buffer. */
uint8_t *pos; /* Pointer to current buffer position. */
uint8_t *top; /* Pointer to one past the last entry. */
LifoAlloc storage_;
bool enabled_;
mozilla::DebugOnly<bool> entered;
explicit GenericBuffer(StoreBuffer *owner)
: owner(owner), base(NULL), pos(NULL), top(NULL), entered(false)
: owner(owner), storage_(ChunkSize), enabled_(false), entered(false)
{}
GenericBuffer &operator=(const GenericBuffer& other) MOZ_DELETE;
bool enable(uint8_t *region, size_t len);
void disable();
void clear();
void enable() { enabled_ = true; }
void disable() { enabled_ = false; clear(); }
void clear() { storage_.used() ? storage_.releaseAll() : storage_.freeAll(); }
bool isAboutToOverflow() const {
return !storage_.isEmpty() && storage_.availableInCurrentChunk() < MinAvailableSize;
}
/* Mark all generic edges. */
void mark(JSTracer *trc);
@@ -218,22 +200,21 @@ class StoreBuffer
/* Ensure T is derived from BufferableRef. */
(void)static_cast<const BufferableRef*>(&t);
/* Check if we have been enabled. */
if (!pos)
if (!enabled_)
return;
/* Check for overflow. */
if (unsigned(top - pos) < unsigned(sizeof(unsigned) + sizeof(T))) {
owner->setOverflowed();
return;
}
unsigned size = sizeof(T);
unsigned *sizep = storage_.newPod<unsigned>();
if (!sizep)
MOZ_CRASH();
*sizep = size;
*((unsigned *)pos) = sizeof(T);
pos += sizeof(unsigned);
T *tp = storage_.new_<T>(t);
if (!tp)
MOZ_CRASH();
T *p = (T *)pos;
new (p) T(t);
pos += sizeof(T);
if (isAboutToOverflow())
owner->setAboutToOverflow();
}
};
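A sketch of what one generic-buffer entry now looks like in storage_ (illustrative only, not part of the patch; WholeObjectEdge is a made-up type, and it assumes BufferableRef lives in js::gc like the rest of this header and that its sole requirement is a virtual mark(JSTracer *), as the templated put() above implies):

// Each insertion through put() appends two LifoAlloc allocations,
//
//     [ unsigned size ][ copy-constructed T, |size| bytes ]
//
// and mark() walks them back by reading the size word, then fetching the
// payload with get<BufferableRef>(size) and calling its virtual mark().
class WholeObjectEdge : public js::gc::BufferableRef
{
    JSObject **edgep;

  public:
    explicit WholeObjectEdge(JSObject **edgep) : edgep(edgep) {}

    virtual void mark(JSTracer *trc) {
        // Trace *edgep however this edge kind requires.
    }
};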
@@ -249,7 +230,7 @@ class StoreBuffer
bool operator==(const CellPtrEdge &other) const { return edge == other.edge; }
bool operator!=(const CellPtrEdge &other) const { return edge != other.edge; }
void *location() const { return (void *)edge; }
void *location() const { return (void *)untagged().edge; }
bool inRememberedSet(const Nursery &nursery) const {
return !nursery.isInside(edge) && nursery.isInside(*edge);
@@ -279,7 +260,7 @@ class StoreBuffer
bool operator!=(const ValueEdge &other) const { return edge != other.edge; }
void *deref() const { return edge->isGCThing() ? edge->toGCThing() : NULL; }
void *location() const { return (void *)edge; }
void *location() const { return (void *)untagged().edge; }
bool inRememberedSet(const Nursery &nursery) const {
return !nursery.isInside(edge) && nursery.isInside(deref());
@@ -376,35 +357,23 @@ class StoreBuffer
RelocatableMonoTypeBuffer<CellPtrEdge> bufferRelocCell;
GenericBuffer bufferGeneric;
/* This set is used as temporary storage by the buffers when compacting. */
EdgeSet edgeSet;
JSRuntime *runtime;
const Nursery &nursery_;
void *buffer;
bool aboutToOverflow;
bool overflowed;
bool enabled;
/* TODO: profile to find the ideal size for these. */
static const size_t ValueBufferSize = 1 * 1024 * sizeof(ValueEdge);
static const size_t CellBufferSize = 8 * 1024 * sizeof(CellPtrEdge);
static const size_t SlotBufferSize = 2 * 1024 * sizeof(SlotEdge);
static const size_t WholeCellBufferSize = 2 * 1024 * sizeof(WholeCellEdges);
static const size_t RelocValueBufferSize = 1 * 1024 * sizeof(ValueEdge);
static const size_t RelocCellBufferSize = 1 * 1024 * sizeof(CellPtrEdge);
static const size_t GenericBufferSize = 1 * 1024 * sizeof(int);
static const size_t TotalSize = ValueBufferSize + CellBufferSize +
SlotBufferSize + WholeCellBufferSize +
RelocValueBufferSize + RelocCellBufferSize +
GenericBufferSize;
public:
explicit StoreBuffer(JSRuntime *rt, const Nursery &nursery)
: bufferVal(this), bufferCell(this), bufferSlot(this), bufferWholeCell(this),
bufferRelocVal(this), bufferRelocCell(this), bufferGeneric(this),
runtime(rt), nursery_(nursery), buffer(NULL), aboutToOverflow(false), overflowed(false),
enabled(false)
{}
runtime(rt), nursery_(nursery), aboutToOverflow(false), enabled(false)
{
edgeSet.init();
}
bool enable();
void disable();
@@ -414,7 +383,6 @@ class StoreBuffer
/* Get the overflowed status. */
bool isAboutToOverflow() const { return aboutToOverflow; }
bool hasOverflowed() const { return overflowed; }
/* Insert a single edge into the buffer/remembered set. */
void putValue(Value *valuep) {

View file

@@ -741,9 +741,6 @@ js::gc::EndVerifyPostBarriers(JSRuntime *rt)
VerifyPostTracer *trc = (VerifyPostTracer *)rt->gcVerifyPostData;
if (rt->gcStoreBuffer.hasOverflowed())
goto oom;
/* Visit every entry in the store buffer and put the edges in a hash set. */
JS_TracerInit(trc, rt, PostVerifierCollectStoreBufferEdges);
if (!edges.init())

View file

@@ -0,0 +1,13 @@
// |jit-test| error:TypeError
gc();
var recursiveFunctions = [{
text: "(function(){if(a){}g()})"
}];
(function testAllRecursiveFunctions() {
for (var i = 0; i < recursiveFunctions.length; ++i) {
var a = recursiveFunctions[i];
eval(a.text.replace(/@/g, ""))
}
})();
gcslice(2868);
Function("v={c:[{x:[[]],N:{x:[{}[d]]}}]}=minorgc(true)")()

View file

@@ -0,0 +1,14 @@
// |jit-test| error:TypeError
(function(){})
gc();
var recursiveFunctions = [{
text: "(function(){if(a){}g()})"
}];
(function testAllRecursiveFunctions() {
for (var i = 0; i < recursiveFunctions.length; ++i) {
var a = recursiveFunctions[i];
eval(a.text.replace(/@/g, ""))
}
})();
gcslice(2868);
Function("v={c:[{x:[[]],N:{x:[{}[d]]}}]}=minorgc(true)")()

View file

@@ -0,0 +1,14 @@
// |jit-test| error:TypeError
function f(){}
gc();
var recursiveFunctions = [{
text: "(function(){if(a){}g()})"
}];
(function testAllRecursiveFunctions() {
for (var i = 0; i < recursiveFunctions.length; ++i) {
var a = recursiveFunctions[i];
eval(a.text.replace(/@/g, ""))
}
})();
gcslice(2868);
Function("v={c:[{x:[[]],N:{x:[{}[d]]}}]}=minorgc(true)")()