Mirror of https://github.com/mozilla/gecko-dev.git

Bug 650161 - Fix test failures with compacting GC enabled r=terrence

This commit is contained in:
Parent: 38d0f009a8
Commit: fccac2a690
@@ -169,6 +169,7 @@ class BaseShape;
 class DebugScopeObject;
 class GlobalObject;
 class LazyScript;
+class NestedScopeObject;
 class Nursery;
 class ObjectImpl;
 class PropertyName;
@@ -217,6 +218,7 @@ template <> struct MapTypeToTraceKind<JSObject> { static const JSGCTrace
 template <> struct MapTypeToTraceKind<JSScript> { static const JSGCTraceKind kind = JSTRACE_SCRIPT; };
 template <> struct MapTypeToTraceKind<JSString> { static const JSGCTraceKind kind = JSTRACE_STRING; };
 template <> struct MapTypeToTraceKind<LazyScript> { static const JSGCTraceKind kind = JSTRACE_LAZY_SCRIPT; };
+template <> struct MapTypeToTraceKind<NestedScopeObject>{ static const JSGCTraceKind kind = JSTRACE_OBJECT; };
 template <> struct MapTypeToTraceKind<ObjectImpl> { static const JSGCTraceKind kind = JSTRACE_OBJECT; };
 template <> struct MapTypeToTraceKind<PropertyName> { static const JSGCTraceKind kind = JSTRACE_STRING; };
 template <> struct MapTypeToTraceKind<SavedFrame> { static const JSGCTraceKind kind = JSTRACE_OBJECT; };
@@ -396,6 +396,11 @@ class GCRuntime
 void disableGenerationalGC();
 void enableGenerationalGC();

+#ifdef JSGC_COMPACTING
+void disableCompactingGC();
+void enableCompactingGC();
+#endif
+
 void setGrayRootsTracer(JSTraceDataOp traceOp, void *data);
 bool addBlackRootsTracer(JSTraceDataOp traceOp, void *data);
 void removeBlackRootsTracer(JSTraceDataOp traceOp, void *data);
@@ -713,6 +718,15 @@ class GCRuntime
 */
 unsigned generationalDisabled;

+#ifdef JSGC_COMPACTING
+/*
+* Some code cannot tolerate compacting GC so it can be disabled with this
+* counter. This can happen from code executing in a ThreadSafeContext so
+* we make it atomic.
+*/
+mozilla::Atomic<uint32_t, mozilla::ReleaseAcquire> compactingDisabled;
+#endif
+
 /*
 * This is true if we are in the middle of a brain transplant (e.g.,
 * JS_TransplantObject) or some other operation that can manipulate
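
A note on the counter pattern introduced above: compactingDisabled is a nesting count rather than a flag, so independent callers can overlap, and it is atomic because a ThreadSafeContext may bump it off the main thread. The sketch below is a minimal stand-alone illustration of that pattern together with the scoped guard this patch declares later; MiniGC and AutoDisableCompacting are invented names, not SpiderMonkey API.

    #include <atomic>
    #include <cassert>
    #include <cstdint>

    // Hypothetical stand-ins for GCRuntime and AutoDisableCompactingGC.
    struct MiniGC {
        std::atomic<uint32_t> compactingDisabled{0};  // nesting counter, atomic so any thread may bump it

        void disableCompacting() { ++compactingDisabled; }
        void enableCompacting()  { assert(compactingDisabled > 0); --compactingDisabled; }

        bool shouldCompact(bool shrinking) const {
            // Mirrors shouldCompact(): only shrinking GCs compact, and only when nothing vetoed it.
            return shrinking && compactingDisabled == 0;
        }
    };

    class AutoDisableCompacting {
        MiniGC &gc_;
      public:
        explicit AutoDisableCompacting(MiniGC &gc) : gc_(gc) { gc_.disableCompacting(); }
        ~AutoDisableCompacting() { gc_.enableCompacting(); }
    };

    int main() {
        MiniGC gc;
        {
            AutoDisableCompacting guard(gc);           // code that holds raw cell pointers
            assert(!gc.shouldCompact(/*shrinking*/ true));
        }
        assert(gc.shouldCompact(true));                // compacting allowed again once the guard is gone
    }
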
@@ -629,6 +629,7 @@ DeclMarkerImpl(Object, DebugScopeObject)
 DeclMarkerImpl(Object, GlobalObject)
 DeclMarkerImpl(Object, JSObject)
 DeclMarkerImpl(Object, JSFunction)
+DeclMarkerImpl(Object, NestedScopeObject)
 DeclMarkerImpl(Object, ObjectImpl)
 DeclMarkerImpl(Object, SavedFrame)
 DeclMarkerImpl(Object, ScopeObject)
@@ -23,6 +23,7 @@ class DebugScopeObject;
 class GCMarker;
 class GlobalObject;
 class LazyScript;
+class NestedScopeObject;
 class SavedFrame;
 class ScopeObject;
 class Shape;
@@ -112,6 +113,7 @@ DeclMarker(Object, DebugScopeObject)
 DeclMarker(Object, GlobalObject)
 DeclMarker(Object, JSObject)
 DeclMarker(Object, JSFunction)
+DeclMarker(Object, NestedScopeObject)
 DeclMarker(Object, SavedFrame)
 DeclMarker(Object, ScopeObject)
 DeclMarker(Script, JSScript)
@@ -637,10 +637,7 @@ GCMarker::markBufferedGrayRoots(JS::Zone *zone)
 #ifdef DEBUG
 setTracingDetails(elem->debugPrinter, elem->debugPrintArg, elem->debugPrintIndex);
 #endif
-void *tmp = elem->thing;
-setTracingLocation((void *)&elem->thing);
-MarkKind(this, &tmp, elem->kind);
-JS_ASSERT(tmp == elem->thing);
+MarkKind(this, &elem->thing, elem->kind);
 }
 }

@@ -20,6 +20,7 @@
 #include "gc/Marking.h"
 #include "jit/JitCompartment.h"
 #include "js/RootingAPI.h"
+#include "vm/Debugger.h"
 #include "vm/StopIterationObject.h"
 #include "vm/WrapperObject.h"

@@ -627,6 +628,17 @@ JSCompartment::sweep(FreeOp *fop, bool releaseTypes)
 ni->unlink();
 ni = next;
 }
+
+/* For each debuggee being GC'd, detach it from all its debuggers. */
+for (GlobalObjectSet::Enum e(debuggees); !e.empty(); e.popFront()) {
+GlobalObject *global = e.front();
+if (IsObjectAboutToBeFinalized(&global)) {
+// See infallibility note above.
+Debugger::detachAllDebuggersFromGlobal(fop, global, &e);
+} else if (global != e.front()) {
+e.rekeyFront(global);
+}
+}
 }

 /*
@@ -881,7 +893,7 @@ JSCompartment::updateJITForDebugMode(JSContext *maybecx, AutoDebugModeInvalidati
 }

 bool
-JSCompartment::addDebuggee(JSContext *cx, js::GlobalObject *global)
+JSCompartment::addDebuggee(JSContext *cx, JS::Handle<js::GlobalObject *> global)
 {
 AutoDebugModeInvalidation invalidate(this);
 return addDebuggee(cx, global, invalidate);
@@ -889,11 +901,9 @@ JSCompartment::addDebuggee(JSContext *cx, js::GlobalObject *global)

 bool
 JSCompartment::addDebuggee(JSContext *cx,
-GlobalObject *globalArg,
+JS::Handle<GlobalObject *> global,
 AutoDebugModeInvalidation &invalidate)
 {
-Rooted<GlobalObject*> global(cx, globalArg);
-
 bool wasEnabled = debugMode();
 if (!debuggees.put(global)) {
 js_ReportOutOfMemory(cx);
@@ -423,8 +423,8 @@ struct JSCompartment

 public:
 js::GlobalObjectSet &getDebuggees() { return debuggees; }
-bool addDebuggee(JSContext *cx, js::GlobalObject *global);
-bool addDebuggee(JSContext *cx, js::GlobalObject *global,
+bool addDebuggee(JSContext *cx, JS::Handle<js::GlobalObject *> global);
+bool addDebuggee(JSContext *cx, JS::Handle<js::GlobalObject *> global,
 js::AutoDebugModeInvalidation &invalidate);
 bool removeDebuggee(JSContext *cx, js::GlobalObject *global,
 js::GlobalObjectSet::Enum *debuggeesEnum = nullptr);
@@ -1158,6 +1158,9 @@ GCRuntime::GCRuntime(JSRuntime *rt) :
 sliceBudget(SliceBudget::Unlimited),
 incrementalAllowed(true),
 generationalDisabled(0),
+#ifdef JSGC_COMPACTING
+compactingDisabled(0),
+#endif
 manipulatingDeadZones(false),
 objectsMarkedInDeadZones(0),
 poked(false),
@@ -2004,7 +2007,7 @@ bool
 GCRuntime::shouldCompact()
 {
 #ifdef JSGC_COMPACTING
-return invocationKind == GC_SHRINK;
+return invocationKind == GC_SHRINK && !compactingDisabled;
 #else
 return false;
 #endif
@@ -2012,6 +2015,30 @@ GCRuntime::shouldCompact()

 #ifdef JSGC_COMPACTING

+void
+GCRuntime::disableCompactingGC()
+{
+++rt->gc.compactingDisabled;
+}
+
+void
+GCRuntime::enableCompactingGC()
+{
+JS_ASSERT(compactingDisabled > 0);
+--compactingDisabled;
+}
+
+AutoDisableCompactingGC::AutoDisableCompactingGC(JSRuntime *rt)
+: gc(rt->gc)
+{
+gc.disableCompactingGC();
+}
+
+AutoDisableCompactingGC::~AutoDisableCompactingGC()
+{
+gc.enableCompactingGC();
+}
+
 static void
 ForwardCell(Cell *dest, Cell *src)
 {
@@ -2048,9 +2075,11 @@ CanRelocateArena(ArenaHeader *arena)
 /*
 * We can't currently move global objects because their address is baked
 * into compiled code. We therefore skip moving the contents of any arena
-* containing a global.
+* containing a global if ion or baseline are enabled.
 */
-return arena->getAllocKind() <= FINALIZE_OBJECT_LAST && !ArenaContainsGlobal(arena);
+JSRuntime *rt = arena->zone->runtimeFromMainThread();
+return arena->getAllocKind() <= FINALIZE_OBJECT_LAST &&
+((!rt->options().baseline() && !rt->options().ion()) || !ArenaContainsGlobal(arena));
 }

 static bool
@@ -2103,6 +2132,14 @@ ArenaList::pickArenasToRelocate()
 return head;
 }

+#ifdef DEBUG
+inline bool
+PtrIsInRange(void *ptr, void *start, size_t length)
+{
+return uintptr_t(ptr) - uintptr_t(start) < length;
+}
+#endif
+
 static bool
 RelocateCell(Zone *zone, Cell *src, AllocKind thingKind, size_t thingSize)
 {
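
PtrIsInRange above answers a two-sided question with a single unsigned comparison: converting to uintptr_t makes a pointer below start wrap around to a huge difference, so one "< length" check rejects it as well as anything at or past the end. A standalone copy of the helper with a tiny self-test; the buffers are illustrative, only the helper mirrors the patch.

    #include <cassert>
    #include <cstddef>
    #include <cstdint>

    // Same shape as the helper in the patch: one unsigned compare covers both
    // "ptr >= start" and "ptr < start + length", because a pointer below start
    // wraps to a very large unsigned difference.
    inline bool PtrIsInRange(void *ptr, void *start, size_t length)
    {
        return uintptr_t(ptr) - uintptr_t(start) < length;
    }

    int main()
    {
        char buffer[64];
        char other[8];
        assert(PtrIsInRange(buffer + 10, buffer, sizeof(buffer)));   // interior pointer
        assert(!PtrIsInRange(buffer + 64, buffer, sizeof(buffer)));  // one past the end
        assert(!PtrIsInRange(other, buffer, sizeof(buffer)));        // unrelated storage
    }
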
@@ -2116,22 +2153,38 @@ RelocateCell(Zone *zone, Cell *src, AllocKind thingKind, size_t thingSize)
 // Copy source cell contents to destination.
 memcpy(dst, src, thingSize);

-// Mark source cell as forwarded and leave a pointer to the destination.
-ForwardCell(static_cast<Cell *>(dst), src);
-
 // Fixup the pointer to inline object elements if necessary.
 if (thingKind <= FINALIZE_OBJECT_LAST) {
 JSObject *srcObj = static_cast<JSObject *>(src);
 JSObject *dstObj = static_cast<JSObject *>(dst);
 if (srcObj->hasFixedElements())
 dstObj->setFixedElements();
-JS_ASSERT(
-uintptr_t((HeapSlot*)dstObj->getElementsHeader()) - uintptr_t(srcObj) >= thingSize);
+
+if (srcObj->is<ArrayBufferObject>()) {
+// We must fix up any inline data pointers while we know the source
+// object and before we mark any of the views.
+ArrayBufferObject::fixupDataPointerAfterMovingGC(
+srcObj->as<ArrayBufferObject>(), dstObj->as<ArrayBufferObject>());
+} else if (srcObj->is<TypedArrayObject>()) {
+TypedArrayObject &typedArray = srcObj->as<TypedArrayObject>();
+if (!typedArray.hasBuffer()) {
+JS_ASSERT(srcObj->getPrivate() ==
+srcObj->fixedData(TypedArrayObject::FIXED_DATA_START));
+dstObj->setPrivate(dstObj->fixedData(TypedArrayObject::FIXED_DATA_START));
+}
+}
+
+
+JS_ASSERT_IF(dstObj->isNative(),
+!PtrIsInRange((HeapSlot*)dstObj->getDenseElements(), src, thingSize));
 }

 // Copy the mark bits.
 static_cast<Cell *>(dst)->copyMarkBitsFrom(src);

+// Mark source cell as forwarded and leave a pointer to the destination.
+ForwardCell(static_cast<Cell *>(dst), src);
+
 return true;
 }

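
The hunk above reorders RelocateCell so that ForwardCell runs last: the source cell has to stay intact while inline-element and array-buffer pointers are being fixed up, and only after the mark bits are copied is the source overwritten with a forwarding pointer. A toy model of that ordering follows, using an invented ToyCell type rather than the real Cell/Arena machinery.

    #include <cassert>
    #include <cstring>

    // Toy cell for illustrating the move protocol: copy the bytes, copy the mark
    // state, then overwrite the source with a forwarding pointer, in that order,
    // so the source stays readable until every fixup that needs it has run.
    struct ToyCell {
        int payload;
        bool marked;
        bool forwarded;
        ToyCell *forward;
    };

    void Relocate(ToyCell *dst, ToyCell *src)
    {
        std::memcpy(dst, src, sizeof(ToyCell));   // cell contents
        dst->marked = src->marked;                // stands in for copyMarkBitsFrom(); in the real
                                                  // GC mark bits live outside the cell, hence a
                                                  // separate copy step
        src->forwarded = true;                    // only now clobber the source
        src->forward = dst;
    }

    int main()
    {
        ToyCell from{42, true, false, nullptr};
        ToyCell to{};
        Relocate(&to, &from);
        assert(to.payload == 42 && to.marked);
        assert(from.forwarded && from.forward == &to);
    }
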
@@ -2251,10 +2304,12 @@ void
 MovingTracer::Visit(JSTracer *jstrc, void **thingp, JSGCTraceKind kind)
 {
 Cell *thing = static_cast<Cell *>(*thingp);
-if (!thing->tenuredZone()->isGCCompacting()) {
+Zone *zone = thing->tenuredZoneFromAnyThread();
+if (!zone->isGCCompacting()) {
 JS_ASSERT(!IsForwarded(thing));
 return;
 }
+JS_ASSERT(CurrentThreadCanAccessZone(zone));

 if (IsForwarded(thing)) {
 Cell *dst = Forwarded(thing);
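
MovingTracer::Visit is the consumer of those forwarding pointers: every traced edge is checked, and an edge whose target has been relocated is rewritten to the new address. A minimal, self-contained version of that per-edge update, with a hypothetical MiniCell instead of the real Cell:

    #include <cassert>

    // An edge (pointer slot) is rewritten when its target cell has been
    // forwarded to a new location.
    struct MiniCell {
        bool forwarded = false;
        MiniCell *forward = nullptr;
    };

    void UpdateEdge(MiniCell **edge)
    {
        if ((*edge)->forwarded)
            *edge = (*edge)->forward;   // the same chase MovingTracer::Visit does per edge
    }

    int main()
    {
        MiniCell oldCell, newCell;
        oldCell.forwarded = true;
        oldCell.forward = &newCell;

        MiniCell *slot = &oldCell;      // a heap edge still pointing at the old location
        UpdateEdge(&slot);
        assert(slot == &newCell);
    }
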
@@ -2280,7 +2335,6 @@ MovingTracer::Sweep(JSTracer *jstrc)

 for (CompartmentsInZoneIter c(zone); !c.done(); c.next()) {
 c->sweep(fop, false);
-ArrayBufferObject::sweep(c);
 }
 } else {
 /* Update cross compartment wrappers into moved zones. */
@@ -2291,6 +2345,9 @@ MovingTracer::Sweep(JSTracer *jstrc)

 /* Type inference may put more blocks here to free. */
 rt->freeLifoAlloc.freeAll();
+
+/* Clear the new object cache as this can contain cell pointers. */
+rt->newObjectCache.purge();
 }

 /*
@@ -2359,12 +2416,15 @@ GCRuntime::updatePointersToRelocatedCells()
 Debugger::markCrossCompartmentDebuggerObjectReferents(&trc);

 for (GCCompartmentsIter c(rt); !c.done(); c.next()) {
+WeakMapBase::markAll(c, &trc);
 if (c->watchpointMap)
 c->watchpointMap->markAll(&trc);
 }

+// Mark all gray roots, making sure we call the trace callback to get the
+// current set.
+marker.resetBufferedGrayRoots();
 markAllGrayReferences(gcstats::PHASE_COMPACT_UPDATE_GRAY);
-markAllWeakReferences(gcstats::PHASE_COMPACT_UPDATE_GRAY);

 MovingTracer::Sweep(&trc);
 }
@@ -1427,6 +1427,20 @@ struct AutoDisableProxyCheck
 };
 #endif

+struct AutoDisableCompactingGC
+{
+#ifdef JSGC_COMPACTING
+explicit AutoDisableCompactingGC(JSRuntime *rt);
+~AutoDisableCompactingGC();
+
+private:
+gc::GCRuntime &gc;
+#else
+explicit AutoDisableCompactingGC(JSRuntime *rt) {}
+~AutoDisableCompactingGC() {}
+#endif
+};
+
 void
 PurgeJITCaches(JS::Zone *zone);

@@ -13,7 +13,7 @@ namespace js {

 /*
 * Used to add entries to a js::HashMap or HashSet where the key depends on a GC
-* thing that may be moved by generational collection between the call to
+* thing that may be moved by generational or compacting GC between the call to
 * lookupForAdd() and relookupOrAdd().
 */
 template <class T>
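
The comment above names the hazard DependentAddPtr exists for: an AddPtr from lookupForAdd() caches a table position, and an allocation between the lookup and the insert can trigger a GC that moves the key or rehashes the set, so the cached position must not be trusted. The sketch below only illustrates the API shape (look up, allocate, then add through the same object, which redoes the lookup at insertion time); it uses std::unordered_set as a stand-in, so none of it is the real js::DependentAddPtr.

    #include <cassert>
    #include <string>
    #include <unordered_set>

    // Illustrative stand-in: remember the lookup key and redo the hash lookup when
    // adding, instead of trusting a position cached before an operation that could
    // have invalidated it (in SpiderMonkey, a GC that moves the key; here, nothing
    // interesting happens in between).
    template <typename Set>
    class DependentAddPtr {
        typename Set::key_type key_;
        bool found_;

      public:
        DependentAddPtr(const Set &set, const typename Set::key_type &key)
          : key_(key), found_(set.count(key) != 0) {}

        explicit operator bool() const { return found_; }

        bool add(Set &set, const typename Set::key_type &value) {
            assert(value == key_);
            return set.insert(value).second;   // re-lookup happens inside insert()
        }
    };

    int main() {
        std::unordered_set<std::string> frames;
        DependentAddPtr<std::unordered_set<std::string>> p(frames, "main@prog.js:1");
        if (!p) {
            std::string frame = "main@prog.js:1";  // stands in for allocating the new entry,
                                                   // which in SpiderMonkey could move GC things
            p.add(frames, frame);
        }
        assert(frames.count("main@prog.js:1") == 1);
    }
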
@@ -68,6 +68,13 @@ WeakMapBase::unmarkCompartment(JSCompartment *c)
 m->marked = false;
 }

+void
+WeakMapBase::markAll(JSCompartment *c, JSTracer *tracer)
+{
+for (WeakMapBase *m = c->gcWeakMapList; m; m = m->next)
+m->markIteratively(tracer);
+}
+
 bool
 WeakMapBase::markCompartmentIteratively(JSCompartment *c, JSTracer *tracer)
 {
@@ -49,6 +49,9 @@ class WeakMapBase {
 // Unmark all weak maps in a compartment.
 static void unmarkCompartment(JSCompartment *c);

+// Mark all the weakmaps in a compartment.
+static void markAll(JSCompartment *c, JSTracer *tracer);
+
 // Check all weak maps in a compartment that have been marked as live in this garbage
 // collection, and mark the values of all entries that have become strong references
 // to them. Return true if we marked any new values, indicating that we need to make
@@ -157,6 +157,7 @@ class HeapReverser : public JSTracer, public JS::CustomAutoRooter
 : JSTracer(cx->runtime(), traverseEdgeWithThis),
 JS::CustomAutoRooter(cx),
 noggc(JS_GetRuntime(cx)),
+nocgc(JS_GetRuntime(cx)),
 runtime(JS_GetRuntime(cx)),
 parent(nullptr)
 {
@@ -169,6 +170,7 @@ class HeapReverser : public JSTracer, public JS::CustomAutoRooter

 private:
 JS::AutoDisableGenerationalGC noggc;
+js::AutoDisableCompactingGC nocgc;

 /* A runtime pointer for use by the destructor. */
 JSRuntime *runtime;
@@ -932,6 +932,15 @@ ArrayBufferObject::sweep(JSCompartment *compartment)
 gcLiveArrayBuffers.clear();
 }

+/* static */ void
+ArrayBufferObject::fixupDataPointerAfterMovingGC(const ArrayBufferObject &src, ArrayBufferObject &dst)
+{
+// Fix up possible inline data pointer.
+const size_t reservedSlots = JSCLASS_RESERVED_SLOTS(&ArrayBufferObject::class_);
+if (src.dataPointer() == src.fixedData(reservedSlots))
+dst.setSlot(DATA_SLOT, PrivateValue(dst.fixedData(reservedSlots)));
+}
+
 void
 ArrayBufferObject::resetArrayBufferList(JSCompartment *comp)
 {
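
fixupDataPointerAfterMovingGC deals with the one pointer a byte copy cannot fix: a buffer whose data is stored inline still points into the old object's body after relocation. In miniature, with an invented MiniBuffer type:

    #include <cassert>
    #include <cstring>

    // If the old buffer's data pointer aimed at its own inline storage, the
    // byte-for-byte copy leaves the new buffer pointing into the *old* object,
    // so the pointer must be re-based onto the new object's storage.
    struct MiniBuffer {
        char *data;
        char inlineStorage[8];
    };

    void FixupDataPointerAfterMove(const MiniBuffer &src, MiniBuffer &dst)
    {
        if (src.data == src.inlineStorage)
            dst.data = dst.inlineStorage;
    }

    int main()
    {
        MiniBuffer src{};
        src.data = src.inlineStorage;
        std::strcpy(src.inlineStorage, "inline");

        MiniBuffer dst{};
        std::memcpy(&dst, &src, sizeof(MiniBuffer));  // what cell relocation does
        assert(dst.data == src.inlineStorage);        // stale: still points at the old object

        FixupDataPointerAfterMove(src, dst);
        assert(dst.data == dst.inlineStorage);        // re-based to the new object's storage
    }
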
@@ -992,7 +1001,7 @@ ArrayBufferViewObject::trace(JSTracer *trc, JSObject *obj)
 // Update obj's data pointer if the array buffer moved. Note that during
 // initialization, bufSlot may still contain |undefined|.
 if (bufSlot.isObject()) {
-ArrayBufferObject &buf = AsArrayBuffer(&bufSlot.toObject());
+ArrayBufferObject &buf = AsArrayBuffer(MaybeForwarded(&bufSlot.toObject()));
 int32_t offset = obj->getReservedSlot(BYTEOFFSET_SLOT).toInt32();
 MOZ_ASSERT(buf.dataPointer() != nullptr);
 obj->initPrivate(buf.dataPointer() + offset);
@@ -95,6 +95,8 @@ class ArrayBufferObject : public JSObject

 static void sweep(JSCompartment *rt);

+static void fixupDataPointerAfterMovingGC(const ArrayBufferObject &src, ArrayBufferObject &dst);
+
 static void resetArrayBufferList(JSCompartment *rt);
 static bool saveArrayBufferList(JSCompartment *c, ArrayBufferVector &vector);
 static void restoreArrayBufferLists(ArrayBufferVector &vector);
@@ -1752,20 +1752,6 @@ Debugger::sweepAll(FreeOp *fop)
 }
 }
 }
-
-for (gc::GCCompartmentGroupIter comp(rt); !comp.done(); comp.next()) {
-/* For each debuggee being GC'd, detach it from all its debuggers. */
-GlobalObjectSet &debuggees = comp->getDebuggees();
-for (GlobalObjectSet::Enum e(debuggees); !e.empty(); e.popFront()) {
-GlobalObject *global = e.front();
-if (IsObjectAboutToBeFinalized(&global)) {
-// See infallibility note above.
-detachAllDebuggersFromGlobal(fop, global, &e);
-} else if (global != e.front()) {
-e.rekeyFront(global);
-}
-}
-}
 }

 void
@@ -10,6 +10,7 @@
 #include "jsapi.h"
 #include "jscompartment.h"
 #include "jsfriendapi.h"
+#include "jshashutil.h"
 #include "jsnum.h"

 #include "gc/Marking.h"
@@ -562,7 +563,7 @@ SavedStacks::insertFrames(JSContext *cx, FrameIter &iter, MutableHandleSavedFram
 SavedFrame *
 SavedStacks::getOrCreateSavedFrame(JSContext *cx, SavedFrame::HandleLookup lookup)
 {
-SavedFrame::Set::AddPtr p = frames.lookupForAdd(lookup);
+DependentAddPtr<SavedFrame::Set> p(cx, frames, lookup);
 if (p)
 return *p;

@@ -570,7 +571,7 @@ SavedStacks::getOrCreateSavedFrame(JSContext *cx, SavedFrame::HandleLookup looku
 if (!frame)
 return nullptr;

-if (!frames.relookupOrAdd(p, lookup, frame))
+if (!p.add(cx, frames, lookup, frame))
 return nullptr;

 return frame;
@@ -1070,6 +1070,15 @@ ScopeIterKey::match(ScopeIterKey si1, ScopeIterKey si2)
 si1.type_ == si2.type_));
 }

+void
+ScopeIterVal::sweep()
+{
+/* We need to update possibly moved pointers on sweep. */
+MOZ_ALWAYS_FALSE(IsObjectAboutToBeFinalized(cur_.unsafeGet()));
+if (staticScope_)
+MOZ_ALWAYS_FALSE(IsObjectAboutToBeFinalized(staticScope_.unsafeGet()));
+}
+
 // Live ScopeIter values may be added to DebugScopes::liveScopes, as
 // ScopeIterVal instances. They need to have write barriers when they are added
 // to the hash table, but no barriers when rehashing inside GC. It's a nasty
@@ -1792,7 +1801,7 @@ DebugScopes::sweep(JSRuntime *rt)
 key.updateCur(js::gc::Forwarded(key.cur()));
 needsUpdate = true;
 }
-if (IsForwarded(key.staticScope())) {
+if (key.staticScope() && IsForwarded(key.staticScope())) {
 key.updateStaticScope(Forwarded(key.staticScope()));
 needsUpdate = true;
 }
@@ -1804,6 +1813,8 @@ DebugScopes::sweep(JSRuntime *rt)
 for (LiveScopeMap::Enum e(liveScopes); !e.empty(); e.popFront()) {
 ScopeObject *scope = e.front().key();

+e.front().value().sweep();
+
 /*
 * Scopes can be finalized when a debugger-synthesized ScopeObject is
 * no longer reachable via its DebugScopeObject.
@@ -733,6 +733,8 @@ class ScopeIterVal
 ScopeIter::Type type_;
 bool hasScopeObject_;

+void sweep();
+
 static void staticAsserts();

 public:
@@ -445,7 +445,7 @@ js::ObjectImpl::toDictionaryMode(ThreadSafeContext *cx)

 #ifdef JSGC_COMPACTING
 // TODO: This crashes if we run a compacting GC here.
-js::gc::AutoSuppressGC nogc(zone()->runtimeFromAnyThread());
+js::AutoDisableCompactingGC nogc(zone()->runtimeFromAnyThread());
 #endif

 /* We allocate the shapes from cx->compartment(), so make sure it's right. */
@@ -80,6 +80,9 @@ class TypedArrayObject : public ArrayBufferViewObject
 ensureHasBuffer(JSContext *cx, Handle<TypedArrayObject *> tarray);

 ArrayBufferObject *sharedBuffer() const;
+bool hasBuffer() const {
+return bufferValue(const_cast<TypedArrayObject*>(this)).isObject();
+}
 ArrayBufferObject *buffer() const {
 JSObject *obj = bufferValue(const_cast<TypedArrayObject*>(this)).toObjectOrNull();
 if (!obj)