Bug 1288780 - Don't destroy GCPtrs while there may be store buffer entries pointing into them r=terrence

This commit is contained in:
Jon Coppeard 2016-07-28 11:49:06 +01:00
Родитель a3b23bd33c
Коммит 8e2dedbc9f
21 изменённых файлов: 296 добавлений и 137 удалений

Просмотреть файл

@ -131,8 +131,7 @@ SetWeakMapEntryInternal(JSContext* cx, Handle<WeakMapObject*> mapObj,
{
ObjectValueMap* map = mapObj->getMap();
if (!map) {
AutoInitGCManagedObject<ObjectValueMap> newMap(
cx->make_unique<ObjectValueMap>(cx, mapObj.get()));
auto newMap = cx->make_unique<ObjectValueMap>(cx, mapObj.get());
if (!newMap)
return false;
if (!newMap->init()) {

Просмотреть файл

@ -65,10 +65,10 @@ CurrentThreadIsGCSweeping()
}
bool
CurrentThreadIsHandlingInitFailure()
CurrentThreadCanSkipPostBarrier(bool inNursery)
{
JSRuntime* rt = TlsPerThreadData.get()->runtimeIfOnOwnerThread();
return rt && rt->handlingInitFailure;
bool onMainThread = TlsPerThreadData.get()->runtimeIfOnOwnerThread() != nullptr;
return !onMainThread && !inNursery;
}
#endif // DEBUG

Просмотреть файл

@ -243,7 +243,7 @@ bool
CurrentThreadIsGCSweeping();
bool
CurrentThreadIsHandlingInitFailure();
CurrentThreadCanSkipPostBarrier(bool inNursery);
#endif
namespace gc {
@ -270,6 +270,8 @@ struct InternalBarrierMethods<T*>
static void postBarrier(T** vp, T* prev, T* next) { T::writeBarrierPost(vp, prev, next); }
static void readBarrier(T* v) { T::readBarrier(v); }
static bool isInsideNursery(T* v) { return IsInsideNursery(v); }
};
template <typename S> struct PreBarrierFunctor : public VoidDefaultAdaptor<S> {
@ -314,6 +316,10 @@ struct InternalBarrierMethods<Value>
static void readBarrier(const Value& v) {
DispatchTyped(ReadBarrierFunctor<Value>(), v);
}
static bool isInsideNursery(const Value& v) {
return v.isMarkable() && IsInsideNursery(v.toGCThing());
}
};
template <>
@ -324,6 +330,8 @@ struct InternalBarrierMethods<jsid>
static void preBarrier(jsid id) { DispatchTyped(PreBarrierFunctor<jsid>(), id); }
static void postBarrier(jsid* idp, jsid prev, jsid next) {}
static bool isInsideNursery(jsid id) { return false; }
};
// Barrier classes can use Mixins to add methods to a set of barrier
@ -443,8 +451,11 @@ class GCPtr : public WriteBarrieredBase<T>
#ifdef DEBUG
~GCPtr() {
// No prebarrier necessary as this only happens when we are sweeping or
// before the containing object becomes part of the GC graph.
MOZ_ASSERT(CurrentThreadIsGCSweeping() || CurrentThreadIsHandlingInitFailure());
// after we have just collected the nursery.
bool inNursery = InternalBarrierMethods<T>::isInsideNursery(this->value);
MOZ_ASSERT(CurrentThreadIsGCSweeping() ||
CurrentThreadCanSkipPostBarrier(inNursery));
Poison(this, JS_FREED_HEAP_PTR_PATTERN, sizeof(*this));
}
#endif

Просмотреть файл

@ -877,6 +877,11 @@ class GCRuntime
void freeAllLifoBlocksAfterSweeping(LifoAlloc* lifo);
void freeAllLifoBlocksAfterMinorGC(LifoAlloc* lifo);
// Queue a thunk to run after the next minor GC, once the nursery (and hence
// the store buffer entries pointing into it) has been collected. Used by
// GCManagedDeletePolicy to defer destruction of GC-managed objects.
void callAfterMinorGC(void (*thunk)(void* data), void* data) {
    nursery.queueSweepAction(thunk, data);
}
// Public here for ReleaseArenaLists and FinalizeTypedArenas.
void releaseArena(Arena* arena, const AutoLockGC& lock);
@ -1451,6 +1456,7 @@ inline bool GCRuntime::needZealousGC() { return false; }
#endif
} /* namespace gc */
} /* namespace js */
#endif

Просмотреть файл

@ -58,6 +58,22 @@ struct js::Nursery::FreeMallocedBuffersTask : public GCParallelTask
virtual void run() override;
};
// Linked-list node describing a callback to run when the nursery is next
// swept. Nodes are allocated out of the nursery itself (see
// queueSweepAction), so sizeof(SweepAction) must be a multiple of the GC
// cell size -- hence the explicit padding on 32-bit platforms.
struct js::Nursery::SweepAction
{
    SweepAction(SweepThunk thunk, void* data, SweepAction* next)
      : thunk(thunk), data(data), next(next)
    {}

    SweepThunk thunk;   // Callback to invoke at sweep time.
    void* data;         // Opaque argument passed to the callback.
    SweepAction* next;  // Next queued action (most recently queued first).

#if JS_BITS_PER_WORD == 32
  protected:
    uint32_t padding;   // Pads the size up to a cell-size multiple (asserted
                        // in queueSweepAction).
#endif
};
js::Nursery::Nursery(JSRuntime* rt)
: runtime_(rt)
, position_(0)
@ -73,6 +89,7 @@ js::Nursery::Nursery(JSRuntime* rt)
, enableProfiling_(false)
, minorGcCount_(0)
, freeMallocedBuffersTask(nullptr)
, sweepActions_(nullptr)
#ifdef JS_GC_ZEAL
, lastCanary_(nullptr)
#endif
@ -723,6 +740,8 @@ js::Nursery::sweep()
}
cellsWithUid_.clear();
runSweepActions();
#ifdef JS_GC_ZEAL
/* Poison the nursery contents so touching a freed object will crash. */
JS_POISON((void*)start(), JS_SWEPT_NURSERY_PATTERN, nurserySize());
@ -793,3 +812,37 @@ js::Nursery::updateNumActiveChunks(int newCount)
}
#endif // !defined(JS_GC_ZEAL)
}
// Queue |thunk(data)| to run when the nursery is next swept. The action node
// is allocated from the nursery, so it is reclaimed automatically by the
// minor GC that runs it and needs no explicit free.
void
js::Nursery::queueSweepAction(SweepThunk thunk, void* data)
{
    static_assert(sizeof(SweepAction) % CellSize == 0,
                  "SweepAction size must be a multiple of cell size");

    MOZ_ASSERT(!runtime()->mainThread.suppressGC);

    SweepAction* action = nullptr;
    // ShouldFailWithOOM lets the simulated-OOM tests exercise the fallback
    // path below.
    if (isEnabled() && !js::oom::ShouldFailWithOOM())
        action = reinterpret_cast<SweepAction*>(allocate(sizeof(SweepAction)));

    // Fallback: if we can't allocate a node (nursery disabled or OOM), empty
    // the nursery now and run the action immediately instead of queueing it.
    if (!action) {
        runtime()->gc.evictNursery();
        AutoSetThreadIsSweeping threadIsSweeping;
        thunk(data);
        return;
    }

    // Push onto the front of the singly-linked action list.
    new (action) SweepAction(thunk, data, sweepActions_);
    sweepActions_ = action;
}
// Run all queued sweep actions and clear the queue. Called from
// Nursery::sweep(), so the nursery contents (including the action nodes
// themselves) are about to be discarded.
void
js::Nursery::runSweepActions()
{
    // The hazard analysis doesn't know whether the thunks can GC.
    JS::AutoSuppressGCAnalysis nogc;

    AutoSetThreadIsSweeping threadIsSweeping;
    for (auto action = sweepActions_; action; action = action->next)
        action->thunk(action->data);
    sweepActions_ = nullptr;
}

Просмотреть файл

@ -208,6 +208,9 @@ class Nursery
return cellsWithUid_.put(cell);
}
using SweepThunk = void (*)(void *data);
void queueSweepAction(SweepThunk thunk, void* data);
size_t sizeOfHeapCommitted() const {
return numActiveChunks_ * gc::ChunkSize;
}
@ -335,6 +338,10 @@ class Nursery
using CellsWithUniqueIdSet = HashSet<gc::Cell*, PointerHasher<gc::Cell*, 3>, SystemAllocPolicy>;
CellsWithUniqueIdSet cellsWithUid_;
struct SweepAction;
SweepAction* sweepActions_;
SweepAction* reservedSweepAction_;
#ifdef JS_GC_ZEAL
struct Canary
{
@ -429,6 +436,8 @@ class Nursery
*/
void sweep();
void runSweepActions();
/* Change the allocable space provided by the nursery. */
void growAllocableSpace();
void shrinkAllocableSpace();

Просмотреть файл

@ -9,6 +9,34 @@
#include "js/RootingAPI.h"
#include "jsapi-tests/tests.h"
#include "vm/Runtime.h"
// Primary template: must never be reached; only the specializations below
// know how to create each kind of GC thing used by these tests.
template <typename T>
static T* CreateGCThing(JSContext* cx)
{
    MOZ_CRASH();
    return nullptr;
}
// Create a plain object carrying a property "x" == 42, so liveness can later
// be verified by reading the property back (see the post-barrier tests).
template <>
JSObject* CreateGCThing(JSContext* cx)
{
    JS::RootedObject obj(cx, JS_NewPlainObject(cx));
    if (!obj)
        return nullptr;
    JS_DefineProperty(cx, obj, "x", 42, 0);
    return obj;
}
template <>
JSFunction* CreateGCThing(JSContext* cx)
{
    /*
     * We don't actually use the function as a function, so here we cheat and
     * cast from a plain JSObject; the tests only care that it is a GC thing.
     */
    return static_cast<JSFunction*>(CreateGCThing<JSObject>(cx));
}
BEGIN_TEST(testGCHeapPostBarriers)
{
@ -18,15 +46,15 @@ BEGIN_TEST(testGCHeapPostBarriers)
/* Sanity check - objects start in the nursery and then become tenured. */
JS_GC(cx);
JS::RootedObject obj(cx, NurseryObject());
JS::RootedObject obj(cx, CreateGCThing<JSObject>(cx));
CHECK(js::gc::IsInsideNursery(obj.get()));
JS_GC(cx);
CHECK(!js::gc::IsInsideNursery(obj.get()));
JS::RootedObject tenuredObject(cx, obj);
/* Currently JSObject and JSFunction objects are nursery allocated. */
CHECK(TestHeapPostBarriers(NurseryObject()));
CHECK(TestHeapPostBarriers(NurseryFunction()));
CHECK(TestHeapPostBarriersForType<JSObject>());
CHECK(TestHeapPostBarriersForType<JSFunction>());
return true;
}
@ -38,54 +66,95 @@ Passthrough(bool value)
return value;
}
template <typename T>
bool
TestHeapPostBarriers(T initialObj)
CanAccessObject(JSObject* obj)
{
CHECK(initialObj != nullptr);
CHECK(js::gc::IsInsideNursery(initialObj));
/* Construct Heap<> wrapper. */
auto heapDataStorage = mozilla::MakeUnique<char[]>(sizeof(JS::Heap<T>));
auto* heapData = new (heapDataStorage.get()) JS::Heap<T>();
CHECK(heapData);
CHECK(Passthrough(*heapData == nullptr));
*heapData = initialObj;
/* Store the pointer as an integer so that the hazard analysis will miss it. */
uintptr_t initialObjAsInt = uintptr_t(initialObj);
/* Perform minor GC and check heap wrapper is updated with new pointer. */
cx->minorGC(JS::gcreason::API);
CHECK(uintptr_t(heapData) != initialObjAsInt);
CHECK(!js::gc::IsInsideNursery(*heapData));
/* Check object is definitely still alive. */
JS::Rooted<T> obj(cx, *heapData);
JS::RootedObject rootedObj(cx, obj);
JS::RootedValue value(cx);
CHECK(JS_GetProperty(cx, obj, "x", &value));
CHECK(JS_GetProperty(cx, rootedObj, "x", &value));
CHECK(value.isInt32());
CHECK(value.toInt32() == 42);
return true;
}
// Run the post-barrier tests for GC-thing type T against each pointer
// wrapper class. The extra parentheses are required because CHECK is a macro
// and the template argument lists contain commas.
template <typename T>
bool
TestHeapPostBarriersForType()
{
    CHECK((TestHeapPostBarriersForWrapper<T, JS::Heap<T*>>()));
    CHECK((TestHeapPostBarriersForWrapper<T, js::GCPtr<T*>>()));
    CHECK((TestHeapPostBarriersForWrapper<T, js::HeapPtr<T*>>()));
    return true;
}
// Exercise wrapper type W (holding a T*) in both the normal update scenario
// and the failed-initialization scenario.
template <typename T, typename W>
bool
TestHeapPostBarriersForWrapper()
{
    CHECK((TestHeapPostBarrierUpdate<T, W>()));
    CHECK((TestHeapPostBarrierInitFailure<T, W>()));
    return true;
}
template <typename T, typename W>
bool
TestHeapPostBarrierUpdate()
{
    // Normal case - allocate a heap object, write a nursery pointer into it and
    // check that it gets updated on minor GC.

    T* initialObj = CreateGCThing<T>(cx);
    CHECK(initialObj != nullptr);
    CHECK(js::gc::IsInsideNursery(initialObj));
    // Stash the original pointer as an integer so the comparison below does
    // not keep a traceable reference to the pre-GC location.
    uintptr_t initialObjAsInt = uintptr_t(initialObj);

    W* ptr = nullptr;

    {
        auto heapPtr = cx->make_unique<W>();
        CHECK(heapPtr);

        W& wrapper = *heapPtr;
        CHECK(Passthrough(wrapper.get() == nullptr));
        wrapper = initialObj;
        CHECK(Passthrough(wrapper == initialObj));

        // Release ownership: the wrapper must outlive this scope so the post
        // barrier can update it during the minor GC below.
        ptr = heapPtr.release();
    }

    cx->minorGC(JS::gcreason::API);

    // The wrapper should now point at the tenured copy of the object, not the
    // original nursery address.
    CHECK(uintptr_t(ptr->get()) != initialObjAsInt);
    CHECK(!js::gc::IsInsideNursery(ptr->get()));
    CHECK(CanAccessObject(ptr->get()));

    return true;
}
JSObject* NurseryObject()
template <typename T, typename W>
bool
TestHeapPostBarrierInitFailure()
{
JS::RootedObject obj(cx, JS_NewPlainObject(cx));
if (!obj)
return nullptr;
JS_DefineProperty(cx, obj, "x", 42, 0);
return obj;
}
// Failure case - allocate a heap object, write a nursery pointer into it
// and fail to complete initialization.
JSFunction* NurseryFunction()
{
/*
* We don't actually use the function as a function, so here we cheat and
* cast a JSObject.
*/
return static_cast<JSFunction*>(NurseryObject());
T* initialObj = CreateGCThing<T>(cx);
CHECK(initialObj != nullptr);
CHECK(js::gc::IsInsideNursery(initialObj));
{
auto heapPtr = cx->make_unique<W>();
CHECK(heapPtr);
W& wrapper = *heapPtr;
CHECK(Passthrough(wrapper.get() == nullptr));
wrapper = initialObj;
CHECK(Passthrough(wrapper == initialObj));
}
cx->minorGC(JS::gcreason::API);
return true;
}
END_TEST(testGCHeapPostBarriers)

Просмотреть файл

@ -328,6 +328,7 @@ PodSet(T* aDst, T aSrc, size_t aNElem)
#define JS_MOVED_TENURED_PATTERN 0x49
#define JS_SWEPT_TENURED_PATTERN 0x4B
#define JS_ALLOCATED_TENURED_PATTERN 0x4D
#define JS_FREED_HEAP_PTR_PATTERN 0x6B
/*
* Ensure JS_SWEPT_CODE_PATTERN is a byte pattern that will crash immediately

Просмотреть файл

@ -32,7 +32,7 @@ WeakMapBase::WeakMapBase(JSObject* memOf, Zone* zone)
WeakMapBase::~WeakMapBase()
{
MOZ_ASSERT(CurrentThreadIsGCSweeping() || CurrentThreadIsHandlingInitFailure());
MOZ_ASSERT(CurrentThreadIsGCSweeping());
}
void

Просмотреть файл

@ -412,4 +412,12 @@ class ObjectWeakMap
} /* namespace js */
namespace JS {
// Defer deletion of ObjectValueMap (e.g. via UniquePtr) to a safe time:
// store buffer entries may still point into the map. See
// js::GCManagedDeletePolicy.
template <>
struct DeletePolicy<js::ObjectValueMap> : public js::GCManagedDeletePolicy<js::ObjectValueMap>
{};
} /* namespace JS */
#endif /* jsweakmap_h */

Просмотреть файл

@ -647,7 +647,6 @@ Debugger::Debugger(JSContext* cx, NativeObject* dbg)
{
assertSameCompartment(cx, dbg);
cx->runtime()->debuggerList.insertBack(this);
JS_INIT_CLIST(&breakpoints);
JS_INIT_CLIST(&onNewGlobalObjectWatchersLink);
}
@ -670,19 +669,23 @@ Debugger::~Debugger()
bool
Debugger::init(JSContext* cx)
{
bool ok = debuggees.init() &&
debuggeeZones.init() &&
frames.init() &&
scripts.init() &&
sources.init() &&
objects.init() &&
observedGCs.init() &&
environments.init() &&
wasmInstanceScripts.init() &&
wasmInstanceSources.init();
if (!ok)
if (!debuggees.init() ||
!debuggeeZones.init() ||
!frames.init() ||
!scripts.init() ||
!sources.init() ||
!objects.init() ||
!observedGCs.init() ||
!environments.init() ||
!wasmInstanceScripts.init() ||
!wasmInstanceSources.init())
{
ReportOutOfMemory(cx);
return ok;
return false;
}
cx->runtime()->debuggerList.insertBack(this);
return true;
}
JS_STATIC_ASSERT(unsigned(JSSLOT_DEBUGFRAME_OWNER) == unsigned(JSSLOT_DEBUGSCRIPT_OWNER));
@ -3688,7 +3691,7 @@ Debugger::construct(JSContext* cx, unsigned argc, Value* vp)
Debugger* debugger;
{
/* Construct the underlying C++ object. */
AutoInitGCManagedObject<Debugger> dbg(cx->make_unique<Debugger>(cx, obj.get()));
auto dbg = cx->make_unique<Debugger>(cx, obj.get());
if (!dbg || !dbg->init(cx))
return false;

Просмотреть файл

@ -1537,5 +1537,12 @@ MOZ_MUST_USE bool ReportObjectRequired(JSContext* cx);
} /* namespace js */
namespace JS {
// Defer deletion of Debugger (e.g. via UniquePtr) to a safe time: store
// buffer entries may still point into its GC-managed members. See
// js::GCManagedDeletePolicy.
template <>
struct DeletePolicy<js::Debugger> : public js::GCManagedDeletePolicy<js::Debugger>
{};
} /* namespace JS */
#endif /* vm_Debugger_h */

Просмотреть файл

@ -201,9 +201,6 @@ JSRuntime::JSRuntime(JSRuntime* parentRuntime)
profilingScripts(false),
suppressProfilerSampling(false),
hadOutOfMemory(false),
#ifdef DEBUG
handlingInitFailure(false),
#endif
#if defined(DEBUG) || defined(JS_OOM_BREAKPOINT)
runningOOMTest(false),
#endif

Просмотреть файл

@ -941,11 +941,6 @@ struct JSRuntime : public JS::shadow::Runtime,
/* Had an out-of-memory error which did not populate an exception. */
bool hadOutOfMemory;
#ifdef DEBUG
/* We are currently deleting an object due to an initialization failure. */
bool handlingInitFailure;
#endif
#if defined(DEBUG) || defined(JS_OOM_BREAKPOINT)
/* We are currently running a simulated OOM test. */
bool runningOOMTest;
@ -1720,76 +1715,44 @@ class MOZ_RAII AutoEnterIonCompilation
};
/*
* AutoInitGCManagedObject is a wrapper for use when initializing an object whose
* lifetime is managed by the GC. It ensures that the object is destroyed if
* initialization fails but also allows us to assert the invariant that such
* objects are only destroyed in this way or by the GC.
* Provides a delete policy that can be used for objects which have their
* lifetime managed by the GC and can only safely be destroyed while the nursery
* is empty.
*
* It has a limited interface but is a drop-in replacement for UniquePtr<T> in
* this situation. For example:
*
* AutoInitGCManagedObject<MyClass> ptr(cx->make_unique<MyClass>());
* if (!ptr) {
* ReportOutOfMemory(cx);
* return nullptr;
* }
*
* if (!ptr->init(cx))
* return nullptr; // Object destroyed here if init() failed.
*
* object->setPrivate(ptr.release());
* // Initialization successful, ptr is now owned through another object.
* This is necessary when initializing such an object may fail after the initial
* allocation. The partially-initialized object must be destroyed, but it may
* not be safe to do so at the current time. This policy puts the object on a
* queue to be destroyed at a safe time.
*/
template <typename T>
class MOZ_STACK_CLASS AutoInitGCManagedObject
struct GCManagedDeletePolicy
{
typedef UniquePtr<T> UniquePtrT;
UniquePtrT ptr_;
public:
explicit AutoInitGCManagedObject(UniquePtrT&& ptr)
: ptr_(mozilla::Move(ptr))
{}
~AutoInitGCManagedObject() {
#ifdef DEBUG
if (ptr_) {
JSRuntime* rt = TlsPerThreadData.get()->runtimeFromMainThread();
MOZ_ASSERT(!rt->handlingInitFailure);
rt->handlingInitFailure = true;
ptr_.reset(nullptr);
rt->handlingInitFailure = false;
void operator()(const T* ptr) {
if (ptr) {
JSRuntime* rt = TlsPerThreadData.get()->runtimeIfOnOwnerThread();
if (rt)
rt->gc.callAfterMinorGC(deletePtr, const_cast<T*>(ptr));
else
js_delete(const_cast<T*>(ptr));
}
#endif
}
T& operator*() const {
return *get();
private:
static void deletePtr(void* data) {
js_delete(reinterpret_cast<T*>(data));
}
T* operator->() const {
return get();
}
explicit operator bool() const {
return get() != nullptr;
}
T* get() const {
return ptr_.get();
}
T* release() {
return ptr_.release();
}
AutoInitGCManagedObject(const AutoInitGCManagedObject<T>& other) = delete;
AutoInitGCManagedObject& operator=(const AutoInitGCManagedObject<T>& other) = delete;
};
} /* namespace js */
namespace JS {
// Heap-allocated GCPtr<T>s may have store buffer entries pointing at them,
// so their deletion must also go through GCManagedDeletePolicy.
template <typename T>
struct DeletePolicy<js::GCPtr<T>> : public js::GCManagedDeletePolicy<js::GCPtr<T>>
{};
} /* namespace JS */
#ifdef _MSC_VER
#pragma warning(pop)
#endif

Просмотреть файл

@ -2593,7 +2593,7 @@ DebugScopes::ensureCompartmentData(JSContext* cx)
if (c->debugScopes)
return c->debugScopes;
AutoInitGCManagedObject<DebugScopes> debugScopes(cx->make_unique<DebugScopes>(cx));
auto debugScopes = cx->make_unique<DebugScopes>(cx);
if (!debugScopes || !debugScopes->init()) {
ReportOutOfMemory(cx);
return nullptr;

Просмотреть файл

@ -1611,4 +1611,18 @@ AnalyzeEntrainedVariables(JSContext* cx, HandleScript script);
} // namespace js
namespace JS {
// Custom delete policy for DebugScopeObject: deletion needs access to the
// runtime, so this policy carries a JSRuntime* rather than deriving from
// GCManagedDeletePolicy. operator() is defined out of line.
template <>
struct DeletePolicy<js::DebugScopeObject>
{
    explicit DeletePolicy(JSRuntime* rt) : rt_(rt) {}

    void operator()(const js::DebugScopeObject* ptr);

  private:
    JSRuntime* rt_;
};
} // namespace JS
#endif /* vm_ScopeObject_h */

Просмотреть файл

@ -72,6 +72,11 @@ struct InternalBarrierMethods<TaggedProto>
static bool isMarkable(TaggedProto proto) {
return proto.isObject();
}
// Only an object proto can point into the nursery; non-object protos are not
// GC pointers.
static bool isInsideNursery(TaggedProto proto) {
    return proto.isObject() &&
           gc::IsInsideNursery(reinterpret_cast<gc::Cell*>(proto.toObject()));
}
};
template<class Outer>

Просмотреть файл

@ -276,7 +276,11 @@ TypeIdString(jsid id)
*/
struct AutoEnterAnalysis
{
/* Prevent GC activity in the middle of analysis. */
// For use when initializing an UnboxedLayout. The UniquePtr's destructor
// must run when GC is not suppressed.
UniquePtr<UnboxedLayout> unboxedLayoutToCleanUp;
// Prevent GC activity in the middle of analysis.
gc::AutoSuppressGC suppressGC;
// Allow clearing inference info on OOM during incremental sweeping.

Просмотреть файл

@ -3513,7 +3513,7 @@ PreliminaryObjectArrayWithTemplate::maybeAnalyze(ExclusiveContext* cx, ObjectGro
}
}
TryConvertToUnboxedLayout(cx, shape(), group, preliminaryObjects);
TryConvertToUnboxedLayout(cx, enter, shape(), group, preliminaryObjects);
if (group->maybeUnboxedLayout())
return;
@ -3794,7 +3794,7 @@ TypeNewScript::maybeAnalyze(JSContext* cx, ObjectGroup* group, bool* regenerate,
}
// Try to use an unboxed representation for the group.
if (!TryConvertToUnboxedLayout(cx, templateObject()->lastProperty(), group, preliminaryObjects))
if (!TryConvertToUnboxedLayout(cx, enter, templateObject()->lastProperty(), group, preliminaryObjects))
return false;
js_delete(preliminaryObjects);

Просмотреть файл

@ -1920,7 +1920,7 @@ UnboxedPlainObject::fillAfterConvert(ExclusiveContext* cx,
}
bool
js::TryConvertToUnboxedLayout(ExclusiveContext* cx, Shape* templateShape,
js::TryConvertToUnboxedLayout(ExclusiveContext* cx, AutoEnterAnalysis& enter, Shape* templateShape,
ObjectGroup* group, PreliminaryObjectArray* objects)
{
bool isArray = !templateShape;
@ -2015,7 +2015,9 @@ js::TryConvertToUnboxedLayout(ExclusiveContext* cx, Shape* templateShape,
return true;
}
AutoInitGCManagedObject<UnboxedLayout> layout(group->zone()->make_unique<UnboxedLayout>());
UniquePtr<UnboxedLayout>& layout = enter.unboxedLayoutToCleanUp;
MOZ_ASSERT(!layout);
layout = group->zone()->make_unique<UnboxedLayout>();
if (!layout)
return false;
@ -2089,7 +2091,6 @@ js::TryConvertToUnboxedLayout(ExclusiveContext* cx, Shape* templateShape,
}
MOZ_ASSERT(valueCursor == values.length());
layout.release();
return true;
}

Просмотреть файл

@ -10,6 +10,7 @@
#include "jsgc.h"
#include "jsobj.h"
#include "vm/Runtime.h"
#include "vm/TypeInference.h"
namespace js {
@ -320,7 +321,7 @@ class UnboxedPlainObject : public JSObject
// provided they all match the template shape. If successful, converts the
// preliminary objects and their group to the new unboxed representation.
bool
TryConvertToUnboxedLayout(ExclusiveContext* cx, Shape* templateShape,
TryConvertToUnboxedLayout(ExclusiveContext* cx, AutoEnterAnalysis& enter, Shape* templateShape,
ObjectGroup* group, PreliminaryObjectArray* objects);
inline gc::AllocKind
@ -519,4 +520,12 @@ class UnboxedArrayObject : public JSObject
} // namespace js
namespace JS {
// Defer deletion of UnboxedLayout (e.g. via UniquePtr) to a safe time: it
// contains GCPtrs that store buffer entries may point into. See
// js::GCManagedDeletePolicy.
template <>
struct DeletePolicy<js::UnboxedLayout> : public js::GCManagedDeletePolicy<js::UnboxedLayout>
{};
} /* namespace JS */
#endif /* vm_UnboxedObject_h */