Bug 1175642 - Fix the interface that RelocatablePtr uses to interact with the StoreBuffer; r=jonco

--HG--
extra : rebase_source : 974375bcb15e889187f7138c82f23c60021e9d21
Terrence Cole 2015-06-18 10:23:49 -07:00
Parent 86d691dd69
Commit 629acf9edc
12 changed files with 102 additions and 210 deletions
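In short, the patch replaces the old needsPostBarrier()/postBarrier()/relocate() triple in GCMethods and InternalGCMethods with a single postBarrier(slot, prev, next) hook: the caller records the outgoing value, writes the new one, and lets the barrier decide whether to add, keep, or drop the store-buffer edge. Below is a minimal caller-side sketch of that pattern, assuming a build against js/RootingAPI.h; BarrieredSlot is an illustrative stand-in for JS::Heap<T>/RelocatablePtr<T>, not a class from the patch.

    #include "js/RootingAPI.h"  // assumed include for js::GCMethods<T>

    // Illustrative wrapper: mirrors the write pattern Heap<T> and
    // RelocatablePtr<T> use after this patch. Not part of the patch itself.
    template <typename T>
    class BarrieredSlot
    {
        T value_;

      public:
        explicit BarrieredSlot(T v) : value_(v) {
            // Fresh slot: the "previous" value is the trait's initial() sentinel.
            js::GCMethods<T>::postBarrier(&value_, js::GCMethods<T>::initial(), value_);
        }

        void set(T next) {
            T prev = value_;   // remember the outgoing value
            value_ = next;     // overwrite the slot first, then barrier it
            js::GCMethods<T>::postBarrier(&value_, prev, next);
        }

        ~BarrieredSlot() {
            // Dying slot: "next" is initial(), so any store-buffer edge is removed.
            js::GCMethods<T>::postBarrier(&value_, value_, js::GCMethods<T>::initial());
        }
    };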

View file

@@ -92,19 +92,19 @@ struct GCMethods<nsXBLMaybeCompiled<UncompiledT> >
   static nsXBLMaybeCompiled<UncompiledT> initial() { return nsXBLMaybeCompiled<UncompiledT>(); }
-  static bool needsPostBarrier(nsXBLMaybeCompiled<UncompiledT> function)
+  static void postBarrier(nsXBLMaybeCompiled<UncompiledT>* functionp,
+                          nsXBLMaybeCompiled<UncompiledT> prev,
+                          nsXBLMaybeCompiled<UncompiledT> next)
   {
-    return function.IsCompiled() && Base::needsPostBarrier(function.GetJSFunction());
-  }
-  static void postBarrier(nsXBLMaybeCompiled<UncompiledT>* functionp)
-  {
-    Base::postBarrier(&functionp->UnsafeGetJSFunction());
-  }
-  static void relocate(nsXBLMaybeCompiled<UncompiledT>* functionp)
-  {
-    Base::relocate(&functionp->UnsafeGetJSFunction());
+    if (next.IsCompiled()) {
+      Base::postBarrier(&functionp->UnsafeGetJSFunction(),
+                        prev.IsCompiled() ? prev.UnsafeGetJSFunction() : nullptr,
+                        next.UnsafeGetJSFunction());
+    } else if (prev.IsCompiled()) {
+      Base::postBarrier(&prev.UnsafeGetJSFunction(),
+                        prev.UnsafeGetJSFunction(),
+                        nullptr);
+    }
   }
 };

View file

@@ -171,9 +171,7 @@ namespace js {
 template <> struct GCMethods<jsid>
 {
     static jsid initial() { return JSID_VOID; }
-    static bool needsPostBarrier(jsid id) { return false; }
-    static void postBarrier(jsid* idp) {}
-    static void relocate(jsid* idp) {}
+    static void postBarrier(jsid* idp, jsid prev, jsid next) {}
 };
 // If the jsid is a GC pointer type, convert to that type and call |f| with

View file

@@ -174,8 +174,7 @@ template <typename T> class PersistentRooted;
 /* This is exposing internal state of the GC for inlining purposes. */
 JS_FRIEND_API(bool) isGCEnabled();
-JS_FRIEND_API(void) HeapObjectPostBarrier(JSObject** objp);
-JS_FRIEND_API(void) HeapObjectRelocate(JSObject** objp);
+JS_FRIEND_API(void) HeapObjectPostBarrier(JSObject** objp, JSObject* prev, JSObject* next);
 #ifdef JS_DEBUG
 /*
@@ -232,8 +231,7 @@ class Heap : public js::HeapBase<T>
     explicit Heap(const Heap<T>& p) { init(p.ptr); }
     ~Heap() {
-        if (js::GCMethods<T>::needsPostBarrier(ptr))
-            relocate();
+        post(ptr, js::GCMethods<T>::initial());
     }
     DECLARE_POINTER_CONSTREF_OPS(T);
@@ -257,29 +255,17 @@ class Heap : public js::HeapBase<T>
   private:
     void init(T newPtr) {
        ptr = newPtr;
-        if (js::GCMethods<T>::needsPostBarrier(ptr))
-            post();
+        post(js::GCMethods<T>::initial(), ptr);
     }
     void set(T newPtr) {
-        if (js::GCMethods<T>::needsPostBarrier(newPtr)) {
-            ptr = newPtr;
-            post();
-        } else if (js::GCMethods<T>::needsPostBarrier(ptr)) {
-            relocate();  /* Called before overwriting ptr. */
-            ptr = newPtr;
-        } else {
-            ptr = newPtr;
-        }
+        T tmp = ptr;
+        ptr = newPtr;
+        post(tmp, ptr);
     }
-    void post() {
-        MOZ_ASSERT(js::GCMethods<T>::needsPostBarrier(ptr));
-        js::GCMethods<T>::postBarrier(&ptr);
-    }
-    void relocate() {
-        js::GCMethods<T>::relocate(&ptr);
+    void post(const T& prev, const T& next) {
+        js::GCMethods<T>::postBarrier(&ptr, prev, next);
     }
     enum {
@@ -604,10 +590,9 @@ template <typename T>
 struct GCMethods<T*>
 {
     static T* initial() { return nullptr; }
-    static bool needsPostBarrier(T* v) { return false; }
-    static void postBarrier(T** vp) {
-        if (vp)
-            JS::AssertGCThingIsNotAnObjectSubclass(reinterpret_cast<js::gc::Cell*>(vp));
+    static void postBarrier(T** vp, T* prev, T* next) {
+        if (next)
+            JS::AssertGCThingIsNotAnObjectSubclass(reinterpret_cast<js::gc::Cell*>(next));
     }
-    static void relocate(T** vp) {}
 };
@@ -622,14 +607,8 @@ struct GCMethods<JSObject*>
         MOZ_ASSERT(uintptr_t(v) > 32);
         return reinterpret_cast<gc::Cell*>(v);
     }
-    static bool needsPostBarrier(JSObject* v) {
-        return v != nullptr && gc::IsInsideNursery(reinterpret_cast<gc::Cell*>(v));
-    }
-    static void postBarrier(JSObject** vp) {
-        JS::HeapObjectPostBarrier(vp);
-    }
-    static void relocate(JSObject** vp) {
-        JS::HeapObjectRelocate(vp);
+    static void postBarrier(JSObject** vp, JSObject* prev, JSObject* next) {
+        JS::HeapObjectPostBarrier(vp, prev, next);
     }
 };
@@ -637,14 +616,10 @@ template <>
 struct GCMethods<JSFunction*>
 {
     static JSFunction* initial() { return nullptr; }
-    static bool needsPostBarrier(JSFunction* v) {
-        return v != nullptr && gc::IsInsideNursery(reinterpret_cast<gc::Cell*>(v));
-    }
-    static void postBarrier(JSFunction** vp) {
-        JS::HeapObjectPostBarrier(reinterpret_cast<JSObject**>(vp));
-    }
-    static void relocate(JSFunction** vp) {
-        JS::HeapObjectRelocate(reinterpret_cast<JSObject**>(vp));
+    static void postBarrier(JSFunction** vp, JSFunction* prev, JSFunction* next) {
+        JS::HeapObjectPostBarrier(reinterpret_cast<JSObject**>(vp),
+                                  reinterpret_cast<JSObject*>(prev),
+                                  reinterpret_cast<JSObject*>(next));
     }
 };
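For a concrete trace through the new plumbing, here is a hedged sketch of what a JS::Heap<JSObject*> member does under the code above; everything other than JS::Heap and JS::HeapObjectPostBarrier is illustrative.

    // Each write to a JS::Heap<JSObject*> now funnels both the old and the
    // new pointer into the friend API declared above:
    //   Heap<T>::set(newPtr)
    //     -> post(oldPtr, newPtr)
    //     -> js::GCMethods<JSObject*>::postBarrier(&ptr, oldPtr, newPtr)
    //     -> JS::HeapObjectPostBarrier(&ptr, oldPtr, newPtr)
    struct MyEmbedderThing {
        JS::Heap<JSObject*> obj;      // default-initialized to nullptr

        void setObject(JSObject* newObj) {
            obj = newObj;             // prev = old pointer, next = newObj
        }
    };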

View file

@@ -1629,8 +1629,7 @@ SameType(const Value& lhs, const Value& rhs)
 /************************************************************************/
 namespace JS {
-JS_PUBLIC_API(void) HeapValuePostBarrier(Value* valuep);
-JS_PUBLIC_API(void) HeapValueRelocate(Value* valuep);
+JS_PUBLIC_API(void) HeapValuePostBarrier(Value* valuep, const Value& prev, const Value& next);
 }
 namespace js {
@@ -1646,11 +1645,9 @@ template <> struct GCMethods<JS::Value>
     static gc::Cell* asGCThingOrNull(const JS::Value& v) {
         return v.isMarkable() ? v.toGCThing() : nullptr;
     }
-    static bool needsPostBarrier(const JS::Value& v) {
-        return v.isObject() && gc::IsInsideNursery(reinterpret_cast<gc::Cell*>(&v.toObject()));
+    static void postBarrier(JS::Value* v, const JS::Value& prev, const JS::Value& next) {
+        JS::HeapValuePostBarrier(v, prev, next);
     }
-    static void postBarrier(JS::Value* v) { JS::HeapValuePostBarrier(v); }
-    static void relocate(JS::Value* v) { JS::HeapValueRelocate(v); }
 };
 template <class Outer> class MutableValueOperations;

View file

@@ -90,31 +90,15 @@ template void js::PreBarrierFunctor<jsid>::operator()<JS::Symbol>(JS::Symbol*);
 template void js::PreBarrierFunctor<jsid>::operator()<JSString>(JSString*);
 JS_PUBLIC_API(void)
-JS::HeapObjectPostBarrier(JSObject** objp)
+JS::HeapObjectPostBarrier(JSObject** objp, JSObject* prev, JSObject* next)
 {
     MOZ_ASSERT(objp);
-    MOZ_ASSERT(*objp);
-    js::InternalGCMethods<JSObject*>::postBarrierRelocate(objp);
+    js::InternalGCMethods<JSObject*>::postBarrier(objp, prev, next);
 }
-JS_PUBLIC_API(void)
-JS::HeapObjectRelocate(JSObject** objp)
-{
-    MOZ_ASSERT(objp);
-    MOZ_ASSERT(*objp);
-    js::InternalGCMethods<JSObject*>::postBarrierRemove(objp);
-}
 JS_PUBLIC_API(void)
-JS::HeapValuePostBarrier(JS::Value* valuep)
+JS::HeapValuePostBarrier(JS::Value* valuep, const Value& prev, const Value& next)
 {
     MOZ_ASSERT(valuep);
-    js::InternalGCMethods<JS::Value>::postBarrierRelocate(valuep);
-}
-JS_PUBLIC_API(void)
-JS::HeapValueRelocate(JS::Value* valuep)
-{
-    MOZ_ASSERT(valuep);
-    js::InternalGCMethods<JS::Value>::postBarrierRemove(valuep);
+    js::InternalGCMethods<JS::Value>::postBarrier(valuep, prev, next);
 }
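These two JS_PUBLIC_API functions are the entry points an embedder uses when it post-barriers a traced slot by hand. A hedged sketch of the calling convention with the new signatures; the surrounding struct is hypothetical, only JS::HeapObjectPostBarrier comes from the patch.

    // Hypothetical embedder structure that traces `obj` itself and therefore
    // must post-barrier writes manually with the new prev/next convention.
    struct ManuallyBarriered {
        JSObject* obj = nullptr;

        void set(JSObject* next) {
            JSObject* prev = obj;
            obj = next;
            // Tell the GC both what used to be in the slot and what is there now.
            JS::HeapObjectPostBarrier(&obj, prev, next);
        }

        ~ManuallyBarriered() {
            // Clearing the slot: a null `next` removes any store-buffer edge.
            JS::HeapObjectPostBarrier(&obj, obj, nullptr);
        }
    };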

View file

@@ -239,9 +239,9 @@ struct InternalGCMethods<T*>
     static void preBarrier(T* v) { T::writeBarrierPre(v); }
-    static void postBarrier(T** vp) { T::writeBarrierPost(*vp, vp); }
-    static void postBarrierRelocate(T** vp) { T::writeBarrierPostRelocate(*vp, vp); }
-    static void postBarrierRemove(T** vp) { T::writeBarrierPostRemove(*vp, vp); }
+    static void postBarrier(T** vp, T* prior, T* next) {
+        return T::writeBarrierPost(vp, prior, next);
+    }
     static void readBarrier(T* v) { T::readBarrier(v); }
 };
@@ -263,31 +263,25 @@ struct InternalGCMethods<Value>
         DispatchValueTyped(PreBarrierFunctor<Value>(), v);
     }
-    static void postBarrier(Value* vp) {
+    static void postBarrier(Value* vp, const Value& prev, const Value& next) {
         MOZ_ASSERT(!CurrentThreadIsIonCompiling());
-        if (vp->isObject()) {
-            gc::StoreBuffer* sb = reinterpret_cast<gc::Cell*>(&vp->toObject())->storeBuffer();
-            if (sb)
-                sb->putValueFromAnyThread(vp);
-        }
-    }
-    static void postBarrierRelocate(Value* vp) {
-        MOZ_ASSERT(!CurrentThreadIsIonCompiling());
-        if (vp->isObject()) {
-            gc::StoreBuffer* sb = reinterpret_cast<gc::Cell*>(&vp->toObject())->storeBuffer();
-            if (sb)
-                sb->putValueFromAnyThread(vp);
-        }
-    }
-    static void postBarrierRemove(Value* vp) {
-        MOZ_ASSERT(vp);
-        MOZ_ASSERT(vp->isMarkable());
-        MOZ_ASSERT(!CurrentThreadIsIonCompiling());
-        JSRuntime* rt = static_cast<js::gc::Cell*>(vp->toGCThing())->runtimeFromAnyThread();
-        JS::shadow::Runtime* shadowRuntime = JS::shadow::Runtime::asShadowRuntime(rt);
-        shadowRuntime->gcStoreBufferPtr()->unputValueFromAnyThread(vp);
+        // If the target needs an entry, add it.
+        js::gc::StoreBuffer* sb;
+        if (next.isObject() && (sb = reinterpret_cast<gc::Cell*>(&next.toObject())->storeBuffer())) {
+            // If we know that the prev has already inserted an entry, we can skip
+            // doing the lookup to add the new entry.
+            if (prev.isObject() && reinterpret_cast<gc::Cell*>(&prev.toObject())->storeBuffer()) {
+                sb->assertHasValueEdge(vp);
+                return;
+            }
+            sb->putValueFromAnyThread(vp);
+            return;
+        }
+        // Remove the prev entry if the new value does not need it.
+        if (prev.isObject() && (sb = reinterpret_cast<gc::Cell*>(&prev.toObject())->storeBuffer()))
+            sb->unputValueFromAnyThread(vp);
     }
     static void readBarrier(const Value& v) {
@@ -301,9 +295,7 @@ struct InternalGCMethods<jsid>
     static bool isMarkable(jsid id) { return JSID_IS_STRING(id) || JSID_IS_SYMBOL(id); }
     static void preBarrier(jsid id) { DispatchIdTyped(PreBarrierFunctor<jsid>(), id); }
-    static void postBarrier(jsid* idp) {}
-    static void postBarrierRelocate(jsid* idp) {}
-    static void postBarrierRemove(jsid* idp) {}
+    static void postBarrier(jsid* idp, jsid prev, jsid next) {}
 };
@@ -341,7 +333,6 @@ class BarrieredBase : public BarrieredBaseMixins<T>
     /* For users who need to manually barrier the raw types. */
     static void writeBarrierPre(const T& v) { InternalGCMethods<T>::preBarrier(v); }
-    static void writeBarrierPost(const T& v, T* vp) { InternalGCMethods<T>::postBarrier(vp); }
   protected:
     void pre() { InternalGCMethods<T>::preBarrier(value); }
@@ -409,8 +400,8 @@ class HeapPtr : public BarrieredBase<T>
 {
   public:
     HeapPtr() : BarrieredBase<T>(GCMethods<T>::initial()) {}
-    explicit HeapPtr(T v) : BarrieredBase<T>(v) { post(); }
-    explicit HeapPtr(const HeapPtr<T>& v) : BarrieredBase<T>(v) { post(); }
+    explicit HeapPtr(T v) : BarrieredBase<T>(v) { post(GCMethods<T>::initial(), v); }
+    explicit HeapPtr(const HeapPtr<T>& v) : BarrieredBase<T>(v) { post(GCMethods<T>::initial(), v); }
 #ifdef DEBUG
     ~HeapPtr() {
         // No prebarrier necessary as this only happens when we are sweeping or
@@ -421,19 +412,20 @@ class HeapPtr : public BarrieredBase<T>
     void init(T v) {
         this->value = v;
-        post();
+        post(GCMethods<T>::initial(), v);
     }
     DECLARE_POINTER_ASSIGN_OPS(HeapPtr, T);
   protected:
-    void post() { InternalGCMethods<T>::postBarrier(&this->value); }
+    void post(T prev, T next) { InternalGCMethods<T>::postBarrier(&this->value, prev, next); }
   private:
     void set(const T& v) {
         this->pre();
+        T tmp = this->value;
         this->value = v;
-        post();
+        post(tmp, this->value);
     }
     /*
@@ -494,8 +486,7 @@ class RelocatablePtr : public BarrieredBase<T>
   public:
     RelocatablePtr() : BarrieredBase<T>(GCMethods<T>::initial()) {}
     explicit RelocatablePtr(T v) : BarrieredBase<T>(v) {
-        if (GCMethods<T>::needsPostBarrier(v))
-            post();
+        post(GCMethods<T>::initial(), this->value);
     }
     /*
@@ -505,14 +496,12 @@ class RelocatablePtr : public BarrieredBase<T>
      * simply omit the rvalue variant.
      */
     RelocatablePtr(const RelocatablePtr<T>& v) : BarrieredBase<T>(v) {
-        if (GCMethods<T>::needsPostBarrier(this->value))
-            post();
+        post(GCMethods<T>::initial(), this->value);
     }
     ~RelocatablePtr() {
         this->pre();
-        if (GCMethods<T>::needsPostBarrier(this->value))
-            relocate();
+        post(this->value, GCMethods<T>::initial());
     }
     DECLARE_POINTER_ASSIGN_OPS(RelocatablePtr, T);
@@ -531,25 +520,13 @@ class RelocatablePtr : public BarrieredBase<T>
     }
     void postBarrieredSet(const T& v) {
-        if (GCMethods<T>::needsPostBarrier(v)) {
-            this->value = v;
-            post();
-        } else if (GCMethods<T>::needsPostBarrier(this->value)) {
-            relocate();
-            this->value = v;
-        } else {
-            this->value = v;
-        }
+        T tmp = this->value;
+        this->value = v;
+        post(tmp, this->value);
     }
-    void post() {
-        MOZ_ASSERT(GCMethods<T>::needsPostBarrier(this->value));
-        InternalGCMethods<T>::postBarrierRelocate(&this->value);
-    }
-    void relocate() {
-        MOZ_ASSERT(GCMethods<T>::needsPostBarrier(this->value));
-        InternalGCMethods<T>::postBarrierRemove(&this->value);
+    void post(T prev, T next) {
+        InternalGCMethods<T>::postBarrier(&this->value, prev, next);
     }
 };
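The Value specialization above is the heart of the change: with both prev and next in hand, the barrier can put, merely assert, or unput the store-buffer edge in one place. The following is a self-contained model of that decision table; StoreBufferModel and the bool flags are illustrative stand-ins for js::gc::StoreBuffer and the real storeBuffer() lookups, not code from the patch.

    #include <cassert>
    #include <set>

    // Toy remembered set: stands in for js::gc::StoreBuffer.
    struct StoreBufferModel {
        std::set<void*> edges;
        void put(void* edge)       { edges.insert(edge); }
        void unput(void* edge)     { edges.erase(edge); }
        bool has(void* edge) const { return edges.count(edge) != 0; }
    };

    // Same three-way decision as InternalGCMethods<Value>::postBarrier above,
    // with "is the value nursery-allocated?" reduced to a bool per side.
    void postBarrierModel(StoreBufferModel& sb, void* slot,
                          bool prevInNursery, bool nextInNursery)
    {
        if (nextInNursery) {
            if (prevInNursery) {
                assert(sb.has(slot));   // edge was added by the earlier write
                return;
            }
            sb.put(slot);               // tenured/none -> nursery: record the edge
            return;
        }
        if (prevInNursery)
            sb.unput(slot);             // nursery -> tenured/none: drop the stale edge
    }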

View file

@@ -285,9 +285,8 @@ class TenuredCell : public Cell
     static MOZ_ALWAYS_INLINE void readBarrier(TenuredCell* thing);
     static MOZ_ALWAYS_INLINE void writeBarrierPre(TenuredCell* thing);
-    static MOZ_ALWAYS_INLINE void writeBarrierPost(TenuredCell* thing, void* cellp);
-    static MOZ_ALWAYS_INLINE void writeBarrierPostRelocate(TenuredCell* thing, void* cellp);
-    static MOZ_ALWAYS_INLINE void writeBarrierPostRemove(TenuredCell* thing, void* cellp);
+    static MOZ_ALWAYS_INLINE void writeBarrierPost(void* cellp, TenuredCell* prior,
+                                                   TenuredCell* next);
 #ifdef DEBUG
     inline bool isAligned() const;
@@ -1470,21 +1469,9 @@ AssertValidToSkipBarrier(TenuredCell* thing)
 }
 /* static */ MOZ_ALWAYS_INLINE void
-TenuredCell::writeBarrierPost(TenuredCell* thing, void* cellp)
+TenuredCell::writeBarrierPost(void* cellp, TenuredCell* prior, TenuredCell* next)
 {
-    AssertValidToSkipBarrier(thing);
-}
-/* static */ MOZ_ALWAYS_INLINE void
-TenuredCell::writeBarrierPostRelocate(TenuredCell* thing, void* cellp)
-{
-    AssertValidToSkipBarrier(thing);
-}
-/* static */ MOZ_ALWAYS_INLINE void
-TenuredCell::writeBarrierPostRemove(TenuredCell* thing, void* cellp)
-{
-    AssertValidToSkipBarrier(thing);
+    AssertValidToSkipBarrier(next);
 }
 #ifdef DEBUG

View file

@@ -91,6 +91,10 @@ class StoreBuffer
            stores_.remove(v);
        }
+       bool has(const T& v) const {
+           return stores_.has(v);
+       }
        /* Trace the source of all edges in the store buffer. */
        void trace(StoreBuffer* owner, TenuringTracer& mover);
@@ -403,6 +407,9 @@ class StoreBuffer
        putFromAnyThread(bufferGeneric, CallbackRef<Key>(callback, key, data));
    }
+   void assertHasCellEdge(Cell** cellp) const { MOZ_ASSERT(bufferCell.has(CellPtrEdge(cellp))); }
+   void assertHasValueEdge(Value* vp) const { MOZ_ASSERT(bufferVal.has(ValueEdge(vp))); }
    void setShouldCancelIonCompilations() {
        cancelIonCompilations_ = true;
    }

View file

@@ -291,9 +291,7 @@ class JSObject : public js::gc::Cell
     }
     static MOZ_ALWAYS_INLINE void readBarrier(JSObject* obj);
     static MOZ_ALWAYS_INLINE void writeBarrierPre(JSObject* obj);
-    static MOZ_ALWAYS_INLINE void writeBarrierPost(JSObject* obj, void* cellp);
-    static MOZ_ALWAYS_INLINE void writeBarrierPostRelocate(JSObject* obj, void* cellp);
-    static MOZ_ALWAYS_INLINE void writeBarrierPostRemove(JSObject* obj, void* cellp);
+    static MOZ_ALWAYS_INLINE void writeBarrierPost(void* cellp, JSObject* prev, JSObject* next);
     /* Return the allocKind we would use if we were to tenure this object. */
     js::gc::AllocKind allocKindForTenure(const js::Nursery& nursery) const;
@@ -611,36 +609,26 @@ JSObject::writeBarrierPre(JSObject* obj)
 }
 /* static */ MOZ_ALWAYS_INLINE void
-JSObject::writeBarrierPost(JSObject* obj, void* cellp)
+JSObject::writeBarrierPost(void* cellp, JSObject* prev, JSObject* next)
 {
     MOZ_ASSERT(cellp);
-    if (IsNullTaggedPointer(obj))
-        return;
-    MOZ_ASSERT(obj == *static_cast<JSObject**>(cellp));
-    js::gc::StoreBuffer* storeBuffer = obj->storeBuffer();
-    if (storeBuffer)
-        storeBuffer->putCellFromAnyThread(static_cast<js::gc::Cell**>(cellp));
-}
-/* static */ MOZ_ALWAYS_INLINE void
-JSObject::writeBarrierPostRelocate(JSObject* obj, void* cellp)
-{
-    MOZ_ASSERT(cellp);
-    MOZ_ASSERT(obj);
-    MOZ_ASSERT(obj == *static_cast<JSObject**>(cellp));
-    js::gc::StoreBuffer* storeBuffer = obj->storeBuffer();
-    if (storeBuffer)
-        storeBuffer->putCellFromAnyThread(static_cast<js::gc::Cell**>(cellp));
-}
-/* static */ MOZ_ALWAYS_INLINE void
-JSObject::writeBarrierPostRemove(JSObject* obj, void* cellp)
-{
-    MOZ_ASSERT(cellp);
-    MOZ_ASSERT(obj);
-    MOZ_ASSERT(obj == *static_cast<JSObject**>(cellp));
-    obj->shadowRuntimeFromAnyThread()->gcStoreBufferPtr()->unputCellFromAnyThread(
-        static_cast<js::gc::Cell**>(cellp));
+    // If the target needs an entry, add it.
+    js::gc::StoreBuffer* buffer;
+    if (!IsNullTaggedPointer(next) && (buffer = next->storeBuffer())) {
+        // If we know that the prev has already inserted an entry, we can skip
+        // doing the lookup to add the new entry.
+        if (!IsNullTaggedPointer(prev) && prev->storeBuffer()) {
+            buffer->assertHasCellEdge(static_cast<js::gc::Cell**>(cellp));
+            return;
+        }
+        buffer->putCellFromAnyThread(static_cast<js::gc::Cell**>(cellp));
+        return;
+    }
+    // Remove the prev entry if the new value does not need it.
+    if (!IsNullTaggedPointer(prev) && (buffer = prev->storeBuffer()))
+        buffer->unputCellFromAnyThread(static_cast<js::gc::Cell**>(cellp));
 }
 namespace js {

View file

@@ -347,8 +347,8 @@ class NewObjectCache
     static void copyCachedToObject(NativeObject* dst, NativeObject* src, gc::AllocKind kind) {
         js_memcpy(dst, src, gc::Arena::thingSize(kind));
-        Shape::writeBarrierPost(dst->shape_, &dst->shape_);
-        ObjectGroup::writeBarrierPost(dst->group_, &dst->group_);
+        Shape::writeBarrierPost(&dst->shape_, nullptr, dst->shape_);
+        ObjectGroup::writeBarrierPost(&dst->group_, nullptr, dst->group_);
     }
 };
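prev is nullptr here because dst was just filled by js_memcpy: the copied shape_ and group_ slots have never gone through a barriered write, so there is no earlier store-buffer entry to check or remove. That is the general pattern for freshly initialized memory; a hedged sketch of it (initFirstWrite and its parameters are illustrative, only JSObject::writeBarrierPost is from the patch):

    // Pattern for a slot that has never been barriered before: pass nullptr as
    // prev so the barrier only asks whether the *new* value needs an entry.
    void initFirstWrite(JSObject** slot, JSObject* newObj) {
        *slot = newObj;
        JSObject::writeBarrierPost(slot, /* prev = */ nullptr, /* next = */ newObj);
    }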

View file

@@ -107,25 +107,6 @@ InterpreterFrame::initExecuteFrame(JSContext* cx, HandleScript script, AbstractF
 #endif
 }
-void
-InterpreterFrame::writeBarrierPost()
-{
-    /* This needs to follow the same rules as in InterpreterFrame::mark. */
-    if (scopeChain_)
-        JSObject::writeBarrierPost(scopeChain_, &scopeChain_);
-    if (flags_ & HAS_ARGS_OBJ)
-        JSObject::writeBarrierPost(argsObj_, &argsObj_);
-    if (isFunctionFrame()) {
-        JSFunction::writeBarrierPost(exec.fun, &exec.fun);
-        if (isEvalFrame())
-            JSScript::writeBarrierPost(u.evalScript, &u.evalScript);
-    } else {
-        JSScript::writeBarrierPost(exec.script, &exec.script);
-    }
-    if (hasReturnValue())
-        HeapValue::writeBarrierPost(rval_, &rval_);
-}
 bool
 InterpreterFrame::copyRawFrameSlots(AutoValueVector* vec)
 {

View file

@@ -381,8 +381,6 @@ class InterpreterFrame
         JS_STATIC_ASSERT(sizeof(InterpreterFrame) % sizeof(Value) == 0);
     }
-    void writeBarrierPost();
     /*
      * The utilities are private since they are not able to assert that only
      * unaliased vars/formals are accessed. Normal code should prefer the