Backout 6b847a10bbb1 (Bug 1175642) for being on top of a regression.

--HG--
extra : rebase_source : a87230e8b9ffe2e09cba41874e60f44bc0aa7e16
This commit is contained in:
Terrence Cole 2015-06-24 16:25:12 -07:00
Parent 8ebb5a1b72
Commit 929078d8c7
12 changed files with 210 additions and 102 deletions

View file

@@ -92,19 +92,19 @@ struct GCMethods<nsXBLMaybeCompiled<UncompiledT> >
static nsXBLMaybeCompiled<UncompiledT> initial() { return nsXBLMaybeCompiled<UncompiledT>(); }
static void postBarrier(nsXBLMaybeCompiled<UncompiledT>* functionp,
nsXBLMaybeCompiled<UncompiledT> prev,
nsXBLMaybeCompiled<UncompiledT> next)
static bool needsPostBarrier(nsXBLMaybeCompiled<UncompiledT> function)
{
if (next.IsCompiled()) {
Base::postBarrier(&functionp->UnsafeGetJSFunction(),
prev.IsCompiled() ? prev.UnsafeGetJSFunction() : nullptr,
next.UnsafeGetJSFunction());
} else if (prev.IsCompiled()) {
Base::postBarrier(&prev.UnsafeGetJSFunction(),
prev.UnsafeGetJSFunction(),
nullptr);
}
return function.IsCompiled() && Base::needsPostBarrier(function.GetJSFunction());
}
static void postBarrier(nsXBLMaybeCompiled<UncompiledT>* functionp)
{
Base::postBarrier(&functionp->UnsafeGetJSFunction());
}
static void relocate(nsXBLMaybeCompiled<UncompiledT>* functionp)
{
Base::relocate(&functionp->UnsafeGetJSFunction());
}
};
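The restored specialization only forwards to the underlying JSFunction* traits when the wrapper actually holds a compiled function. A minimal standalone sketch of that forwarding pattern, using hypothetical stand-ins (PtrTraits, MaybeThing) rather than the real GCMethods/nsXBLMaybeCompiled types:

    #include <cstdio>

    // Stand-in for a GC pointer's barrier traits (hypothetical).
    struct PtrTraits {
        static bool needsPostBarrier(void* p) { return p != nullptr; }
        static void postBarrier(void** slot) { std::printf("buffer edge at %p\n", (void*)slot); }
        static void relocate(void** slot)    { std::printf("drop edge at %p\n", (void*)slot); }
    };

    // Stand-in for a value that is only sometimes a GC pointer.
    struct MaybeThing {
        void* ptr = nullptr;                    // non-null stands for the "compiled" case
        bool isThing() const { return ptr != nullptr; }
    };

    // Forwarding traits: consult PtrTraits only when a real pointer is present.
    struct MaybeThingTraits {
        static bool needsPostBarrier(const MaybeThing& v) {
            return v.isThing() && PtrTraits::needsPostBarrier(v.ptr);
        }
        static void postBarrier(MaybeThing* vp) { PtrTraits::postBarrier(&vp->ptr); }
        static void relocate(MaybeThing* vp)    { PtrTraits::relocate(&vp->ptr); }
    };

    int main() {
        MaybeThing m;
        m.ptr = &m;                             // pretend this is a compiled function
        if (MaybeThingTraits::needsPostBarrier(m))
            MaybeThingTraits::postBarrier(&m);
    }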

View file

@@ -171,7 +171,9 @@ namespace js {
template <> struct GCMethods<jsid>
{
static jsid initial() { return JSID_VOID; }
static void postBarrier(jsid* idp, jsid prev, jsid next) {}
static bool needsPostBarrier(jsid id) { return false; }
static void postBarrier(jsid* idp) {}
static void relocate(jsid* idp) {}
};
// If the jsid is a GC pointer type, convert to that type and call |f| with

View file

@@ -174,7 +174,8 @@ template <typename T> class PersistentRooted;
/* This is exposing internal state of the GC for inlining purposes. */
JS_FRIEND_API(bool) isGCEnabled();
JS_FRIEND_API(void) HeapObjectPostBarrier(JSObject** objp, JSObject* prev, JSObject* next);
JS_FRIEND_API(void) HeapObjectPostBarrier(JSObject** objp);
JS_FRIEND_API(void) HeapObjectRelocate(JSObject** objp);
#ifdef JS_DEBUG
/*
@@ -231,7 +232,8 @@ class Heap : public js::HeapBase<T>
explicit Heap(const Heap<T>& p) { init(p.ptr); }
~Heap() {
post(ptr, js::GCMethods<T>::initial());
if (js::GCMethods<T>::needsPostBarrier(ptr))
relocate();
}
DECLARE_POINTER_CONSTREF_OPS(T);
@@ -255,17 +257,29 @@ class Heap : public js::HeapBase<T>
private:
void init(T newPtr) {
ptr = newPtr;
post(js::GCMethods<T>::initial(), ptr);
if (js::GCMethods<T>::needsPostBarrier(ptr))
post();
}
void set(T newPtr) {
T tmp = ptr;
ptr = newPtr;
post(tmp, ptr);
if (js::GCMethods<T>::needsPostBarrier(newPtr)) {
ptr = newPtr;
post();
} else if (js::GCMethods<T>::needsPostBarrier(ptr)) {
relocate(); /* Called before overwriting ptr. */
ptr = newPtr;
} else {
ptr = newPtr;
}
}
void post(const T& prev, const T& next) {
js::GCMethods<T>::postBarrier(&ptr, prev, next);
void post() {
MOZ_ASSERT(js::GCMethods<T>::needsPostBarrier(ptr));
js::GCMethods<T>::postBarrier(&ptr);
}
void relocate() {
js::GCMethods<T>::relocate(&ptr);
}
enum {
@@ -590,9 +604,10 @@ template <typename T>
struct GCMethods<T*>
{
static T* initial() { return nullptr; }
static void postBarrier(T** vp, T* prev, T* next) {
if (next)
JS::AssertGCThingIsNotAnObjectSubclass(reinterpret_cast<js::gc::Cell*>(next));
static bool needsPostBarrier(T* v) { return false; }
static void postBarrier(T** vp) {
if (vp)
JS::AssertGCThingIsNotAnObjectSubclass(reinterpret_cast<js::gc::Cell*>(vp));
}
static void relocate(T** vp) {}
};
@@ -607,8 +622,14 @@ struct GCMethods<JSObject*>
MOZ_ASSERT(uintptr_t(v) > 32);
return reinterpret_cast<gc::Cell*>(v);
}
static void postBarrier(JSObject** vp, JSObject* prev, JSObject* next) {
JS::HeapObjectPostBarrier(vp, prev, next);
static bool needsPostBarrier(JSObject* v) {
return v != nullptr && gc::IsInsideNursery(reinterpret_cast<gc::Cell*>(v));
}
static void postBarrier(JSObject** vp) {
JS::HeapObjectPostBarrier(vp);
}
static void relocate(JSObject** vp) {
JS::HeapObjectRelocate(vp);
}
};
@@ -616,10 +637,14 @@ template <>
struct GCMethods<JSFunction*>
{
static JSFunction* initial() { return nullptr; }
static void postBarrier(JSFunction** vp, JSFunction* prev, JSFunction* next) {
JS::HeapObjectPostBarrier(reinterpret_cast<JSObject**>(vp),
reinterpret_cast<JSObject*>(prev),
reinterpret_cast<JSObject*>(next));
static bool needsPostBarrier(JSFunction* v) {
return v != nullptr && gc::IsInsideNursery(reinterpret_cast<gc::Cell*>(v));
}
static void postBarrier(JSFunction** vp) {
JS::HeapObjectPostBarrier(reinterpret_cast<JSObject**>(vp));
}
static void relocate(JSFunction** vp) {
JS::HeapObjectRelocate(reinterpret_cast<JSObject**>(vp));
}
};
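The core of the restored RootingAPI.h change is the assignment protocol in Heap<T>::set: buffer an edge when the incoming value needs one, drop the stale edge when only the outgoing value needed one, and otherwise store plainly. A simplified, self-contained sketch of that three-way branch; MiniHeap, Traits and the inNursery flag are stand-ins, not the real GCMethods or store buffer:

    #include <cassert>
    #include <cstdio>

    struct Thing { bool inNursery; };

    // Stand-in traits mirroring needsPostBarrier / postBarrier / relocate.
    struct Traits {
        static bool needsPostBarrier(Thing* t) { return t && t->inNursery; }
        static void postBarrier(Thing** slot) { std::printf("buffer edge %p\n", (void*)slot); }
        static void relocate(Thing** slot)    { std::printf("unbuffer edge %p\n", (void*)slot); }
    };

    template <typename T>
    class MiniHeap {
        T ptr = nullptr;
      public:
        ~MiniHeap() {
            if (Traits::needsPostBarrier(ptr))
                relocate();             // the slot is about to disappear with *this
        }
        void set(T newPtr) {
            if (Traits::needsPostBarrier(newPtr)) {
                ptr = newPtr;
                post();                 // new value needs an edge for this slot
            } else if (Traits::needsPostBarrier(ptr)) {
                relocate();             // old value had an edge; drop it before overwriting
                ptr = newPtr;
            } else {
                ptr = newPtr;           // neither side involves the nursery
            }
        }
      private:
        void post()     { assert(Traits::needsPostBarrier(ptr)); Traits::postBarrier(&ptr); }
        void relocate() { Traits::relocate(&ptr); }
    };

    int main() {
        Thing tenured{false}, young{true};
        MiniHeap<Thing*> h;
        h.set(&young);                  // buffers the edge
        h.set(&tenured);                // removes the now-stale edge
    }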

View file

@@ -1629,7 +1629,8 @@ SameType(const Value& lhs, const Value& rhs)
/************************************************************************/
namespace JS {
JS_PUBLIC_API(void) HeapValuePostBarrier(Value* valuep, const Value& prev, const Value& next);
JS_PUBLIC_API(void) HeapValuePostBarrier(Value* valuep);
JS_PUBLIC_API(void) HeapValueRelocate(Value* valuep);
}
namespace js {
@@ -1645,9 +1646,11 @@ template <> struct GCMethods<JS::Value>
static gc::Cell* asGCThingOrNull(const JS::Value& v) {
return v.isMarkable() ? v.toGCThing() : nullptr;
}
static void postBarrier(JS::Value* v, const JS::Value& prev, const JS::Value& next) {
JS::HeapValuePostBarrier(v, prev, next);
static bool needsPostBarrier(const JS::Value& v) {
return v.isObject() && gc::IsInsideNursery(reinterpret_cast<gc::Cell*>(&v.toObject()));
}
static void postBarrier(JS::Value* v) { JS::HeapValuePostBarrier(v); }
static void relocate(JS::Value* v) { JS::HeapValueRelocate(v); }
};
template <class Outer> class MutableValueOperations;
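The Value predicate checks isObject() before the nursery test because only objects are nursery-allocated here, so a Value can only hold a nursery pointer when it holds an object. A toy tagged value showing the same tag-check-then-nursery-check shape (all names are hypothetical, and the real IsInsideNursery consults per-chunk location bits rather than an address range):

    #include <cstdint>

    enum class Tag { Int32, String, Object };

    struct ToyValue {
        Tag tag;
        void* payload;
        bool isObject() const { return tag == Tag::Object; }
    };

    // Hypothetical nursery test: a simple address-range check.
    static bool inNursery(void* p, std::uintptr_t start, std::uintptr_t end) {
        auto a = reinterpret_cast<std::uintptr_t>(p);
        return a >= start && a < end;
    }

    static bool needsPostBarrier(const ToyValue& v, std::uintptr_t start, std::uintptr_t end) {
        // Ints and strings can never be in the nursery in this model, so skip them early.
        return v.isObject() && inNursery(v.payload, start, end);
    }

    int main() { ToyValue v{Tag::Int32, nullptr}; return needsPostBarrier(v, 0, 0) ? 1 : 0; }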

View file

@@ -90,15 +90,31 @@ template void js::PreBarrierFunctor<jsid>::operator()<JS::Symbol>(JS::Symbol*);
template void js::PreBarrierFunctor<jsid>::operator()<JSString>(JSString*);
JS_PUBLIC_API(void)
JS::HeapObjectPostBarrier(JSObject** objp, JSObject* prev, JSObject* next)
JS::HeapObjectPostBarrier(JSObject** objp)
{
MOZ_ASSERT(objp);
js::InternalGCMethods<JSObject*>::postBarrier(objp, prev, next);
MOZ_ASSERT(*objp);
js::InternalGCMethods<JSObject*>::postBarrierRelocate(objp);
}
JS_PUBLIC_API(void)
JS::HeapValuePostBarrier(JS::Value* valuep, const Value& prev, const Value& next)
JS::HeapObjectRelocate(JSObject** objp)
{
MOZ_ASSERT(objp);
MOZ_ASSERT(*objp);
js::InternalGCMethods<JSObject*>::postBarrierRemove(objp);
}
JS_PUBLIC_API(void)
JS::HeapValuePostBarrier(JS::Value* valuep)
{
MOZ_ASSERT(valuep);
js::InternalGCMethods<JS::Value>::postBarrier(valuep, prev, next);
js::InternalGCMethods<JS::Value>::postBarrierRelocate(valuep);
}
JS_PUBLIC_API(void)
JS::HeapValueRelocate(JS::Value* valuep)
{
MOZ_ASSERT(valuep);
js::InternalGCMethods<JS::Value>::postBarrierRemove(valuep);
}
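These public entry points exist so that public headers never see the store buffer: the PostBarrier functions make sure an edge is recorded, while the Relocate functions drop it just before the slot is destroyed or overwritten with a value that needs no barrier. A rough sketch of that call order from a barriered slot's point of view, with BufferEdge/DropEdge standing in for the real entry points:

    #include <set>
    #include <cstdio>

    static std::set<void**> edges;                     // stand-in for the store buffer

    static void BufferEdge(void** slot) { edges.insert(slot); }   // ~ HeapObjectPostBarrier
    static void DropEdge(void** slot)   { edges.erase(slot); }    // ~ HeapObjectRelocate

    int main() {
        void* young = &edges;                          // pretend: a nursery thing
        void* tenured = nullptr;

        void* slot = young;                            // store a nursery pointer...
        BufferEdge(&slot);                             // ...then make sure the edge is recorded

        DropEdge(&slot);                               // about to overwrite with a tenured value
        slot = tenured;                                // no edge needed any more

        std::printf("%zu buffered edges\n", edges.size());
    }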

View file

@@ -239,9 +239,9 @@ struct InternalGCMethods<T*>
static void preBarrier(T* v) { T::writeBarrierPre(v); }
static void postBarrier(T** vp, T* prior, T* next) {
return T::writeBarrierPost(vp, prior, next);
}
static void postBarrier(T** vp) { T::writeBarrierPost(*vp, vp); }
static void postBarrierRelocate(T** vp) { T::writeBarrierPostRelocate(*vp, vp); }
static void postBarrierRemove(T** vp) { T::writeBarrierPostRemove(*vp, vp); }
static void readBarrier(T* v) { T::readBarrier(v); }
};
@@ -263,25 +263,31 @@ struct InternalGCMethods<Value>
DispatchValueTyped(PreBarrierFunctor<Value>(), v);
}
static void postBarrier(Value* vp, const Value& prev, const Value& next) {
static void postBarrier(Value* vp) {
MOZ_ASSERT(!CurrentThreadIsIonCompiling());
MOZ_ASSERT(vp);
// If the target needs an entry, add it.
js::gc::StoreBuffer* sb;
if (next.isObject() && (sb = reinterpret_cast<gc::Cell*>(&next.toObject())->storeBuffer())) {
// If we know that the prev has already inserted an entry, we can skip
// doing the lookup to add the new entry.
if (prev.isObject() && reinterpret_cast<gc::Cell*>(&prev.toObject())->storeBuffer()) {
sb->assertHasValueEdge(vp);
return;
}
sb->putValueFromAnyThread(vp);
return;
if (vp->isObject()) {
gc::StoreBuffer* sb = reinterpret_cast<gc::Cell*>(&vp->toObject())->storeBuffer();
if (sb)
sb->putValueFromAnyThread(vp);
}
// Remove the prev entry if the new value does not need it.
if (prev.isObject() && (sb = reinterpret_cast<gc::Cell*>(&prev.toObject())->storeBuffer()))
sb->unputValueFromAnyThread(vp);
}
static void postBarrierRelocate(Value* vp) {
MOZ_ASSERT(!CurrentThreadIsIonCompiling());
if (vp->isObject()) {
gc::StoreBuffer* sb = reinterpret_cast<gc::Cell*>(&vp->toObject())->storeBuffer();
if (sb)
sb->putValueFromAnyThread(vp);
}
}
static void postBarrierRemove(Value* vp) {
MOZ_ASSERT(vp);
MOZ_ASSERT(vp->isMarkable());
MOZ_ASSERT(!CurrentThreadIsIonCompiling());
JSRuntime* rt = static_cast<js::gc::Cell*>(vp->toGCThing())->runtimeFromAnyThread();
JS::shadow::Runtime* shadowRuntime = JS::shadow::Runtime::asShadowRuntime(rt);
shadowRuntime->gcStoreBufferPtr()->unputValueFromAnyThread(vp);
}
static void readBarrier(const Value& v) {
@@ -295,7 +301,9 @@ struct InternalGCMethods<jsid>
static bool isMarkable(jsid id) { return JSID_IS_STRING(id) || JSID_IS_SYMBOL(id); }
static void preBarrier(jsid id) { DispatchIdTyped(PreBarrierFunctor<jsid>(), id); }
static void postBarrier(jsid* idp, jsid prev, jsid next) {}
static void postBarrier(jsid* idp) {}
static void postBarrierRelocate(jsid* idp) {}
static void postBarrierRemove(jsid* idp) {}
};
template <typename T>
@@ -333,6 +341,7 @@ class BarrieredBase : public BarrieredBaseMixins<T>
/* For users who need to manually barrier the raw types. */
static void writeBarrierPre(const T& v) { InternalGCMethods<T>::preBarrier(v); }
static void writeBarrierPost(const T& v, T* vp) { InternalGCMethods<T>::postBarrier(vp); }
protected:
void pre() { InternalGCMethods<T>::preBarrier(value); }
@@ -400,8 +409,8 @@ class HeapPtr : public BarrieredBase<T>
{
public:
HeapPtr() : BarrieredBase<T>(GCMethods<T>::initial()) {}
explicit HeapPtr(T v) : BarrieredBase<T>(v) { post(GCMethods<T>::initial(), v); }
explicit HeapPtr(const HeapPtr<T>& v) : BarrieredBase<T>(v) { post(GCMethods<T>::initial(), v); }
explicit HeapPtr(T v) : BarrieredBase<T>(v) { post(); }
explicit HeapPtr(const HeapPtr<T>& v) : BarrieredBase<T>(v) { post(); }
#ifdef DEBUG
~HeapPtr() {
// No prebarrier necessary as this only happens when we are sweeping or
@@ -412,20 +421,19 @@ class HeapPtr : public BarrieredBase<T>
void init(T v) {
this->value = v;
post(GCMethods<T>::initial(), v);
post();
}
DECLARE_POINTER_ASSIGN_OPS(HeapPtr, T);
protected:
void post(T prev, T next) { InternalGCMethods<T>::postBarrier(&this->value, prev, next); }
void post() { InternalGCMethods<T>::postBarrier(&this->value); }
private:
void set(const T& v) {
this->pre();
T tmp = this->value;
this->value = v;
post(tmp, this->value);
post();
}
/*
@@ -486,7 +494,8 @@ class RelocatablePtr : public BarrieredBase<T>
public:
RelocatablePtr() : BarrieredBase<T>(GCMethods<T>::initial()) {}
explicit RelocatablePtr(T v) : BarrieredBase<T>(v) {
post(GCMethods<T>::initial(), this->value);
if (GCMethods<T>::needsPostBarrier(v))
post();
}
/*
@@ -496,12 +505,14 @@ class RelocatablePtr : public BarrieredBase<T>
* simply omit the rvalue variant.
*/
RelocatablePtr(const RelocatablePtr<T>& v) : BarrieredBase<T>(v) {
post(GCMethods<T>::initial(), this->value);
if (GCMethods<T>::needsPostBarrier(this->value))
post();
}
~RelocatablePtr() {
this->pre();
post(this->value, GCMethods<T>::initial());
if (GCMethods<T>::needsPostBarrier(this->value))
relocate();
}
DECLARE_POINTER_ASSIGN_OPS(RelocatablePtr, T);
@@ -520,13 +531,25 @@ class RelocatablePtr : public BarrieredBase<T>
}
void postBarrieredSet(const T& v) {
T tmp = this->value;
this->value = v;
post(tmp, this->value);
if (GCMethods<T>::needsPostBarrier(v)) {
this->value = v;
post();
} else if (GCMethods<T>::needsPostBarrier(this->value)) {
relocate();
this->value = v;
} else {
this->value = v;
}
}
void post(T prev, T next) {
InternalGCMethods<T>::postBarrier(&this->value, prev, next);
void post() {
MOZ_ASSERT(GCMethods<T>::needsPostBarrier(this->value));
InternalGCMethods<T>::postBarrierRelocate(&this->value);
}
void relocate() {
MOZ_ASSERT(GCMethods<T>::needsPostBarrier(this->value));
InternalGCMethods<T>::postBarrierRemove(&this->value);
}
};
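RelocatablePtr has to call relocate() before it is destroyed or overwritten because the store buffer records raw slot addresses that the next minor GC will dereference; a stale entry would point into freed memory. A toy model of that tracing step (ToyStoreBuffer and traceMinor are hypothetical, loosely mirroring putValueFromAnyThread/unputValueFromAnyThread):

    #include <set>
    #include <cstdio>

    struct Thing { bool inNursery; };

    struct ToyStoreBuffer {
        std::set<Thing**> slots;                       // addresses of fields holding nursery things
        void put(Thing** slot)   { slots.insert(slot); }
        void unput(Thing** slot) { slots.erase(slot); }
        void traceMinor() {
            // The minor GC dereferences every recorded slot; a slot belonging to a
            // destroyed RelocatablePtr would be freed memory at this point.
            for (Thing** slot : slots)
                std::printf("tracing slot %p -> thing %p\n", (void*)slot, (void*)*slot);
        }
    };

    int main() {
        ToyStoreBuffer sb;
        Thing young{true};
        {
            Thing* slot = &young;                      // stands in for RelocatablePtr's value
            sb.put(&slot);
            sb.unput(&slot);                           // ~ relocate() in the destructor: drop the edge
        }                                              // slot's storage is gone after this brace
        sb.traceMinor();                               // safe: nothing stale left to walk
    }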

View file

@@ -285,8 +285,9 @@ class TenuredCell : public Cell
static MOZ_ALWAYS_INLINE void readBarrier(TenuredCell* thing);
static MOZ_ALWAYS_INLINE void writeBarrierPre(TenuredCell* thing);
static MOZ_ALWAYS_INLINE void writeBarrierPost(void* cellp, TenuredCell* prior,
TenuredCell* next);
static MOZ_ALWAYS_INLINE void writeBarrierPost(TenuredCell* thing, void* cellp);
static MOZ_ALWAYS_INLINE void writeBarrierPostRelocate(TenuredCell* thing, void* cellp);
static MOZ_ALWAYS_INLINE void writeBarrierPostRemove(TenuredCell* thing, void* cellp);
#ifdef DEBUG
inline bool isAligned() const;
@@ -1469,9 +1470,21 @@ AssertValidToSkipBarrier(TenuredCell* thing)
}
/* static */ MOZ_ALWAYS_INLINE void
TenuredCell::writeBarrierPost(void* cellp, TenuredCell* prior, TenuredCell* next)
TenuredCell::writeBarrierPost(TenuredCell* thing, void* cellp)
{
AssertValidToSkipBarrier(next);
AssertValidToSkipBarrier(thing);
}
/* static */ MOZ_ALWAYS_INLINE void
TenuredCell::writeBarrierPostRelocate(TenuredCell* thing, void* cellp)
{
AssertValidToSkipBarrier(thing);
}
/* static */ MOZ_ALWAYS_INLINE void
TenuredCell::writeBarrierPostRemove(TenuredCell* thing, void* cellp)
{
AssertValidToSkipBarrier(thing);
}
#ifdef DEBUG
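All three TenuredCell hooks reduce to an assertion: a tenured cell is never a nursery target, so generic barrier code can call them and they only need to check, never buffer. A compact sketch of that assert-only barrier idea with a hypothetical always-tenured type:

    #include <cassert>

    struct TenuredOnly {
        // Hypothetical invariant check standing in for AssertValidToSkipBarrier.
        static bool neverInNursery(const TenuredOnly* t) { return t != nullptr; }

        // Generic barrier code calls these; for always-tenured types they only
        // assert and never touch the store buffer.
        static void writeBarrierPost(TenuredOnly* thing, void*)         { assert(neverInNursery(thing)); }
        static void writeBarrierPostRelocate(TenuredOnly* thing, void*) { assert(neverInNursery(thing)); }
        static void writeBarrierPostRemove(TenuredOnly* thing, void*)   { assert(neverInNursery(thing)); }
    };

    int main() {
        TenuredOnly t;
        TenuredOnly::writeBarrierPost(&t, nullptr);
    }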

View file

@@ -91,10 +91,6 @@ class StoreBuffer
stores_.remove(v);
}
bool has(const T& v) const {
return stores_.has(v);
}
/* Trace the source of all edges in the store buffer. */
void trace(StoreBuffer* owner, TenuringTracer& mover);
@@ -407,9 +403,6 @@ class StoreBuffer
putFromAnyThread(bufferGeneric, CallbackRef<Key>(callback, key, data));
}
void assertHasCellEdge(Cell** cellp) const { MOZ_ASSERT(bufferCell.has(CellPtrEdge(cellp))); }
void assertHasValueEdge(Value* vp) const { MOZ_ASSERT(bufferVal.has(ValueEdge(vp))); }
void setShouldCancelIonCompilations() {
cancelIonCompilations_ = true;
}

View file

@@ -310,7 +310,9 @@ class JSObject : public js::gc::Cell
}
static MOZ_ALWAYS_INLINE void readBarrier(JSObject* obj);
static MOZ_ALWAYS_INLINE void writeBarrierPre(JSObject* obj);
static MOZ_ALWAYS_INLINE void writeBarrierPost(void* cellp, JSObject* prev, JSObject* next);
static MOZ_ALWAYS_INLINE void writeBarrierPost(JSObject* obj, void* cellp);
static MOZ_ALWAYS_INLINE void writeBarrierPostRelocate(JSObject* obj, void* cellp);
static MOZ_ALWAYS_INLINE void writeBarrierPostRemove(JSObject* obj, void* cellp);
/* Return the allocKind we would use if we were to tenure this object. */
js::gc::AllocKind allocKindForTenure(const js::Nursery& nursery) const;
@@ -628,26 +630,36 @@ JSObject::writeBarrierPre(JSObject* obj)
}
/* static */ MOZ_ALWAYS_INLINE void
JSObject::writeBarrierPost(void* cellp, JSObject* prev, JSObject* next)
JSObject::writeBarrierPost(JSObject* obj, void* cellp)
{
MOZ_ASSERT(cellp);
// If the target needs an entry, add it.
js::gc::StoreBuffer* buffer;
if (!IsNullTaggedPointer(next) && (buffer = next->storeBuffer())) {
// If we know that the prev has already inserted an entry, we can skip
// doing the lookup to add the new entry.
if (!IsNullTaggedPointer(prev) && prev->storeBuffer()) {
buffer->assertHasCellEdge(static_cast<js::gc::Cell**>(cellp));
return;
}
buffer->putCellFromAnyThread(static_cast<js::gc::Cell**>(cellp));
if (IsNullTaggedPointer(obj))
return;
}
MOZ_ASSERT(obj == *static_cast<JSObject**>(cellp));
js::gc::StoreBuffer* storeBuffer = obj->storeBuffer();
if (storeBuffer)
storeBuffer->putCellFromAnyThread(static_cast<js::gc::Cell**>(cellp));
}
// Remove the prev entry if the new value does not need it.
if (!IsNullTaggedPointer(prev) && (buffer = prev->storeBuffer()))
buffer->unputCellFromAnyThread(static_cast<js::gc::Cell**>(cellp));
/* static */ MOZ_ALWAYS_INLINE void
JSObject::writeBarrierPostRelocate(JSObject* obj, void* cellp)
{
MOZ_ASSERT(cellp);
MOZ_ASSERT(obj);
MOZ_ASSERT(obj == *static_cast<JSObject**>(cellp));
js::gc::StoreBuffer* storeBuffer = obj->storeBuffer();
if (storeBuffer)
storeBuffer->putCellFromAnyThread(static_cast<js::gc::Cell**>(cellp));
}
/* static */ MOZ_ALWAYS_INLINE void
JSObject::writeBarrierPostRemove(JSObject* obj, void* cellp)
{
MOZ_ASSERT(cellp);
MOZ_ASSERT(obj);
MOZ_ASSERT(obj == *static_cast<JSObject**>(cellp));
obj->shadowRuntimeFromAnyThread()->gcStoreBufferPtr()->unputCellFromAnyThread(
static_cast<js::gc::Cell**>(cellp));
}
namespace js {
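A detail worth calling out in writeBarrierPost: the store buffer records cellp, the address of the field, while obj is only used to decide whether an entry is needed and to assert that the field really holds it. Recording the slot address is what lets a later minor GC update the field after moving the object, as in this sketch (ToyBuffer and moveAndFix are hypothetical):

    #include <set>
    #include <cstdio>

    struct Obj { int data; };

    struct ToyBuffer {
        std::set<Obj**> cells;                         // slot addresses, analogous to Cell**
        void putCell(Obj** cellp) { cells.insert(cellp); }
        // Minor GC: move the object, then fix every recorded slot to point at the copy.
        void moveAndFix(Obj* from, Obj* to) {
            *to = *from;
            for (Obj** cellp : cells)
                if (*cellp == from)
                    *cellp = to;
        }
    };

    int main() {
        ToyBuffer sb;
        Obj nurseryCopy{42}, tenuredCopy{0};
        Obj* field = &nurseryCopy;                     // a GC-pointer field in some structure
        sb.putCell(&field);                            // the barrier records &field, not the object
        sb.moveAndFix(&nurseryCopy, &tenuredCopy);
        std::printf("field now sees the tenured copy: %d\n", field->data);
    }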

View file

@@ -347,8 +347,8 @@ class NewObjectCache
static void copyCachedToObject(NativeObject* dst, NativeObject* src, gc::AllocKind kind) {
js_memcpy(dst, src, gc::Arena::thingSize(kind));
Shape::writeBarrierPost(&dst->shape_, nullptr, dst->shape_);
ObjectGroup::writeBarrierPost(&dst->group_, nullptr, dst->group_);
Shape::writeBarrierPost(dst->shape_, &dst->shape_);
ObjectGroup::writeBarrierPost(dst->group_, &dst->group_);
}
};
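copyCachedToObject is a good example of why these static hooks exist: a raw js_memcpy copies GC-pointer fields without running any barriers, so each copied field has to be post-barriered by hand afterwards. A reduced sketch of the same pattern, with Dst, GcPtr and recordEdge as stand-ins:

    #include <cstring>
    #include <cstdio>

    struct GcPtr { void* p; };

    struct Dst {
        GcPtr shape;
        GcPtr group;
    };

    // Stand-in for Shape::writeBarrierPost / ObjectGroup::writeBarrierPost.
    static void recordEdge(void* thing, void* fieldp) {
        std::printf("edge: field %p -> thing %p\n", fieldp, thing);
    }

    static void copyCached(Dst* dst, const Dst* src) {
        std::memcpy(dst, src, sizeof(Dst));            // raw copy: no barriers run here
        recordEdge(dst->shape.p, &dst->shape);         // so barrier each copied GC field by hand
        recordEdge(dst->group.p, &dst->group);
    }

    int main() {
        Dst a{{&a}, {&a}}, b{};
        copyCached(&b, &a);
    }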

View file

@@ -107,6 +107,25 @@ InterpreterFrame::initExecuteFrame(JSContext* cx, HandleScript script, AbstractF
#endif
}
void
InterpreterFrame::writeBarrierPost()
{
/* This needs to follow the same rules as in InterpreterFrame::mark. */
if (scopeChain_)
JSObject::writeBarrierPost(scopeChain_, &scopeChain_);
if (flags_ & HAS_ARGS_OBJ)
JSObject::writeBarrierPost(argsObj_, &argsObj_);
if (isFunctionFrame()) {
JSFunction::writeBarrierPost(exec.fun, &exec.fun);
if (isEvalFrame())
JSScript::writeBarrierPost(u.evalScript, &u.evalScript);
} else {
JSScript::writeBarrierPost(exec.script, &exec.script);
}
if (hasReturnValue())
HeapValue::writeBarrierPost(rval_, &rval_);
}
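The restored InterpreterFrame::writeBarrierPost has to mirror InterpreterFrame::mark: only the fields that are live for this frame's flags are barriered, including choosing the right member of the exec union. A stripped-down sketch of that flag-driven selection, with Frame, Fun, Script and barrier as hypothetical stand-ins:

    #include <cstdio>

    struct Fun { int x; };
    struct Script { int y; };

    static void barrier(void* thing, void* fieldp) {
        std::printf("edge: %p -> %p\n", fieldp, thing);
    }

    struct Frame {
        enum : unsigned { HAS_ARGS_OBJ = 1, FUNCTION = 2, EVAL = 4 };
        unsigned flags = 0;
        void* scopeChain = nullptr;
        void* argsObj = nullptr;
        union { Fun* fun; Script* script; } exec;
        Script* evalScript = nullptr;

        // Barrier only what mark() would trace for this flag combination.
        void writeBarrierPost() {
            if (scopeChain)
                barrier(scopeChain, &scopeChain);
            if (flags & HAS_ARGS_OBJ)
                barrier(argsObj, &argsObj);
            if (flags & FUNCTION) {
                barrier(exec.fun, &exec.fun);
                if (flags & EVAL)
                    barrier(evalScript, &evalScript);
            } else {
                barrier(exec.script, &exec.script);
            }
        }
    };

    int main() {
        Frame f;
        Fun fn{1};
        f.flags = Frame::FUNCTION;
        f.exec.fun = &fn;
        f.writeBarrierPost();
    }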
bool
InterpreterFrame::copyRawFrameSlots(AutoValueVector* vec)
{

View file

@@ -381,6 +381,8 @@ class InterpreterFrame
JS_STATIC_ASSERT(sizeof(InterpreterFrame) % sizeof(Value) == 0);
}
void writeBarrierPost();
/*
* The utilities are private since they are not able to assert that only
* unaliased vars/formals are accessed. Normal code should prefer the