Bug 1260785 - Make hashcode generation fallible for cell pointers that can be moved by GC r=terrence

This commit is contained in:
Jon Coppeard 2016-07-13 10:20:00 +01:00
Родитель 9800e39e70
Коммит 17af524d85
11 изменённых файлов: 151 добавление и 27 удалений

Просмотреть файл

@ -62,6 +62,7 @@ class HashMap
struct MapHashPolicy : HashPolicy
{
using Base = HashPolicy;
typedef Key KeyType;
static const Key& getKey(TableEntry& e) { return e.key(); }
static void setKey(TableEntry& e, Key& k) { HashPolicy::rekey(e.mutableKey(), k); }
@ -316,6 +317,7 @@ class HashSet
{
struct SetOps : HashPolicy
{
using Base = HashPolicy;
typedef T KeyType;
static const KeyType& getKey(const T& t) { return t; }
static void setKey(T& t, KeyType& k) { HashPolicy::rekey(t, k); }
@ -675,6 +677,38 @@ struct CStringHasher
}
};
// Fallible hashing interface.
//
// Generating a hash code is infallible most of the time, so this class
// supplies default methods that always succeed.  Specialize it for your own
// hash policy when hash code generation can fail.
//
// MovableCellHasher uses this to cope with the fact that creating a unique
// ID for a cell pointer may fail due to OOM.
template <typename HashPolicy>
struct FallibleHashMethods
{
    // Return whether a hash code already exists for the argument.  Once this
    // has returned true for a given argument it must keep doing so.
    template <typename Lookup> static bool hasHash(Lookup&& aLookup) { return true; }

    // Fallibly ensure a hash code exists for the argument, creating one if
    // needed.  Returns false on failure, e.g. out of memory.
    template <typename Lookup> static bool ensureHash(Lookup&& aLookup) { return true; }
};
template <typename HashPolicy, typename Lookup>
static bool
HasHash(Lookup&& l) {
return FallibleHashMethods<typename HashPolicy::Base>::hasHash(mozilla::Forward<Lookup>(l));
}
template <typename HashPolicy, typename Lookup>
static bool
EnsureHash(Lookup&& l) {
return FallibleHashMethods<typename HashPolicy::Base>::ensureHash(mozilla::Forward<Lookup>(l));
}
/*****************************************************************************/
// Both HashMap and HashSet are implemented by a single HashTable that is even
@ -852,14 +886,21 @@ class HashTable : private AllocPolicy
{}
public:
// Leaves Ptr uninitialized.
Ptr() {
Ptr()
: entry_(nullptr)
#ifdef JS_DEBUG
entry_ = (Entry*)0xbad;
, table_(nullptr)
, generation(0)
#endif
{}
bool isValid() const {
return !entry_;
}
bool found() const {
if (isValid())
return false;
#ifdef JS_DEBUG
MOZ_ASSERT(generation == table_->generation());
#endif
@ -884,6 +925,7 @@ class HashTable : private AllocPolicy
T& operator*() const {
#ifdef JS_DEBUG
MOZ_ASSERT(found());
MOZ_ASSERT(generation == table_->generation());
#endif
return entry_->get();
@ -891,6 +933,7 @@ class HashTable : private AllocPolicy
T* operator->() const {
#ifdef JS_DEBUG
MOZ_ASSERT(found());
MOZ_ASSERT(generation == table_->generation());
#endif
return &entry_->get();
@ -915,8 +958,7 @@ class HashTable : private AllocPolicy
{}
public:
// Leaves AddPtr uninitialized.
AddPtr() {}
AddPtr() : keyHash(0) {}
};
// A collection of hash table entries. The collection is enumerated by
@ -1682,12 +1724,16 @@ class HashTable : private AllocPolicy
Ptr lookup(const Lookup& l) const
{
mozilla::ReentrancyGuard g(*this);
if (!HasHash<HashPolicy>(l))
return Ptr();
HashNumber keyHash = prepareHash(l);
return Ptr(lookup(l, keyHash, 0), *this);
}
Ptr readonlyThreadsafeLookup(const Lookup& l) const
{
if (!HasHash<HashPolicy>(l))
return Ptr();
HashNumber keyHash = prepareHash(l);
return Ptr(lookup(l, keyHash, 0), *this);
}
@ -1695,6 +1741,8 @@ class HashTable : private AllocPolicy
AddPtr lookupForAdd(const Lookup& l) const
{
mozilla::ReentrancyGuard g(*this);
if (!EnsureHash<HashPolicy>(l))
return AddPtr();
HashNumber keyHash = prepareHash(l);
Entry& entry = lookup(l, keyHash, sCollisionBit);
AddPtr p(entry, *this, keyHash);
@ -1709,6 +1757,10 @@ class HashTable : private AllocPolicy
MOZ_ASSERT(!p.found());
MOZ_ASSERT(!(p.keyHash & sCollisionBit));
// Check for error from ensureHash() here.
if (p.isValid())
return false;
// Changing an entry from removed to live does not affect whether we
// are overloaded and can be handled separately.
if (p.entry_->isRemoved()) {
@ -1756,6 +1808,9 @@ class HashTable : private AllocPolicy
if (!this->checkSimulatedOOM())
return false;
if (!EnsureHash<HashPolicy>(l))
return false;
if (checkOverloaded() == RehashFailed)
return false;
@ -1768,6 +1823,10 @@ class HashTable : private AllocPolicy
template <typename... Args>
MOZ_MUST_USE bool relookupOrAdd(AddPtr& p, const Lookup& l, Args&&... args)
{
// Check for error from ensureHash() here.
if (p.isValid())
return false;
#ifdef JS_DEBUG
p.generation = generation();
p.mutationCount = mutationCount;

Просмотреть файл

@ -27,10 +27,6 @@
#include "js/TypeDecls.h"
#include "js/Utility.h"
namespace js {
template <typename T> struct DefaultHasher;
} // namespace js
struct jsid
{
size_t asBits;

Просмотреть файл

@ -564,6 +564,8 @@ struct JS_PUBLIC_API(MovableCellHasher)
using Key = T;
using Lookup = T;
static bool hasHash(const Lookup& l);
static bool ensureHash(const Lookup& l);
static HashNumber hash(const Lookup& l);
static bool match(const Key& k, const Lookup& l);
static void rekey(Key& k, const Key& newKey) { k = newKey; }
@ -575,11 +577,24 @@ struct JS_PUBLIC_API(MovableCellHasher<JS::Heap<T>>)
using Key = JS::Heap<T>;
using Lookup = T;
static bool hasHash(const Lookup& l) { return MovableCellHasher<T>::hasHash(l); }
static bool ensureHash(const Lookup& l) { return MovableCellHasher<T>::ensureHash(l); }
static HashNumber hash(const Lookup& l) { return MovableCellHasher<T>::hash(l); }
static bool match(const Key& k, const Lookup& l) { return MovableCellHasher<T>::match(k, l); }
static void rekey(Key& k, const Key& newKey) { k.unsafeSet(newKey); }
};
// Route fallible hashing for MovableCellHasher-keyed tables through the
// hasher itself, whose unique-ID creation can fail under OOM.
template <typename T>
struct FallibleHashMethods<MovableCellHasher<T>>
{
    template <typename Lookup> static bool hasHash(Lookup&& aLookup) {
        return MovableCellHasher<T>::hasHash(mozilla::Forward<Lookup>(aLookup));
    }

    template <typename Lookup> static bool ensureHash(Lookup&& aLookup) {
        return MovableCellHasher<T>::ensureHash(mozilla::Forward<Lookup>(aLookup));
    }
};
} /* namespace js */
namespace js {

Просмотреть файл

@ -106,6 +106,27 @@ JS_FOR_EACH_TRACEKIND(JS_EXPAND_DEF);
template void PreBarrierFunctor<jsid>::operator()<JS::Symbol>(JS::Symbol*);
template void PreBarrierFunctor<jsid>::operator()<JSString>(JSString*);
// A hash code is available iff the cell's zone has already assigned it a
// unique ID.  A null lookup trivially has a hash.
template <typename T>
/* static */ bool
MovableCellHasher<T>::hasHash(const Lookup& l)
{
    return !l || l->zoneFromAnyThread()->hasUniqueId(l);
}
// Fallibly make sure the cell has a unique ID, creating one if necessary.
// The ID value itself is discarded here; only success/failure (OOM) is
// reported to the caller.
template <typename T>
/* static */ bool
MovableCellHasher<T>::ensureHash(const Lookup& l)
{
    if (!l)
        return true;

    uint64_t discardedId;
    return l->zoneFromAnyThread()->getUniqueId(l, &discardedId);
}
template <typename T>
/* static */ HashNumber
MovableCellHasher<T>::hash(const Lookup& l)

Просмотреть файл

@ -801,6 +801,8 @@ struct MovableCellHasher<PreBarriered<T>>
using Key = PreBarriered<T>;
using Lookup = T;
static bool hasHash(const Lookup& l) { return MovableCellHasher<T>::hasHash(l); }
static bool ensureHash(const Lookup& l) { return MovableCellHasher<T>::ensureHash(l); }
static HashNumber hash(const Lookup& l) { return MovableCellHasher<T>::hash(l); }
static bool match(const Key& k, const Lookup& l) { return MovableCellHasher<T>::match(k, l); }
static void rekey(Key& k, const Key& newKey) { k.unsafeSet(newKey); }
@ -812,6 +814,8 @@ struct MovableCellHasher<HeapPtr<T>>
using Key = HeapPtr<T>;
using Lookup = T;
static bool hasHash(const Lookup& l) { return MovableCellHasher<T>::hasHash(l); }
static bool ensureHash(const Lookup& l) { return MovableCellHasher<T>::ensureHash(l); }
static HashNumber hash(const Lookup& l) { return MovableCellHasher<T>::hash(l); }
static bool match(const Key& k, const Lookup& l) { return MovableCellHasher<T>::match(k, l); }
static void rekey(Key& k, const Key& newKey) { k.unsafeSet(newKey); }
@ -823,6 +827,8 @@ struct MovableCellHasher<ReadBarriered<T>>
using Key = ReadBarriered<T>;
using Lookup = T;
static bool hasHash(const Lookup& l) { return MovableCellHasher<T>::hasHash(l); }
static bool ensureHash(const Lookup& l) { return MovableCellHasher<T>::ensureHash(l); }
static HashNumber hash(const Lookup& l) { return MovableCellHasher<T>::hash(l); }
static bool match(const Key& k, const Lookup& l) {
return MovableCellHasher<T>::match(k.unbarrieredGet(), l);

Просмотреть файл

@ -428,7 +428,12 @@ struct Zone : public JS::shadow::Zone,
// If the cell was in the nursery, hopefully unlikely, then we need to
// tell the nursery about it so that it can sweep the uid if the thing
// does not get tenured.
return runtimeFromAnyThread()->gc.nursery.addedUniqueIdToCell(cell);
if (!runtimeFromAnyThread()->gc.nursery.addedUniqueIdToCell(cell)) {
uniqueIds_.remove(cell);
return false;
}
return true;
}
js::HashNumber getHashCodeInfallible(js::gc::Cell* cell) {

Просмотреть файл

@ -24,9 +24,11 @@ BEGIN_TEST(testWeakCacheSet)
JS::RootedObject nursery1(cx, JS_NewPlainObject(cx));
JS::RootedObject nursery2(cx, JS_NewPlainObject(cx));
using ObjectSet = js::GCHashSet<JS::Heap<JSObject*>, js::MovableCellHasher<JS::Heap<JSObject*>>>;
using ObjectSet = js::GCHashSet<JS::Heap<JSObject*>,
js::MovableCellHasher<JS::Heap<JSObject*>>,
js::SystemAllocPolicy>;
using Cache = JS::WeakCache<ObjectSet>;
auto cache = Cache(JS::GetObjectZone(tenured1), ObjectSet(cx));
auto cache = Cache(JS::GetObjectZone(tenured1), ObjectSet());
CHECK(cache.init());
cache.put(tenured1);

Просмотреть файл

@ -62,8 +62,10 @@ js::AutoCycleDetector::init()
AutoCycleDetector::Set& set = cx->cycleDetectorSet;
hashsetAddPointer = set.lookupForAdd(obj);
if (!hashsetAddPointer) {
if (!set.add(hashsetAddPointer, obj))
if (!set.add(hashsetAddPointer, obj)) {
ReportOutOfMemory(cx);
return false;
}
cyclic = false;
hashsetGenerationAtInit = set.generation();
}
@ -869,7 +871,6 @@ JSContext::JSContext(JSRuntime* parentRuntime)
reportGranularity(JS_DEFAULT_JITREPORT_GRANULARITY),
resolvingList(nullptr),
generatingError(false),
cycleDetectorSet(this),
outstandingRequests(0),
jitIsBroken(false)
{

Просмотреть файл

@ -36,7 +36,7 @@ typedef HashSet<Shape*> ShapeSet;
class MOZ_RAII AutoCycleDetector
{
public:
using Set = HashSet<JSObject*, MovableCellHasher<JSObject*>>;
using Set = HashSet<JSObject*, MovableCellHasher<JSObject*>, SystemAllocPolicy>;
AutoCycleDetector(JSContext* cx, HandleObject objArg
MOZ_GUARD_OBJECT_NOTIFIER_PARAM)

Просмотреть файл

@ -128,6 +128,8 @@ Quote(JSContext* cx, StringBuffer& sb, JSString* str)
namespace {
using ObjectSet = GCHashSet<JSObject*, MovableCellHasher<JSObject*>, SystemAllocPolicy>;
class StringifyContext
{
public:
@ -137,7 +139,7 @@ class StringifyContext
: sb(sb),
gap(gap),
replacer(cx, replacer),
stack(cx, GCHashSet<JSObject*, MovableCellHasher<JSObject*>>(cx)),
stack(cx),
propertyList(propertyList),
depth(0),
maybeSafely(maybeSafely)
@ -153,7 +155,7 @@ class StringifyContext
StringBuffer& sb;
const StringBuffer& gap;
RootedObject replacer;
Rooted<GCHashSet<JSObject*, MovableCellHasher<JSObject*>>> stack;
Rooted<ObjectSet> stack;
const AutoIdVector& propertyList;
uint32_t depth;
bool maybeSafely;
@ -311,7 +313,11 @@ class CycleDetector
js_object_str);
return false;
}
return stack.add(addPtr, obj_);
if (!stack.add(addPtr, obj_)) {
ReportOutOfMemory(cx);
return false;
}
return true;
}
~CycleDetector() {
@ -319,7 +325,7 @@ class CycleDetector
}
private:
MutableHandle<GCHashSet<JSObject*, MovableCellHasher<JSObject*>>> stack;
MutableHandle<ObjectSet> stack;
HandleObject obj_;
};

Просмотреть файл

@ -277,14 +277,20 @@ struct JSStructuredCloneWriter {
Value tVal)
: out(cx), objs(out.context()),
counts(out.context()), entries(out.context()),
memory(out.context(), CloneMemory(out.context())), callbacks(cb),
memory(out.context()), callbacks(cb),
closure(cbClosure), transferable(out.context(), tVal),
transferableObjects(out.context(), GCHashSet<JSObject*>(cx))
{}
~JSStructuredCloneWriter();
bool init() { return memory.init() && parseTransferable() && writeTransferMap(); }
bool init() {
if (!memory.init()) {
ReportOutOfMemory(context());
return false;
}
return parseTransferable() && writeTransferMap();
}
bool write(HandleValue v);
@ -339,10 +345,13 @@ struct JSStructuredCloneWriter {
// For SavedFrame: parent SavedFrame
AutoValueVector entries;
// The "memory" list described in the HTML5 internal structured cloning algorithm.
// memory is a superset of objs; items are never removed from Memory
// until a serialization operation is finished
using CloneMemory = GCHashMap<JSObject*, uint32_t, MovableCellHasher<JSObject*>>;
// The "memory" list described in the HTML5 internal structured cloning
// algorithm. memory is a superset of objs; items are never removed from
// Memory until a serialization operation is finished
using CloneMemory = GCHashMap<JSObject*,
uint32_t,
MovableCellHasher<JSObject*>,
SystemAllocPolicy>;
Rooted<CloneMemory> memory;
// The user defined callbacks that will be used for cloning.
@ -968,8 +977,10 @@ JSStructuredCloneWriter::startObject(HandleObject obj, bool* backref)
CloneMemory::AddPtr p = memory.lookupForAdd(obj);
if ((*backref = p.found()))
return out.writePair(SCTAG_BACK_REFERENCE_OBJECT, p->value());
if (!memory.add(p, obj, memory.count()))
if (!memory.add(p, obj, memory.count())) {
ReportOutOfMemory(context());
return false;
}
if (memory.count() == UINT32_MAX) {
JS_ReportErrorNumber(context(), GetErrorMessage, nullptr,
@ -1268,8 +1279,10 @@ JSStructuredCloneWriter::writeTransferMap()
for (auto tr = transferableObjects.all(); !tr.empty(); tr.popFront()) {
obj = tr.front();
if (!memory.put(obj, memory.count()))
if (!memory.put(obj, memory.count())) {
ReportOutOfMemory(context());
return false;
}
// Emit a placeholder pointer. We defer stealing the data until later
// (and, if necessary, detaching this object if it's an ArrayBuffer).