Bug 1324002 - Mark atoms separately in each zone, r=jonco,mccr8,peterv.

--HG--
extra : rebase_source : 53cf4fa94f122a991c2adbd7bae3714de4391bb4
This commit is contained in:
Brian Hackett 2017-01-30 06:31:47 -07:00
Родитель b398d33261
Коммит 8033ed29a8
54 изменённых файлов: 1007 добавлений и 154 удалений

Просмотреть файл

@ -812,15 +812,17 @@ MaybeWrapNonDOMObjectOrNullValue(JSContext* cx, JS::MutableHandle<JS::Value> rva
MOZ_ALWAYS_INLINE bool
MaybeWrapValue(JSContext* cx, JS::MutableHandle<JS::Value> rval)
{
if (rval.isString()) {
return MaybeWrapStringValue(cx, rval);
if (rval.isGCThing()) {
if (rval.isString()) {
return MaybeWrapStringValue(cx, rval);
}
if (rval.isObject()) {
return MaybeWrapObjectValue(cx, rval);
}
MOZ_ASSERT(rval.isSymbol());
JS_MarkCrossZoneId(cx, SYMBOL_TO_JSID(rval.toSymbol()));
}
if (!rval.isObject()) {
return true;
}
return MaybeWrapObjectValue(cx, rval);
return true;
}
namespace binding_detail {

Просмотреть файл

@ -10955,6 +10955,7 @@ class CGResolveOwnPropertyViaResolve(CGAbstractBindingMethod):
// to avoid re-resolving the properties if someone deletes
// them.
JSAutoCompartment ac(cx, obj);
JS_MarkCrossZoneId(cx, id);
JS::Rooted<JS::PropertyDescriptor> objDesc(cx);
if (!self->DoResolve(cx, obj, id, &objDesc)) {
return false;

Просмотреть файл

@ -722,6 +722,12 @@ GetProperty(JSContext *cx, JSObject *objArg, NPIdentifier npid, JS::MutableHandl
return ::JS_GetPropertyById(cx, obj, id, rval);
}
// Mark the atom or symbol backing an NPAPI identifier as in use by the
// context's current zone. Must be called after entering a compartment that
// may be in a different zone, since atoms are marked per zone
// (see JS_MarkCrossZoneId).
static void
MarkCrossZoneNPIdentifier(JSContext* cx, NPIdentifier npid)
{
    JS_MarkCrossZoneId(cx, NPIdentifierToJSId(npid));
}
// static
bool
nsJSObjWrapper::NP_HasMethod(NPObject *npobj, NPIdentifier id)
@ -745,6 +751,7 @@ nsJSObjWrapper::NP_HasMethod(NPObject *npobj, NPIdentifier id)
nsJSObjWrapper *npjsobj = (nsJSObjWrapper *)npobj;
JSAutoCompartment ac(cx, npjsobj->mJSObj);
MarkCrossZoneNPIdentifier(cx, id);
AutoJSExceptionSuppressor suppressor(aes, npjsobj);
@ -784,6 +791,7 @@ doInvoke(NPObject *npobj, NPIdentifier method, const NPVariant *args,
JS::Rooted<JSObject*> jsobj(cx, npjsobj->mJSObj);
JSAutoCompartment ac(cx, jsobj);
MarkCrossZoneNPIdentifier(cx, method);
JS::Rooted<JS::Value> fv(cx);
AutoJSExceptionSuppressor suppressor(aes, npjsobj);
@ -876,6 +884,7 @@ nsJSObjWrapper::NP_HasProperty(NPObject *npobj, NPIdentifier npid)
AutoJSExceptionSuppressor suppressor(aes, npjsobj);
JS::Rooted<JSObject*> jsobj(cx, npjsobj->mJSObj);
JSAutoCompartment ac(cx, jsobj);
MarkCrossZoneNPIdentifier(cx, npid);
NS_ASSERTION(NPIdentifierIsInt(npid) || NPIdentifierIsString(npid),
"id must be either string or int!\n");
@ -912,6 +921,7 @@ nsJSObjWrapper::NP_GetProperty(NPObject *npobj, NPIdentifier id,
AutoJSExceptionSuppressor suppressor(aes, npjsobj);
JSAutoCompartment ac(cx, npjsobj->mJSObj);
MarkCrossZoneNPIdentifier(cx, id);
JS::Rooted<JS::Value> v(cx);
return (GetProperty(cx, npjsobj->mJSObj, id, &v) &&
@ -948,6 +958,7 @@ nsJSObjWrapper::NP_SetProperty(NPObject *npobj, NPIdentifier npid,
AutoJSExceptionSuppressor suppressor(aes, npjsobj);
JS::Rooted<JSObject*> jsObj(cx, npjsobj->mJSObj);
JSAutoCompartment ac(cx, jsObj);
MarkCrossZoneNPIdentifier(cx, npid);
JS::Rooted<JS::Value> v(cx, NPVariantToJSVal(npp, cx, value));
@ -985,6 +996,7 @@ nsJSObjWrapper::NP_RemoveProperty(NPObject *npobj, NPIdentifier npid)
JS::ObjectOpResult result;
JS::Rooted<JSObject*> obj(cx, npjsobj->mJSObj);
JSAutoCompartment ac(cx, obj);
MarkCrossZoneNPIdentifier(cx, npid);
NS_ASSERTION(NPIdentifierIsInt(npid) || NPIdentifierIsString(npid),
"id must be either string or int!\n");
@ -2313,6 +2325,7 @@ nsJSObjWrapper::HasOwnProperty(NPObject *npobj, NPIdentifier npid)
AutoJSExceptionSuppressor suppressor(aes, npjsobj);
JS::Rooted<JSObject*> jsobj(cx, npjsobj->mJSObj);
JSAutoCompartment ac(cx, jsobj);
MarkCrossZoneNPIdentifier(cx, npid);
NS_ASSERTION(NPIdentifierIsInt(npid) || NPIdentifierIsString(npid),
"id must be either string or int!\n");

Просмотреть файл

@ -507,6 +507,7 @@ struct RuntimeSizes
#define FOR_EACH_SIZE(macro) \
macro(_, MallocHeap, object) \
macro(_, MallocHeap, atomsTable) \
macro(_, MallocHeap, atomsMarkBitmaps) \
macro(_, MallocHeap, contexts) \
macro(_, MallocHeap, temporary) \
macro(_, MallocHeap, interpreterStack) \

Просмотреть файл

@ -2850,10 +2850,12 @@ js::intl_IsValidTimeZoneName(JSContext* cx, unsigned argc, Value* vp)
if (!sharedIntlData.validateTimeZoneName(cx, timeZone, &validatedTimeZone))
return false;
if (validatedTimeZone)
if (validatedTimeZone) {
cx->markAtom(validatedTimeZone);
args.rval().setString(validatedTimeZone);
else
} else {
args.rval().setNull();
}
return true;
}

Просмотреть файл

@ -345,6 +345,7 @@ regexp_compile_impl(JSContext* cx, const CallArgs& args)
sourceAtom = g->getSource();
flags = g->getFlags();
}
cx->markAtom(sourceAtom);
// Step 5, minus lastIndex zeroing.
regexp->initIgnoringLastIndex(sourceAtom, flags);

351
js/src/gc/AtomMarking.cpp Normal file
Просмотреть файл

@ -0,0 +1,351 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=8 sts=4 et sw=4 tw=99:
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "gc/AtomMarking.h"
#include "jscompartment.h"
#include "jsgcinlines.h"
#include "gc/Heap-inl.h"
namespace js {
namespace gc {
// Atom Marking Overview
//
// Things in the atoms zone (which includes atomized strings and other things,
// all of which we will refer to as 'atoms' here) may be pointed to freely by
// things in other zones. To avoid the need to perform garbage collections of
// the entire runtime to collect atoms, we compute a separate atom mark bitmap
// for each zone that is always an overapproximation of the atoms that zone is
// using. When an atom is not in the mark bitmap for any zone, it can be
// destroyed.
//
// To minimize interference with the rest of the GC, atom marking and sweeping
// is done by manipulating the mark bitmaps in the chunks used for the atoms.
// When the atoms zone is being collected, the mark bitmaps for the chunk(s)
// used by the atoms are updated normally during marking. After marking
// finishes, the chunk mark bitmaps are translated to a more efficient atom
// mark bitmap (see below) that is stored on the zones which the GC collected
// (computeBitmapFromChunkMarkBits). Before sweeping begins, the chunk mark
// bitmaps are updated with any atoms that might be referenced by zones which
// weren't collected (updateChunkMarkBits). The GC sweeping will then release
// all atoms which are not marked by any zone.
//
// The representation of atom mark bitmaps is as follows:
//
// Each arena in the atoms zone has an atomBitmapStart() value indicating the
// word index into the bitmap of the first thing in the arena. Each arena uses
// ArenaBitmapWords of data to store its bitmap, which uses the same
// representation as chunk mark bitmaps: one bit is allocated per Cell, with
// bits for space between things being unused when things are larger than a
// single Cell.
// Set bit |bit| in a flat word-array bitmap.
static inline void
SetBit(uintptr_t* bitmap, size_t bit)
{
    size_t wordIndex = bit / JS_BITS_PER_WORD;
    size_t bitIndex = bit % JS_BITS_PER_WORD;
    bitmap[wordIndex] |= uintptr_t(1) << bitIndex;
}
// Test bit |bit| in a flat word-array bitmap.
static inline bool
GetBit(uintptr_t* bitmap, size_t bit)
{
    size_t wordIndex = bit / JS_BITS_PER_WORD;
    size_t bitIndex = bit % JS_BITS_PER_WORD;
    return (bitmap[wordIndex] & (uintptr_t(1) << bitIndex)) != 0;
}
// Grow |bitmap| so it holds at least |nwords| words, zero-filling any new
// words. Returns false on allocation failure. Never shrinks the bitmap.
//
// Note: the original inner |if (needed)| guard was dead code — when
// |nwords > bitmap.length()| the difference is always nonzero — so the
// append is done unconditionally inside the length check.
static inline bool
EnsureBitmapLength(AtomMarkingRuntime::Bitmap& bitmap, size_t nwords)
{
    if (nwords > bitmap.length())
        return bitmap.appendN(0, nwords - bitmap.length());
    return true;
}
// Reserve a range of atom mark bitmap words for |arena|, which must hold
// things in the atoms zone. Called with exclusive access when the arena is
// initialized (see Arena::init in gc/Heap-inl.h).
void
AtomMarkingRuntime::registerArena(Arena* arena)
{
    MOZ_ASSERT(arena->getThingSize() != 0);
    MOZ_ASSERT(arena->getThingSize() % CellSize == 0);
    MOZ_ASSERT(arena->zone->isAtomsZone());
    MOZ_ASSERT(arena->zone->runtimeFromAnyThread()->currentThreadHasExclusiveAccess());

    // We need to find a range of bits from the atoms bitmap for this arena.

    // Look for a free range of bits compatible with this arena. Every arena
    // uses the same number of bitmap words (ArenaBitmapWords), so any
    // previously released range can be reused as-is.
    if (freeArenaIndexes.length()) {
        arena->atomBitmapStart() = freeArenaIndexes.popCopy();
        return;
    }

    // Allocate a range of bits from the end for this arena.
    arena->atomBitmapStart() = allocatedWords;
    allocatedWords += ArenaBitmapWords;
}
// Return |arena|'s atom bitmap word range to the free list so a future
// atoms-zone arena can reuse it.
void
AtomMarkingRuntime::unregisterArena(Arena* arena)
{
    MOZ_ASSERT(arena->zone->isAtomsZone());

    // Leak these atom bits if we run out of memory.
    mozilla::Unused << freeArenaIndexes.emplaceBack(arena->atomBitmapStart());
}
// Fill |bitmap| with a copy of the chunk mark bits for every arena in the
// atoms zone. Called after marking finishes, so the result reflects the
// atoms reachable from the zones the GC collected. Returns false on an
// allocation failure (no exception is reported).
bool
AtomMarkingRuntime::computeBitmapFromChunkMarkBits(JSRuntime* runtime, Bitmap& bitmap)
{
    MOZ_ASSERT(runtime->currentThreadHasExclusiveAccess());
    MOZ_ASSERT(bitmap.empty());

    if (!EnsureBitmapLength(bitmap, allocatedWords))
        return false;

    Zone* atomsZone = runtime->unsafeAtomsCompartment()->zone();
    for (auto thingKind : AllAllocKinds()) {
        for (ArenaIter aiter(atomsZone, thingKind); !aiter.done(); aiter.next()) {
            Arena* arena = aiter.get();
            // Copy this arena's chunk mark words into the arena's reserved
            // slice of the atom bitmap (see atomBitmapStart()).
            uintptr_t* chunkWords = arena->chunk()->bitmap.arenaBits(arena);
            uintptr_t* bitmapWords = &bitmap[arena->atomBitmapStart()];
            mozilla::PodCopy(bitmapWords, chunkWords, ArenaBitmapWords);
        }
    }

    return true;
}
// Intersect |zone|'s atom mark bitmap with |bitmap|, tightening the zone's
// overapproximation of which atoms it references after a GC of the atoms
// zone. The atoms zone itself has no per-zone bitmap.
void
AtomMarkingRuntime::updateZoneBitmap(Zone* zone, const Bitmap& bitmap)
{
    if (zone->isAtomsZone())
        return;

    // |bitmap| was produced by computeBitmapFromChunkMarkBits, so it should
    // have the maximum possible size.
    MOZ_ASSERT(zone->markedAtoms.length() <= bitmap.length());

    // Take the bitwise and between the two mark bitmaps to get the best new
    // overapproximation we can. |bitmap| might include bits that are not in
    // the zone's mark bitmap, if additional zones were collected by the GC.
    for (size_t i = 0; i < zone->markedAtoms.length(); i++)
        zone->markedAtoms[i] &= bitmap[i];
}
// Set any bits in the chunk mark bitmaps for atoms which are marked in bitmap.
// This ORs each arena's slice of |bitmap| into that arena's chunk mark words,
// so sweeping will keep any atom the bitmap says is in use.
static void
AddBitmapToChunkMarkBits(JSRuntime* runtime, AtomMarkingRuntime::Bitmap& bitmap)
{
    // Make sure that by copying the mark bits for one arena in word sizes we
    // do not affect the mark bits for other arenas.
    static_assert(ArenaBitmapBits == ArenaBitmapWords * JS_BITS_PER_WORD,
                  "ArenaBitmapWords must evenly divide ArenaBitmapBits");

    Zone* atomsZone = runtime->unsafeAtomsCompartment()->zone();
    for (auto thingKind : AllAllocKinds()) {
        for (ArenaIter aiter(atomsZone, thingKind); !aiter.done(); aiter.next()) {
            Arena* arena = aiter.get();
            uintptr_t* chunkWords = arena->chunk()->bitmap.arenaBits(arena);

            // The bitmap might not be long enough, in which case remaining
            // bits are implicitly zero.
            if (bitmap.length() <= arena->atomBitmapStart())
                continue;
            MOZ_ASSERT(bitmap.length() >= arena->atomBitmapStart() + ArenaBitmapWords);

            uintptr_t* bitmapWords = &bitmap[arena->atomBitmapStart()];
            for (size_t i = 0; i < ArenaBitmapWords; i++)
                chunkWords[i] |= bitmapWords[i];
        }
    }
}
// Before sweeping begins, propagate the atom bitmaps of all uncollected
// zones into the chunk mark bits, so the sweeper keeps every atom those
// zones may still reference.
void
AtomMarkingRuntime::updateChunkMarkBits(JSRuntime* runtime)
{
    MOZ_ASSERT(runtime->currentThreadHasExclusiveAccess());

    // Try to compute a simple union of the zone atom bitmaps before updating
    // the chunk mark bitmaps. If this allocation fails then fall back to
    // updating the chunk mark bitmaps separately for each zone.
    Bitmap markedUnion;
    if (EnsureBitmapLength(markedUnion, allocatedWords)) {
        for (ZonesIter zone(runtime, SkipAtoms); !zone.done(); zone.next()) {
            // We only need to update the chunk mark bits for zones which were
            // not collected in the current GC. Atoms which are referenced by
            // collected zones have already been marked.
            if (!zone->isCollectingFromAnyThread()) {
                MOZ_ASSERT(zone->markedAtoms.length() <= allocatedWords);
                for (size_t i = 0; i < zone->markedAtoms.length(); i++)
                    markedUnion[i] |= zone->markedAtoms[i];
            }
        }
        AddBitmapToChunkMarkBits(runtime, markedUnion);
    } else {
        // Fallback path: one pass over the chunk mark bits per zone.
        for (ZonesIter zone(runtime, SkipAtoms); !zone.done(); zone.next()) {
            if (!zone->isCollectingFromAnyThread())
                AddBitmapToChunkMarkBits(runtime, zone->markedAtoms);
        }
    }
}
// Compute the index of |thing|'s bit within the atom mark bitmaps: the
// arena's registered word offset, plus one bit per Cell-sized unit from the
// start of the arena.
static inline size_t
GetAtomBit(TenuredCell* thing)
{
    MOZ_ASSERT(thing->zoneFromAnyThread()->isAtomsZone());
    Arena* arena = thing->arena();
    size_t arenaBit = (reinterpret_cast<uintptr_t>(thing) - arena->address()) / CellSize;
    return arena->atomBitmapStart() * JS_BITS_PER_WORD + arenaBit;
}
// Whether |thing| is a permanent atom or well-known symbol. Such things are
// never collected, so they don't participate in per-zone atom marking.
static bool
ThingIsPermanent(TenuredCell* thing)
{
    switch (thing->getTraceKind()) {
      case JS::TraceKind::String:
        return static_cast<JSString*>(thing)->isPermanentAtom();
      case JS::TraceKind::Symbol:
        return static_cast<JS::Symbol*>(thing)->isWellKnownSymbol();
      default:
        return false;
    }
}
// Mark |thing| (a thing in the atoms zone) as reachable from the context's
// zone by setting its bit in that zone's atom mark bitmap.
void
AtomMarkingRuntime::markAtom(ExclusiveContext* cx, TenuredCell* thing)
{
    // The context's zone will be null during initialization of the runtime.
    if (!thing || !cx->zone())
        return;
    MOZ_ASSERT(!cx->zone()->isAtomsZone());

    // Permanent things and things outside the atoms zone do not use the
    // per-zone mark bitmaps.
    if (ThingIsPermanent(thing) || !thing->zoneFromAnyThread()->isAtomsZone())
        return;

    size_t bit = GetAtomBit(thing);

    {
        // Growing the bitmap must not fail: losing the bit below could let
        // the GC sweep an atom that is still in use.
        AutoEnterOOMUnsafeRegion oomUnsafe;
        if (!EnsureBitmapLength(cx->zone()->markedAtoms, allocatedWords))
            oomUnsafe.crash("Atom bitmap OOM");
    }

    SetBit(cx->zone()->markedAtoms.begin(), bit);

    if (cx->isJSContext()) {
        // Trigger a read barrier on the atom, in case there is an incremental
        // GC in progress. This is necessary if the atom is being marked
        // because a reference to it was obtained from another zone which is
        // not being collected by the incremental GC.
        TenuredCell::readBarrier(thing);
    }
}
// If |id| refers to a GC thing (an atomized string or symbol), mark it as
// used by the context's zone.
void
AtomMarkingRuntime::markId(ExclusiveContext* cx, jsid id)
{
    if (JSID_IS_GCTHING(id))
        markAtom(cx, &JSID_TO_GCTHING(id).asCell()->asTenured());
}
// If |value| holds a GC thing, mark it as used by the context's zone.
void
AtomMarkingRuntime::markAtomValue(ExclusiveContext* cx, const Value& value)
{
    if (value.isGCThing()) {
        Cell* thing = value.toGCThing();
        // Nursery things are never atoms, and markAtom requires a tenured
        // cell, so skip them.
        if (thing && !IsInsideNursery(thing))
            markAtom(cx, &thing->asTenured());
    }
}
// Merge |source|'s marked atoms into |target|. Afterwards target's bitmap
// is the union of both zones' bitmaps and source's bitmap is empty.
void
AtomMarkingRuntime::adoptMarkedAtoms(Zone* target, Zone* source)
{
    MOZ_ASSERT(target->runtimeFromAnyThread()->currentThreadHasExclusiveAccess());

    Bitmap* targetBitmap = &target->markedAtoms;
    Bitmap* sourceBitmap = &source->markedAtoms;

    // Accumulate the union into whichever bitmap is longer, so neither
    // bitmap needs to grow (growth could fail).
    if (targetBitmap->length() < sourceBitmap->length())
        std::swap(targetBitmap, sourceBitmap);

    for (size_t i = 0; i < sourceBitmap->length(); i++)
        (*targetBitmap)[i] |= (*sourceBitmap)[i];

    // If the union ended up in source's storage, steal that storage;
    // otherwise just empty source's bitmap.
    if (targetBitmap != &target->markedAtoms)
        target->markedAtoms = Move(source->markedAtoms);
    else
        source->markedAtoms.clear();
}
#ifdef DEBUG
// Debug check: is |thingArg| marked in |zone|'s atom bitmap? Returns true
// conservatively for things that don't participate in per-zone atom marking.
bool
AtomMarkingRuntime::atomIsMarked(Zone* zone, Cell* thingArg)
{
    if (!thingArg || IsInsideNursery(thingArg))
        return true;
    TenuredCell* thing = &thingArg->asTenured();

    // NOTE(review): permanentAtoms being null presumably means runtime
    // initialization is still in progress — treat everything as marked.
    if (!zone->runtimeFromAnyThread()->permanentAtoms)
        return true;

    if (ThingIsPermanent(thing) || !thing->zoneFromAnyThread()->isAtomsZone())
        return true;

    JS::TraceKind kind = thing->getTraceKind();
    if (kind == JS::TraceKind::String) {
        // Pinned atoms are exempt from per-zone marking.
        JSAtom* atom = static_cast<JSAtom*>(thing);
        if (AtomIsPinnedInRuntime(zone->runtimeFromAnyThread(), atom))
            return true;
    }

    size_t bit = GetAtomBit(thing);

    // Bits beyond the end of a (not yet grown) bitmap are implicitly zero.
    if (bit >= zone->markedAtoms.length() * JS_BITS_PER_WORD)
        return false;

    return GetBit(zone->markedAtoms.begin(), bit);
}
// Debug check for ids: ids that are not GC things trivially count as marked.
bool
AtomMarkingRuntime::idIsMarked(Zone* zone, jsid id)
{
    if (JSID_IS_GCTHING(id))
        return atomIsMarked(zone, JSID_TO_GCTHING(id).asCell());
    return true;
}

// Debug check for values: values that are not GC things trivially count as
// marked.
bool
AtomMarkingRuntime::valueIsMarked(Zone* zone, const Value& value)
{
    if (value.isGCThing())
        return atomIsMarked(zone, value.toGCThing());
    return true;
}
#endif // DEBUG
} // namespace gc
#ifdef DEBUG
// Public debug helpers (declared in jsatom.h) forwarding to the runtime's
// AtomMarkingRuntime.
bool
AtomIsMarked(Zone* zone, JSAtom* atom)
{
    return zone->runtimeFromAnyThread()->gc.atomMarking.atomIsMarked(zone, atom);
}

bool
AtomIsMarked(Zone* zone, jsid id)
{
    return zone->runtimeFromAnyThread()->gc.atomMarking.idIsMarked(zone, id);
}

bool
AtomIsMarked(Zone* zone, const Value& value)
{
    return zone->runtimeFromAnyThread()->gc.atomMarking.valueIsMarked(zone, value);
}
#endif // DEBUG
} // namespace js

72
js/src/gc/AtomMarking.h Normal file
Просмотреть файл

@ -0,0 +1,72 @@
/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
* vim: set ts=8 sts=4 et sw=4 tw=99:
* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#ifndef gc_AtomMarking_h
#define gc_AtomMarking_h
#include "NamespaceImports.h"
#include "gc/Heap.h"
namespace js {
namespace gc {
// This class manages state used for marking atoms during GCs.
// See AtomMarking.cpp for details.
class AtomMarkingRuntime
{
    // Unused arena atom bitmap indexes. Protected by the GC lock.
    Vector<size_t, 0, SystemAllocPolicy> freeArenaIndexes;

    // The extent of all allocated and free words in atom mark bitmaps.
    // This monotonically increases and may be read from without locking.
    mozilla::Atomic<size_t> allocatedWords;

  public:
    // Per-zone atom mark bitmap: a flat vector of words, one bit per
    // Cell-sized unit in the atoms zone.
    typedef Vector<uintptr_t, 0, SystemAllocPolicy> Bitmap;

    AtomMarkingRuntime()
      : allocatedWords(0)
    {}

    // Mark an arena as holding things in the atoms zone.
    void registerArena(Arena* arena);

    // Mark an arena as no longer holding things in the atoms zone.
    void unregisterArena(Arena* arena);

    // Fill |bitmap| with an atom marking bitmap based on the things that are
    // currently marked in the chunks used by atoms zone arenas. This returns
    // false on an allocation failure (but does not report an exception).
    bool computeBitmapFromChunkMarkBits(JSRuntime* runtime, Bitmap& bitmap);

    // Update the atom marking bitmap in |zone| according to another
    // overapproximation of the reachable atoms in |bitmap|.
    void updateZoneBitmap(Zone* zone, const Bitmap& bitmap);

    // Set any bits in the chunk mark bitmaps for atoms which are marked in any
    // zone in the runtime.
    void updateChunkMarkBits(JSRuntime* runtime);

    // Mark an atom or id as being newly reachable by the context's zone.
    void markAtom(ExclusiveContext* cx, TenuredCell* thing);
    void markId(ExclusiveContext* cx, jsid id);
    void markAtomValue(ExclusiveContext* cx, const Value& value);

    // Mark all atoms in |source| as being reachable within |target|.
    void adoptMarkedAtoms(Zone* target, Zone* source);

#ifdef DEBUG
    // Return whether |thing/id| is in the atom marking bitmap for |zone|.
    bool atomIsMarked(Zone* zone, Cell* thing);
    bool idIsMarked(Zone* zone, jsid id);
    bool valueIsMarked(Zone* zone, const Value& value);
#endif
};
} // namespace gc
} // namespace js
#endif // gc_AtomMarking_h

Просмотреть файл

@ -13,6 +13,7 @@
#include "jsfriendapi.h"
#include "jsgc.h"
#include "gc/AtomMarking.h"
#include "gc/Heap.h"
#include "gc/Nursery.h"
#include "gc/Statistics.h"
@ -1040,6 +1041,10 @@ class GCRuntime
MemProfiler mMemProfiler;
// State used for managing atom mark bitmaps in each zone. Protected by the
// exclusive access lock.
AtomMarkingRuntime atomMarking;
private:
// When empty, chunks reside in the emptyChunks pool and are re-used as
// needed or eventually expired if not re-used. The emptyChunks pool gets

Просмотреть файл

@ -7,7 +7,10 @@
#ifndef gc_Heap_inl_h
#define gc_Heap_inl_h
#include "gc/Heap.h"
#include "gc/StoreBuffer.h"
#include "gc/Zone.h"
inline void
js::gc::Arena::init(JS::Zone* zoneArg, AllocKind kind)
@ -23,7 +26,32 @@ js::gc::Arena::init(JS::Zone* zoneArg, AllocKind kind)
zone = zoneArg;
allocKind = size_t(kind);
setAsFullyUnused();
bufferedCells = &ArenaCellSet::Empty;
if (zone->isAtomsZone())
zone->runtimeFromAnyThread()->gc.atomMarking.registerArena(this);
else
bufferedCells() = &ArenaCellSet::Empty;
}
// Return an allocated arena to its unallocated state. Atoms-zone arenas own
// a word range in the runtime's atom mark bitmaps, which must be released
// before the zone pointer is cleared by setAsNotAllocated().
inline void
js::gc::Arena::release()
{
    if (zone->isAtomsZone())
        zone->runtimeFromAnyThread()->gc.atomMarking.unregisterArena(this);
    setAsNotAllocated();
}
// Accessor for the store-buffer cell set. Only valid for arenas outside the
// atoms zone: the underlying union member doubles as atomBitmapStart_ for
// atoms-zone arenas.
inline js::gc::ArenaCellSet*&
js::gc::Arena::bufferedCells()
{
    MOZ_ASSERT(zone && !zone->isAtomsZone());
    return bufferedCells_;
}
// Accessor for the arena's word offset into the atom mark bitmaps. Only
// valid for atoms-zone arenas: the underlying union member doubles as
// bufferedCells_ for other arenas.
inline size_t&
js::gc::Arena::atomBitmapStart()
{
    MOZ_ASSERT(zone && zone->isAtomsZone());
    return atomBitmapStart_;
}
#endif

Просмотреть файл

@ -532,11 +532,24 @@ class Arena
"Arena::auxNextLink packing assumes that ArenaShift has "
"enough bits to cover allocKind and hasDelayedMarking.");
/*
* If non-null, points to an ArenaCellSet that represents the set of cells
* in this arena that are in the nursery's store buffer.
*/
ArenaCellSet* bufferedCells;
private:
union {
/*
* For arenas in zones other than the atoms zone, if non-null, points
* to an ArenaCellSet that represents the set of cells in this arena
* that are in the nursery's store buffer.
*/
ArenaCellSet* bufferedCells_;
/*
* For arenas in the atoms zone, the starting index into zone atom
* marking bitmaps (see AtomMarking.h) of the things in this zone.
* Atoms never refer to nursery things, so no store buffer index is
* needed.
*/
size_t atomBitmapStart_;
};
public:
/*
* The size of data should be |ArenaSize - offsetof(data)|, but the offset
@ -558,6 +571,8 @@ class Arena
last->initAsEmpty();
}
// Initialize an arena to its unallocated state. For arenas that were
// previously allocated for some zone, use release() instead.
void setAsNotAllocated() {
firstFreeSpan.initAsEmpty();
zone = nullptr;
@ -566,9 +581,12 @@ class Arena
allocatedDuringIncremental = 0;
markOverflow = 0;
auxNextLink = 0;
bufferedCells = nullptr;
bufferedCells_ = nullptr;
}
// Return an allocated arena to its unallocated state.
inline void release();
uintptr_t address() const {
checkAddress();
return uintptr_t(this);
@ -600,8 +618,9 @@ class Arena
size_t getThingSize() const { return thingSize(getAllocKind()); }
size_t getThingsPerArena() const { return thingsPerArena(getAllocKind()); }
size_t getThingsSpan() const { return getThingsPerArena() * getThingSize(); }
size_t getFirstThingOffset() const { return firstThingOffset(getAllocKind()); }
uintptr_t thingsStart() const { return address() + firstThingOffset(getAllocKind()); }
uintptr_t thingsStart() const { return address() + getFirstThingOffset(); }
uintptr_t thingsEnd() const { return address() + ArenaSize; }
bool isEmpty() const {
@ -685,16 +704,15 @@ class Arena
auxNextLink = 0;
}
inline ArenaCellSet*& bufferedCells();
inline size_t& atomBitmapStart();
template <typename T>
size_t finalize(FreeOp* fop, AllocKind thingKind, size_t thingSize);
static void staticAsserts();
void unmarkAll();
static size_t offsetOfBufferedCells() {
return offsetof(Arena, bufferedCells);
}
};
static_assert(ArenaZoneOffset == offsetof(Arena, zone),

Просмотреть файл

@ -928,6 +928,13 @@ CheckTraversedEdge(S source, T* target)
MOZ_ASSERT_IF(!ThingIsPermanentAtomOrWellKnownSymbol(target),
target->zone()->isAtomsZone() || target->zone() == source->zone());
// If we are marking an atom, that atom must be marked in the source zone's
// atom bitmap.
MOZ_ASSERT_IF(!ThingIsPermanentAtomOrWellKnownSymbol(target) &&
target->zone()->isAtomsZone() && !source->zone()->isAtomsZone(),
target->runtimeFromAnyThread()->gc.atomMarking
.atomIsMarked(source->zone(), reinterpret_cast<TenuredCell*>(target)));
// Atoms and Symbols do not have access to a compartment pointer, or we'd need
// to adjust the subsequent check to catch that case.
MOZ_ASSERT_IF(ThingIsPermanentAtomOrWellKnownSymbol(target), !target->maybeCompartment());
@ -2354,8 +2361,8 @@ js::gc::StoreBuffer::traceWholeCells(TenuringTracer& mover)
for (ArenaCellSet* cells = bufferWholeCell; cells; cells = cells->next) {
Arena* arena = cells->arena;
MOZ_ASSERT(arena->bufferedCells == cells);
arena->bufferedCells = &ArenaCellSet::Empty;
MOZ_ASSERT(arena->bufferedCells() == cells);
arena->bufferedCells() = &ArenaCellSet::Empty;
JS::TraceKind kind = MapAllocToTraceKind(arena->getAllocKind());
switch (kind) {

Просмотреть файл

@ -159,7 +159,6 @@ static const PhaseInfo phases[] = {
{ PHASE_WEAK_ZONEGROUP_CALLBACK, "Per-Slice Weak Callback", PHASE_FINALIZE_START, 57 },
{ PHASE_WEAK_COMPARTMENT_CALLBACK, "Per-Compartment Weak Callback", PHASE_FINALIZE_START, 58 },
{ PHASE_SWEEP_ATOMS, "Sweep Atoms", PHASE_SWEEP, 18 },
{ PHASE_SWEEP_SYMBOL_REGISTRY, "Sweep Symbol Registry", PHASE_SWEEP, 19 },
{ PHASE_SWEEP_COMPARTMENTS, "Sweep Compartments", PHASE_SWEEP, 20 },
{ PHASE_SWEEP_DISCARD_CODE, "Sweep Discard Code", PHASE_SWEEP_COMPARTMENTS, 21 },
{ PHASE_SWEEP_INNER_VIEWS, "Sweep Inner Views", PHASE_SWEEP_COMPARTMENTS, 22 },

Просмотреть файл

@ -49,7 +49,6 @@ enum Phase : uint8_t {
PHASE_WEAK_ZONEGROUP_CALLBACK,
PHASE_WEAK_COMPARTMENT_CALLBACK,
PHASE_SWEEP_ATOMS,
PHASE_SWEEP_SYMBOL_REGISTRY,
PHASE_SWEEP_COMPARTMENTS,
PHASE_SWEEP_DISCARD_CODE,
PHASE_SWEEP_INNER_VIEWS,

Просмотреть файл

@ -11,6 +11,8 @@
#include "gc/Heap.h"
#include "gc/Heap-inl.h"
namespace js {
namespace gc {
@ -48,7 +50,7 @@ ArenaCellSet::check() const
bool bitsZero = bits.isAllClear();
MOZ_ASSERT(isEmpty() == bitsZero);
MOZ_ASSERT(isEmpty() == !arena);
MOZ_ASSERT_IF(!isEmpty(), arena->bufferedCells == this);
MOZ_ASSERT_IF(!isEmpty(), arena->bufferedCells() == this);
#endif
}
@ -58,7 +60,7 @@ StoreBuffer::putWholeCell(Cell* cell)
MOZ_ASSERT(cell->isTenured());
Arena* arena = cell->asTenured().arena();
ArenaCellSet* cells = arena->bufferedCells;
ArenaCellSet* cells = arena->bufferedCells();
if (cells->isEmpty()) {
cells = AllocateWholeCellSet(arena);
if (!cells)

Просмотреть файл

@ -80,7 +80,7 @@ StoreBuffer::clear()
bufferGeneric.clear();
for (ArenaCellSet* set = bufferWholeCell; set; set = set->next)
set->arena->bufferedCells = nullptr;
set->arena->bufferedCells() = nullptr;
bufferWholeCell = nullptr;
}
@ -143,7 +143,7 @@ js::gc::AllocateWholeCellSet(Arena* arena)
auto cells = static_cast<ArenaCellSet*>(data);
new (cells) ArenaCellSet(arena);
arena->bufferedCells = cells;
arena->bufferedCells() = cells;
rt->gc.storeBuffer.addToWholeCellBuffer(cells);
return cells;
}

Просмотреть файл

@ -151,7 +151,8 @@ struct Zone : public JS::shadow::Zone,
size_t* typePool,
size_t* baselineStubsOptimized,
size_t* uniqueIdMap,
size_t* shapeTables);
size_t* shapeTables,
size_t* atomsMarkBitmaps);
void resetGCMallocBytes();
void setGCMaxMallocBytes(size_t value);
@ -381,6 +382,9 @@ struct Zone : public JS::shadow::Zone,
// types.
mozilla::Atomic<uint32_t, mozilla::ReleaseAcquire> gcMallocGCTriggered;
// Bitmap of atoms marked by this zone.
js::gc::AtomMarkingRuntime::Bitmap markedAtoms;
// Track heap usage under this Zone.
js::gc::HeapUsage usage;

Просмотреть файл

@ -3560,7 +3560,7 @@ EmitStoreBufferCheckForConstant(MacroAssembler& masm, JSObject* object,
gc::Arena* arena = cell->arena();
Register cells = temp;
masm.loadPtr(AbsoluteAddress(&arena->bufferedCells), cells);
masm.loadPtr(AbsoluteAddress(&arena->bufferedCells()), cells);
size_t index = gc::ArenaCellSet::getCellIndex(cell);
size_t word;

Просмотреть файл

@ -783,6 +783,18 @@ JS_GetCompartmentPrivate(JSCompartment* compartment)
return compartment->data;
}
// Public entry point: record |id|'s atom/symbol in the current zone's atom
// mark bitmap. Required whenever an id crosses zone boundaries (see the
// comment in jsapi.h).
JS_PUBLIC_API(void)
JS_MarkCrossZoneId(JSContext* cx, jsid id)
{
    cx->markId(id);
}

// As above, but for a Value that may hold an atomized string or symbol.
JS_PUBLIC_API(void)
JS_MarkCrossZoneIdValue(JSContext* cx, const Value& value)
{
    cx->markAtomValue(value);
}
JS_PUBLIC_API(JSAddonId*)
JS::NewAddonId(JSContext* cx, HandleString str)
{
@ -1142,6 +1154,7 @@ JS_IdToProtoKey(JSContext* cx, HandleId id)
{
AssertHeapIsIdle(cx);
CHECK_REQUEST(cx);
assertSameCompartment(cx, id);
if (!JSID_IS_ATOM(id))
return JSProto_Null;
@ -1569,6 +1582,7 @@ JS_IdToValue(JSContext* cx, jsid id, MutableHandleValue vp)
{
AssertHeapIsIdle(cx);
CHECK_REQUEST(cx);
assertSameCompartment(cx, id);
vp.set(IdToValue(id));
assertSameCompartment(cx, vp);
return true;
@ -2020,7 +2034,7 @@ JS_PUBLIC_API(bool)
JS_GetPropertyDescriptorById(JSContext* cx, HandleObject obj, HandleId id,
MutableHandle<PropertyDescriptor> desc)
{
assertSameCompartment(cx, obj);
assertSameCompartment(cx, obj, id);
return GetPropertyDescriptor(cx, obj, id, desc);
}
@ -3405,6 +3419,7 @@ JS::GetSelfHostedFunction(JSContext* cx, const char* selfHostedName, HandleId id
MOZ_ASSERT(!cx->runtime()->isAtomsCompartment(cx->compartment()));
AssertHeapIsIdle(cx);
CHECK_REQUEST(cx);
assertSameCompartment(cx, id);
RootedAtom name(cx, IdToFunctionName(cx, id));
if (!name)
@ -3423,6 +3438,8 @@ JS::GetSelfHostedFunction(JSContext* cx, const char* selfHostedName, HandleId id
JS_PUBLIC_API(JSFunction*)
JS::NewFunctionFromSpec(JSContext* cx, const JSFunctionSpec* fs, HandleId id)
{
assertSameCompartment(cx, id);
// Delay cloning self-hosted functions until they are called. This is
// achieved by passing DefineFunction a nullptr JSNative which produces an
// interpreted JSFunction where !hasScript. Interpreted call paths then
@ -3709,7 +3726,7 @@ JS_DefineFunctionById(JSContext* cx, HandleObject obj, HandleId id, JSNative cal
MOZ_ASSERT(!cx->runtime()->isAtomsCompartment(cx->compartment()));
AssertHeapIsIdle(cx);
CHECK_REQUEST(cx);
assertSameCompartment(cx, obj);
assertSameCompartment(cx, obj, id);
return DefineFunction(cx, obj, id, call, nargs, attrs);
}

Просмотреть файл

@ -1424,6 +1424,22 @@ extern JS_PUBLIC_API(void)
JS_IterateCompartments(JSContext* cx, void* data,
JSIterateCompartmentCallback compartmentCallback);
/**
* Mark a jsid after entering a new compartment. Different zones separately
* mark the ids in a runtime, and this must be used any time an id is obtained
* from one compartment and then used in another compartment, unless the two
* compartments are guaranteed to be in the same zone.
*/
extern JS_PUBLIC_API(void)
JS_MarkCrossZoneId(JSContext* cx, jsid id);
/**
* If value stores a jsid (an atomized string or symbol), mark that id as for
* JS_MarkCrossZoneId.
*/
extern JS_PUBLIC_API(void)
JS_MarkCrossZoneIdValue(JSContext* cx, const JS::Value& value);
/**
* Initialize standard JS class constructors, prototypes, and any top-level
* functions and constants associated with the standard classes (e.g. isNaN

Просмотреть файл

@ -307,6 +307,25 @@ AtomIsPinned(JSContext* cx, JSAtom* atom)
return p->isPinned();
}
#ifdef DEBUG
// Debug-only: whether |atom| is pinned in |rt|'s atoms table. May be called
// with or without the exclusive access lock held; acquires it if the
// current thread does not already have it.
bool
AtomIsPinnedInRuntime(JSRuntime* rt, JSAtom* atom)
{
    Maybe<AutoLockForExclusiveAccess> lock;
    if (!rt->currentThreadHasExclusiveAccess())
        lock.emplace(rt);

    AtomHasher::Lookup lookup(atom);

    // The atom must already be present in the table.
    AtomSet::Ptr p = rt->unsafeAtoms().lookup(lookup);
    MOZ_ASSERT(p);

    return p->isPinned();
}
#endif // DEBUG
/* |tbchars| must not point into an inline or short string. */
template <typename CharT>
MOZ_ALWAYS_INLINE
@ -336,31 +355,36 @@ AtomizeAndCopyChars(ExclusiveContext* cx, const CharT* tbchars, size_t length, P
if (p) {
JSAtom* atom = p->asPtr(cx);
p->setPinned(bool(pin));
cx->markAtom(atom);
return atom;
}
AutoCompartment ac(cx, cx->atomsCompartment(lock), &lock);
JSAtom* atom;
{
AutoCompartment ac(cx, cx->atomsCompartment(lock), &lock);
JSFlatString* flat = NewStringCopyN<NoGC>(cx, tbchars, length);
if (!flat) {
// Grudgingly forgo last-ditch GC. The alternative would be to release
// the lock, manually GC here, and retry from the top. If you fix this,
// please also fix or comment the similar case in Symbol::new_.
ReportOutOfMemory(cx);
return nullptr;
}
JSAtom* atom = flat->morphAtomizedStringIntoAtom(lookup.hash);
MOZ_ASSERT(atom->hash() == lookup.hash);
// We have held the lock since looking up p, and the operations we've done
// since then can't GC; therefore the atoms table has not been modified and
// p is still valid.
if (!atoms.add(p, AtomStateEntry(atom, bool(pin)))) {
ReportOutOfMemory(cx); /* SystemAllocPolicy does not report OOM. */
return nullptr;
JSFlatString* flat = NewStringCopyN<NoGC>(cx, tbchars, length);
if (!flat) {
// Grudgingly forgo last-ditch GC. The alternative would be to release
// the lock, manually GC here, and retry from the top. If you fix this,
// please also fix or comment the similar case in Symbol::new_.
ReportOutOfMemory(cx);
return nullptr;
}
atom = flat->morphAtomizedStringIntoAtom(lookup.hash);
MOZ_ASSERT(atom->hash() == lookup.hash);
// We have held the lock since looking up p, and the operations we've done
// since then can't GC; therefore the atoms table has not been modified and
// p is still valid.
if (!atoms.add(p, AtomStateEntry(atom, bool(pin)))) {
ReportOutOfMemory(cx); /* SystemAllocPolicy does not report OOM. */
return nullptr;
}
}
cx->markAtom(atom);
return atom;
}

Просмотреть файл

@ -132,6 +132,14 @@ class PropertyName;
extern bool
AtomIsPinned(JSContext* cx, JSAtom* atom);
#ifdef DEBUG
// This may be called either with or without the atoms lock held.
extern bool
AtomIsPinnedInRuntime(JSRuntime* rt, JSAtom* atom);
#endif // DEBUG
/* Well-known predefined C strings. */
#define DECLARE_PROTO_STR(name,code,init,clasp) extern const char js_##name##_str[];
JS_FOR_EACH_PROTOTYPE(DECLARE_PROTO_STR)
@ -235,6 +243,14 @@ template<XDRMode mode>
bool
XDRAtom(XDRState<mode>* xdr, js::MutableHandleAtom atomp);
#ifdef DEBUG
bool AtomIsMarked(Zone* zone, JSAtom* atom);
bool AtomIsMarked(Zone* zone, jsid id);
bool AtomIsMarked(Zone* zone, const Value& value);
#endif // DEBUG
} /* namespace js */
#endif /* jsatom_h */

Просмотреть файл

@ -312,6 +312,20 @@ class ExclusiveContext : public ContextFriendFields,
return runtime_->scriptDataTable(lock);
}
// Methods to access other runtime data that checks locking internally.
// Accessor for the runtime's per-zone atom marking state; see
// gc/AtomMarking.cpp for how the per-zone atom bitmaps are managed.
gc::AtomMarkingRuntime& atomMarking() {
    return runtime_->gc.atomMarking;
}
// Record |atom| as in use by this context's zone, so the atom survives a
// GC that collects the atoms zone while this zone is alive.
void markAtom(gc::TenuredCell* atom) {
    atomMarking().markAtom(this, atom);
}
// As markAtom, but for the atom/symbol (if any) backing |id|.
void markId(jsid id) {
    atomMarking().markId(this, id);
}
// As markAtom, but for the atom/symbol (if any) held by |value|.
void markAtomValue(const Value& value) {
    atomMarking().markAtomValue(this, value);
}
// Methods specific to any HelperThread for the context.
bool addPendingCompileError(frontend::CompileError** err);
void addPendingOverRecursed();
@ -369,6 +383,7 @@ struct JSContext : public js::ExclusiveContext,
using ExclusiveContext::staticStrings;
using ExclusiveContext::updateMallocCounter;
using ExclusiveContext::wellKnownSymbols;
using ExclusiveContext::atomMarking;
JSRuntime* runtime() { return this; }
js::PerThreadData& mainThread() { return this->JSRuntime::mainThread; }

Просмотреть файл

@ -83,10 +83,28 @@ class CompartmentChecker
check(handle.get());
}
void checkAtom(gc::Cell* cell) {
#ifdef DEBUG
    // Atoms which move across zone boundaries need to be marked in the
    // new zone, see JS_MarkCrossZoneId.
    if (!compartment)
        return;
    JSRuntime* rt = compartment->runtimeFromAnyThread();
    MOZ_ASSERT(rt->gc.atomMarking.atomIsMarked(compartment->zone(), cell));
#endif
}
void check(JSString* str) {
MOZ_ASSERT(!str->isMarked(gc::GRAY));
if (!str->isAtom())
if (str->isAtom()) {
checkAtom(str);
} else {
checkZone(str->zone());
}
}
// Symbols are allocated in the atoms zone, so instead of a zone check we
// assert the symbol is marked in this compartment's zone (see checkAtom).
void check(JS::Symbol* symbol) {
    checkAtom(symbol);
}
void check(const js::Value& v) {
@ -94,6 +112,8 @@ class CompartmentChecker
check(&v.toObject());
else if (v.isString())
check(v.toString());
else if (v.isSymbol())
check(v.toSymbol());
}
void check(const ValueArray& arr) {
@ -116,7 +136,10 @@ class CompartmentChecker
check(*p);
}
void check(jsid id) {}
void check(jsid id) {
if (JSID_IS_GCTHING(id))
checkAtom(JSID_TO_GCTHING(id).asCell());
}
void check(JSScript* script) {
MOZ_ASSERT_IF(script, !script->isMarked(gc::GRAY));

Просмотреть файл

@ -322,9 +322,12 @@ JSCompartment::wrap(JSContext* cx, MutableHandleString strp)
if (str->zoneFromAnyThread() == zone())
return true;
/* If the string is an atom, we don't have to copy. */
/*
* If the string is an atom, we don't have to copy, but we do need to mark
* the atom as being in use by the new zone.
*/
if (str->isAtom()) {
MOZ_ASSERT(str->isPermanentAtom() || str->zone()->isAtomsZone());
cx->markAtom(str);
return true;
}

Просмотреть файл

@ -66,10 +66,13 @@ JSCompartment::wrap(JSContext* cx, JS::MutableHandleValue vp)
/*
* Symbols are GC things, but never need to be wrapped or copied because
* they are always allocated in the atoms compartment.
* they are always allocated in the atoms compartment. They still need to
* be marked in the new compartment's zone, however.
*/
if (vp.isSymbol())
if (vp.isSymbol()) {
cx->markAtomValue(vp);
return true;
}
/* Handle strings. */
if (vp.isString()) {

Просмотреть файл

@ -479,6 +479,7 @@ js::NewFunctionByIdWithReserved(JSContext* cx, JSNative native, unsigned nargs,
MOZ_ASSERT(JSID_IS_STRING(id));
MOZ_ASSERT(!cx->runtime()->isAtomsCompartment(cx->compartment()));
CHECK_REQUEST(cx);
assertSameCompartment(cx, id);
RootedAtom atom(cx, JSID_TO_ATOM(id));
return (flags & JSFUN_CONSTRUCTOR) ?

Просмотреть файл

@ -14,12 +14,6 @@
* The collector can collect all zones at once, or a subset. These types of
* collection are referred to as a full GC and a zone GC respectively.
*
* The atoms zone is only collected in a full GC since objects in any zone may
* have pointers to atoms, and these are not recorded in the cross compartment
* pointer map. Also, the atoms zone is not collected if any thread has an
* AutoKeepAtoms instance on the stack, or there are any exclusive threads using
* the runtime.
*
* It is possible for an incremental collection that started out as a full GC to
* become a zone GC if new zones are created during the course of the
* collection.
@ -179,6 +173,16 @@
* - Arenas are selected for compaction.
* - The contents of those arenas are moved to new arenas.
* - All references to moved things are updated.
*
* Collecting Atoms
* ----------------
*
* Atoms are collected differently from other GC things. They are contained in
* a special zone and things in other zones may have pointers to them that are
* not recorded in the cross compartment pointer map. Each zone holds a bitmap
* with the atoms it might be keeping alive, and atoms are only collected if
* they are not included in any zone's atom bitmap. See AtomMarking.cpp for how
* this bitmap is managed.
*/
#include "jsgcinlines.h"
@ -238,6 +242,7 @@
#include "jsobjinlines.h"
#include "jsscriptinlines.h"
#include "gc/Heap-inl.h"
#include "vm/Stack-inl.h"
#include "vm/String-inl.h"
@ -722,7 +727,7 @@ Chunk::releaseArena(JSRuntime* rt, Arena* arena, const AutoLockGC& lock)
MOZ_ASSERT(arena->allocated());
MOZ_ASSERT(!arena->hasDelayedMarking);
arena->setAsNotAllocated();
arena->release();
addArenaToFreeList(rt, arena);
updateChunkListAfterFree(rt, lock);
}
@ -1940,7 +1945,7 @@ RelocateArena(Arena* arena, SliceBudget& sliceBudget)
MOZ_ASSERT(!arena->hasDelayedMarking);
MOZ_ASSERT(!arena->markOverflow);
MOZ_ASSERT(!arena->allocatedDuringIncremental);
MOZ_ASSERT(arena->bufferedCells->isEmpty());
MOZ_ASSERT(arena->bufferedCells()->isEmpty());
Zone* zone = arena->zone;
@ -3769,11 +3774,6 @@ GCRuntime::beginMarkPhase(JS::gcreason::Reason reason, AutoLockForExclusiveAcces
}
/*
* Atoms are not in the cross-compartment map. If there are any zones that
* are not being collected then we cannot collect the atoms zone, otherwise
* the non-collected zones could contain pointers to atoms that we would
* miss.
*
* If keepAtoms() is true then either an instance of AutoKeepAtoms is
* currently on the stack or parsing is currently happening on another
* thread. In either case we don't have information about which atoms are
@ -3785,8 +3785,12 @@ GCRuntime::beginMarkPhase(JS::gcreason::Reason reason, AutoLockForExclusiveAcces
* Off-main-thread parsing is inhibited after the start of GC which prevents
* races between creating atoms during parsing and sweeping atoms on the
* main thread.
*
* Otherwise, we always schedule a GC in the atoms zone so that atoms which
* the other collected zones are using are marked, and we can update the
* set of atoms in use by the other collected zones at the end of the GC.
*/
if (isFull && !rt->keepAtoms()) {
if (!rt->keepAtoms()) {
Zone* atomsZone = rt->atomsCompartment(lock)->zone();
if (atomsZone->isGCScheduled()) {
MOZ_ASSERT(!atomsZone->isCollecting());
@ -4845,7 +4849,19 @@ MAKE_GC_SWEEP_TASK(SweepMiscTask);
/* virtual */ void
SweepAtomsTask::run()
{
    // Shrink each collected zone's atom bitmap down to the atoms that are
    // actually marked (per the chunk mark bits), then refresh the chunk
    // mark bits from the zone bitmaps before sweeping the atoms table and
    // the symbol registry. NOTE(review): the exact bitmap flow is defined
    // in AtomMarking.cpp — confirm details there.
    AtomMarkingRuntime::Bitmap marked;
    if (runtime->gc.atomMarking.computeBitmapFromChunkMarkBits(runtime, marked)) {
        for (GCZonesIter zone(runtime); !zone.done(); zone.next())
            runtime->gc.atomMarking.updateZoneBitmap(zone, marked);
    } else {
        // Ignore OOM in computeBitmapFromChunkMarkBits. The updateZoneBitmap
        // call can only remove atoms from the zone bitmap, so it is
        // conservative to just not call it.
    }
    runtime->gc.atomMarking.updateChunkMarkBits(runtime);
    runtime->sweepAtoms();
    runtime->unsafeSymbolRegistry().sweep();
    for (CompartmentsIter comp(runtime, SkipAtoms); !comp.done(); comp.next())
        comp->sweepVarNames();
}
@ -5072,11 +5088,6 @@ GCRuntime::beginSweepingZoneGroup(AutoLockForExclusiveAccess& lock)
}
}
if (sweepingAtoms) {
gcstats::AutoPhase ap(stats, gcstats::PHASE_SWEEP_SYMBOL_REGISTRY);
rt->symbolRegistry(lock).sweep();
}
// Rejoin our off-main-thread tasks.
if (sweepingAtoms) {
AutoLockHelperThreadState helperLock;
@ -6718,6 +6729,9 @@ gc::MergeCompartments(JSCompartment* source, JSCompartment* target)
// Merge other info in source's zone into target's zone.
target->zone()->types.typeLifoAlloc.transferFrom(&source->zone()->types.typeLifoAlloc);
// Atoms which are marked in source's zone are now marked in target's zone.
cx->atomMarking().adoptMarkedAtoms(target->zone(), source->zone());
}
void

Просмотреть файл

@ -1053,6 +1053,7 @@ JS_CopyPropertyFrom(JSContext* cx, HandleId id, HandleObject target,
}
JSAutoCompartment ac(cx, target);
cx->markId(id);
RootedId wrappedId(cx, id);
if (!cx->compartment()->wrap(cx, &desc))
return false;
@ -1214,6 +1215,8 @@ DeepCloneValue(JSContext* cx, Value* vp, NewObjectKind newKind)
if (!obj)
return false;
vp->setObject(*obj);
} else {
cx->markAtomValue(*vp);
}
return true;
}
@ -1252,6 +1255,7 @@ js::DeepCloneObjectLiteral(JSContext* cx, HandleObject obj, NewObjectKind newKin
return nullptr;
for (size_t i = 0; i < properties.length(); i++) {
cx->markId(properties[i].get().id);
if (!DeepCloneValue(cx, &properties[i].get().value, newKind))
return nullptr;
}

Просмотреть файл

@ -3265,6 +3265,11 @@ js::detail::CopyScript(JSContext* cx, HandleScript src, HandleScript dst,
if (dst->data)
memcpy(dst->data, src->data, size);
if (cx->zone() != src->zoneFromAnyThread()) {
for (size_t i = 0; i < src->scriptData()->natoms(); i++)
cx->markAtom(src->scriptData()->atoms()[i]);
}
/* Script filenames, bytecodes and atoms are runtime-wide. */
dst->setScriptData(src->scriptData());

Просмотреть файл

@ -185,6 +185,7 @@ UNIFIED_SOURCES += [
'frontend/ParseNode.cpp',
'frontend/TokenStream.cpp',
'gc/Allocator.cpp',
'gc/AtomMarking.cpp',
'gc/Barrier.cpp',
'gc/GCTrace.cpp',
'gc/Iteration.cpp',

Просмотреть файл

@ -27,12 +27,27 @@ using namespace js;
#define NOTHING (true)
// Mark |id|'s atom/symbol (if any) as in use by cx's current zone before a
// cross-compartment wrapper operation uses it on the other side. Always
// returns true so it can appear in the PIERCE pre/post expressions.
static bool
MarkAtoms(JSContext* cx, jsid id)
{
    cx->markId(id);
    return true;
}
// Mark every id in |ids| in cx's current zone; infallible, but returns a
// bool so it composes with the PIERCE pre/post expressions.
static bool
MarkAtoms(JSContext* cx, const AutoIdVector& ids)
{
    for (size_t idx = 0, count = ids.length(); idx < count; idx++)
        cx->markId(ids[idx]);
    return true;
}
bool
CrossCompartmentWrapper::getPropertyDescriptor(JSContext* cx, HandleObject wrapper, HandleId id,
MutableHandle<PropertyDescriptor> desc) const
{
PIERCE(cx, wrapper,
NOTHING,
MarkAtoms(cx, id),
Wrapper::getPropertyDescriptor(cx, wrapper, id, desc),
cx->compartment()->wrap(cx, desc));
}
@ -42,7 +57,7 @@ CrossCompartmentWrapper::getOwnPropertyDescriptor(JSContext* cx, HandleObject wr
MutableHandle<PropertyDescriptor> desc) const
{
PIERCE(cx, wrapper,
NOTHING,
MarkAtoms(cx, id),
Wrapper::getOwnPropertyDescriptor(cx, wrapper, id, desc),
cx->compartment()->wrap(cx, desc));
}
@ -54,7 +69,7 @@ CrossCompartmentWrapper::defineProperty(JSContext* cx, HandleObject wrapper, Han
{
Rooted<PropertyDescriptor> desc2(cx, desc);
PIERCE(cx, wrapper,
cx->compartment()->wrap(cx, &desc2),
MarkAtoms(cx, id) && cx->compartment()->wrap(cx, &desc2),
Wrapper::defineProperty(cx, wrapper, id, desc2, result),
NOTHING);
}
@ -66,7 +81,7 @@ CrossCompartmentWrapper::ownPropertyKeys(JSContext* cx, HandleObject wrapper,
PIERCE(cx, wrapper,
NOTHING,
Wrapper::ownPropertyKeys(cx, wrapper, props),
NOTHING);
MarkAtoms(cx, props));
}
bool
@ -74,7 +89,7 @@ CrossCompartmentWrapper::delete_(JSContext* cx, HandleObject wrapper, HandleId i
ObjectOpResult& result) const
{
PIERCE(cx, wrapper,
NOTHING,
MarkAtoms(cx, id),
Wrapper::delete_(cx, wrapper, id, result),
NOTHING);
}
@ -162,7 +177,7 @@ bool
CrossCompartmentWrapper::has(JSContext* cx, HandleObject wrapper, HandleId id, bool* bp) const
{
PIERCE(cx, wrapper,
NOTHING,
MarkAtoms(cx, id),
Wrapper::has(cx, wrapper, id, bp),
NOTHING);
}
@ -171,7 +186,7 @@ bool
CrossCompartmentWrapper::hasOwn(JSContext* cx, HandleObject wrapper, HandleId id, bool* bp) const
{
PIERCE(cx, wrapper,
NOTHING,
MarkAtoms(cx, id),
Wrapper::hasOwn(cx, wrapper, id, bp),
NOTHING);
}
@ -203,7 +218,7 @@ CrossCompartmentWrapper::get(JSContext* cx, HandleObject wrapper, HandleValue re
RootedValue receiverCopy(cx, receiver);
{
AutoCompartment call(cx, wrappedObject(wrapper));
if (!WrapReceiver(cx, wrapper, &receiverCopy))
if (!MarkAtoms(cx, id) || !WrapReceiver(cx, wrapper, &receiverCopy))
return false;
if (!Wrapper::get(cx, wrapper, receiverCopy, id, vp))
@ -219,6 +234,7 @@ CrossCompartmentWrapper::set(JSContext* cx, HandleObject wrapper, HandleId id, H
RootedValue valCopy(cx, v);
RootedValue receiverCopy(cx, receiver);
PIERCE(cx, wrapper,
MarkAtoms(cx, id) &&
cx->compartment()->wrap(cx, &valCopy) &&
WrapReceiver(cx, wrapper, &receiverCopy),
Wrapper::set(cx, wrapper, id, valCopy, receiverCopy, result),
@ -232,7 +248,7 @@ CrossCompartmentWrapper::getOwnEnumerablePropertyKeys(JSContext* cx, HandleObjec
PIERCE(cx, wrapper,
NOTHING,
Wrapper::getOwnEnumerablePropertyKeys(cx, wrapper, props),
NOTHING);
MarkAtoms(cx, props));
}
/*

Просмотреть файл

@ -71,4 +71,15 @@ js::Mutex::unlock()
stack.popBack();
}
bool
js::Mutex::ownedByCurrentThread() const
{
auto& stack = heldMutexStack();
for (size_t i = 0; i < stack.length(); i++) {
if (stack[i] == this)
return true;
}
return false;
}
#endif

Просмотреть файл

@ -108,6 +108,7 @@ public:
void lock();
void unlock();
bool ownedByCurrentThread() const;
private:
const MutexId id_;

Просмотреть файл

@ -2305,6 +2305,7 @@ Debugger::appendAllocationSite(JSContext* cx, HandleObject obj, HandleSavedFrame
if (!JSObject::constructorDisplayAtom(cx, obj, &ctorName))
return false;
}
cx->markAtom(ctorName);
auto className = obj->getClass()->name;
auto size = JS::ubi::Node(obj.get()).size(cx->runtime()->debuggerMallocSizeOf);
@ -8073,6 +8074,7 @@ DebuggerGenericEval(JSContext* cx, const mozilla::Range<const char16_t> chars,
RootedId id(cx);
for (size_t i = 0; i < keys.length(); i++) {
id = keys[i];
cx->markId(id);
MutableHandleValue val = values[i];
if (!cx->compartment()->wrap(cx, val) ||
!NativeDefineProperty(cx, nenv, id, val, nullptr, nullptr, 0))
@ -9025,7 +9027,7 @@ DebuggerObject::nameGetter(JSContext* cx, unsigned argc, Value* vp)
return true;
}
RootedString result(cx, object->name());
RootedString result(cx, object->name(cx));
if (result)
args.rval().setString(result);
else
@ -9043,7 +9045,7 @@ DebuggerObject::displayNameGetter(JSContext* cx, unsigned argc, Value* vp)
return true;
}
RootedString result(cx, object->displayName());
RootedString result(cx, object->displayName(cx));
if (result)
args.rval().setString(result);
else
@ -10076,19 +10078,23 @@ DebuggerObject::getGlobal(JSContext* cx, HandleDebuggerObject object,
}
JSAtom*
DebuggerObject::name() const
DebuggerObject::name(JSContext* cx) const
{
MOZ_ASSERT(isFunction());
return referent()->as<JSFunction>().explicitName();
JSAtom* atom = referent()->as<JSFunction>().explicitName();
cx->markAtom(atom);
return atom;
}
JSAtom*
DebuggerObject::displayName() const
DebuggerObject::displayName(JSContext* cx) const
{
MOZ_ASSERT(isFunction());
return referent()->as<JSFunction>().displayAtom();
JSAtom* atom = referent()->as<JSFunction>().displayAtom();
cx->markAtom(atom);
return atom;
}
JS::PromiseState
@ -10132,7 +10138,9 @@ DebuggerObject::getParameterNames(JSContext* cx, HandleDebuggerObject object,
PositionalFormalParameterIter fi(script);
for (size_t i = 0; i < referent->nargs(); i++, fi++) {
MOZ_ASSERT(fi.argumentSlot() == i);
result[i].set(fi.name());
JSAtom* atom = fi.name();
cx->markAtom(atom);
result[i].set(atom);
}
}
} else {
@ -10394,6 +10402,9 @@ DebuggerObject::getOwnPropertyNames(JSContext* cx, HandleDebuggerObject object,
return false;
}
for (size_t i = 0; i < ids.length(); i++)
cx->markId(ids[i]);
return result.append(ids.begin(), ids.end());
}
@ -10415,6 +10426,9 @@ DebuggerObject::getOwnPropertySymbols(JSContext* cx, HandleDebuggerObject object
return false;
}
for (size_t i = 0; i < ids.length(); i++)
cx->markId(ids[i]);
return result.append(ids.begin(), ids.end());
}
@ -10429,6 +10443,7 @@ DebuggerObject::getOwnPropertyDescriptor(JSContext* cx, HandleDebuggerObject obj
{
Maybe<AutoCompartment> ac;
ac.emplace(cx, referent);
cx->markId(id);
ErrorCopier ec(ac);
if (!GetOwnPropertyDescriptor(cx, referent, id, desc))
@ -10512,6 +10527,7 @@ DebuggerObject::defineProperty(JSContext* cx, HandleDebuggerObject object, Handl
ac.emplace(cx, referent);
if (!cx->compartment()->wrap(cx, &desc))
return false;
cx->markId(id);
ErrorCopier ec(ac);
if (!DefineProperty(cx, referent, id, desc))
@ -10542,6 +10558,7 @@ DebuggerObject::defineProperties(JSContext* cx, HandleDebuggerObject object,
for (size_t i = 0; i < descs.length(); i++) {
if (!cx->compartment()->wrap(cx, descs[i]))
return false;
cx->markId(ids[i]);
}
ErrorCopier ec(ac);
@ -10562,6 +10579,8 @@ DebuggerObject::deleteProperty(JSContext* cx, HandleDebuggerObject object, Handl
Maybe<AutoCompartment> ac;
ac.emplace(cx, referent);
cx->markId(id);
ErrorCopier ec(ac);
return DeleteProperty(cx, referent, id, result);
}
@ -11272,6 +11291,7 @@ DebuggerEnvironment::getNames(JSContext* cx, HandleDebuggerEnvironment environme
for (size_t i = 0; i < ids.length(); ++i) {
jsid id = ids[i];
if (JSID_IS_ATOM(id) && IsIdentifier(JSID_TO_ATOM(id))) {
cx->markId(id);
if (!result.append(id))
return false;
}
@ -11293,6 +11313,8 @@ DebuggerEnvironment::find(JSContext* cx, HandleDebuggerEnvironment environment,
Maybe<AutoCompartment> ac;
ac.emplace(cx, env);
cx->markId(id);
/* This can trigger resolve hooks. */
ErrorCopier ec(ac);
for (; env; env = env->enclosingEnvironment()) {
@ -11325,6 +11347,8 @@ DebuggerEnvironment::getVariable(JSContext* cx, HandleDebuggerEnvironment enviro
Maybe<AutoCompartment> ac;
ac.emplace(cx, referent);
cx->markId(id);
/* This can trigger getters. */
ErrorCopier ec(ac);
@ -11381,6 +11405,7 @@ DebuggerEnvironment::setVariable(JSContext* cx, HandleDebuggerEnvironment enviro
ac.emplace(cx, referent);
if (!cx->compartment()->wrap(cx, &value))
return false;
cx->markId(id);
/* This can trigger setters. */
ErrorCopier ec(ac);

Просмотреть файл

@ -1445,8 +1445,8 @@ class DebuggerObject : public NativeObject
bool isGlobal() const;
bool isScriptedProxy() const;
bool isPromise() const;
JSAtom* name() const;
JSAtom* displayName() const;
JSAtom* name(JSContext* cx) const;
JSAtom* displayName(JSContext* cx) const;
JS::PromiseState promiseState() const;
double promiseLifetime() const;
double promiseTimeToResolution() const;

Просмотреть файл

@ -323,7 +323,8 @@ StatsZoneCallback(JSRuntime* rt, void* data, Zone* zone)
&zStats.typePool,
&zStats.baselineStubsOptimized,
&zStats.uniqueIdMap,
&zStats.shapeTables);
&zStats.shapeTables,
&rtStats->runtime.atomsMarkBitmaps);
}
static void

Просмотреть файл

@ -18,6 +18,8 @@
#include "jsobjinlines.h"
#include "gc/Heap-inl.h"
namespace js {
inline uint8_t*
@ -296,6 +298,14 @@ NativeObject::updateShapeAfterMovingGC()
shape_.unsafeSet(Forwarded(shape));
}
inline bool
NativeObject::isInWholeCellBuffer() const
{
const gc::TenuredCell* cell = &asTenured();
gc::ArenaCellSet* cells = cell->arena()->bufferedCells();
return cells && cells->hasCell(cell);
}
/* Make an object with pregenerated shape from a NEWOBJECT bytecode. */
static inline PlainObject*
CopyInitializerObject(JSContext* cx, HandlePlainObject baseobj, NewObjectKind newKind = GenericObject)

Просмотреть файл

@ -475,11 +475,7 @@ class NativeObject : public ShapedObject
elements_ = emptyObjectElementsShared;
}
bool isInWholeCellBuffer() const {
const gc::TenuredCell* cell = &asTenured();
gc::ArenaCellSet* cells = cell->arena()->bufferedCells;
return cells && cells->hasCell(cell);
}
inline bool isInWholeCellBuffer() const;
protected:
#ifdef DEBUG
@ -842,16 +838,22 @@ class NativeObject : public ShapedObject
return *getSlotAddress(slot);
}
// Check requirements on values stored to this object (debug-only asserts):
// an object value must live in this object's compartment, and any atom or
// symbol in the value must be marked in this object's zone.
inline void checkStoredValue(const Value& v) {
    MOZ_ASSERT(IsObjectValueInCompartment(v, compartment()));
    MOZ_ASSERT(AtomIsMarked(zoneFromAnyThread(), v));
}
void setSlot(uint32_t slot, const Value& value) {
MOZ_ASSERT(slotInRange(slot));
MOZ_ASSERT(IsObjectValueInCompartment(value, compartment()));
checkStoredValue(value);
getSlotRef(slot).set(this, HeapSlot::Slot, slot, value);
}
void initSlot(uint32_t slot, const Value& value) {
MOZ_ASSERT(getSlot(slot).isUndefined());
MOZ_ASSERT(slotInRange(slot));
MOZ_ASSERT(IsObjectValueInCompartment(value, compartment()));
checkStoredValue(value);
initSlotUnchecked(slot, value);
}
@ -929,13 +931,13 @@ class NativeObject : public ShapedObject
void setFixedSlot(uint32_t slot, const Value& value) {
MOZ_ASSERT(slot < numFixedSlots());
MOZ_ASSERT(IsObjectValueInCompartment(value, compartment()));
checkStoredValue(value);
fixedSlots()[slot].set(this, HeapSlot::Slot, slot, value);
}
void initFixedSlot(uint32_t slot, const Value& value) {
MOZ_ASSERT(slot < numFixedSlots());
MOZ_ASSERT(IsObjectValueInCompartment(value, compartment()));
checkStoredValue(value);
fixedSlots()[slot].init(this, HeapSlot::Slot, slot, value);
}
@ -1033,6 +1035,7 @@ class NativeObject : public ShapedObject
void setDenseElementUnchecked(uint32_t index, const Value& val) {
MOZ_ASSERT(index < getDenseInitializedLength());
MOZ_ASSERT(!denseElementsAreCopyOnWrite());
checkStoredValue(val);
elements_[index].set(this, HeapSlot::Element, index, val);
}
@ -1054,6 +1057,7 @@ class NativeObject : public ShapedObject
MOZ_ASSERT(index < getDenseInitializedLength());
MOZ_ASSERT(!denseElementsAreCopyOnWrite());
MOZ_ASSERT(!denseElementsAreFrozen());
checkStoredValue(val);
elements_[index].init(this, HeapSlot::Element, index, val);
}
@ -1078,6 +1082,10 @@ class NativeObject : public ShapedObject
MOZ_ASSERT(dstStart + count <= getDenseCapacity());
MOZ_ASSERT(!denseElementsAreCopyOnWrite());
MOZ_ASSERT(!denseElementsAreFrozen());
#ifdef DEBUG
for (uint32_t i = 0; i < count; ++i)
checkStoredValue(src[i]);
#endif
if (JS::shadow::Zone::asShadowZone(zone())->needsIncrementalBarrier()) {
for (uint32_t i = 0; i < count; ++i)
elements_[dstStart + i].set(this, HeapSlot::Element, dstStart + i, src[i]);
@ -1091,6 +1099,10 @@ class NativeObject : public ShapedObject
MOZ_ASSERT(dstStart + count <= getDenseCapacity());
MOZ_ASSERT(!denseElementsAreCopyOnWrite());
MOZ_ASSERT(!denseElementsAreFrozen());
#ifdef DEBUG
for (uint32_t i = 0; i < count; ++i)
checkStoredValue(src[i]);
#endif
memcpy(&elements_[dstStart], src, count * sizeof(HeapSlot));
elementsRangeWriteBarrierPost(dstStart, count);
}

Просмотреть файл

@ -1536,6 +1536,8 @@ js::CloneScriptRegExpObject(JSContext* cx, RegExpObject& reobj)
/* NB: Keep this in sync with XDRScriptRegExpObject. */
RootedAtom source(cx, reobj.getSource());
cx->markAtom(source);
return RegExpObject::create(cx, source, reobj.getFlags(), nullptr, cx->tempLifoAlloc());
}

Просмотреть файл

@ -694,6 +694,13 @@ struct JSRuntime : public JS::shadow::Runtime,
return numExclusiveThreads > 0;
}
#ifdef DEBUG
// Debug-only: true when the calling thread has exclusive access to the
// runtime — either no exclusive threads exist and the main thread holds
// access, or this thread owns the exclusive-access lock.
bool currentThreadHasExclusiveAccess() const {
    return (!exclusiveThreadsPresent() && mainThreadHasExclusiveAccess) ||
           exclusiveAccessLock.ownedByCurrentThread();
}
#endif
// How many compartments there are across all zones. This number includes
// ExclusiveContext compartments, so it isn't necessarily equal to the
// number of compartments visited by CompartmentsIter.
@ -1080,9 +1087,15 @@ struct JSRuntime : public JS::shadow::Runtime,
// The |lock| parameter is proof that the exclusive-access lock is held.
js::AtomSet& atoms(js::AutoLockForExclusiveAccess& lock) {
    return *atoms_;
}
// "unsafe" variant: no proof of the exclusive-access lock is required;
// the caller takes responsibility for synchronization.
js::AtomSet& unsafeAtoms() {
    return *atoms_;
}
JSCompartment* atomsCompartment(js::AutoLockForExclusiveAccess& lock) {
    return atomsCompartment_;
}
// "unsafe" variant, as for unsafeAtoms() above.
JSCompartment* unsafeAtomsCompartment() {
    return atomsCompartment_;
}
bool isAtomsCompartment(JSCompartment* comp) {
return comp == atomsCompartment_;
@ -1096,6 +1109,9 @@ struct JSRuntime : public JS::shadow::Runtime,
// The |lock| parameter is proof that the exclusive-access lock is held.
js::SymbolRegistry& symbolRegistry(js::AutoLockForExclusiveAccess& lock) {
    return symbolRegistry_;
}
// "unsafe" variant: no lock proof required; the caller takes
// responsibility for synchronization (used by the atom sweeping task).
js::SymbolRegistry& unsafeSymbolRegistry() {
    return symbolRegistry_;
}
// Permanent atoms are fixed during initialization of the runtime and are
// not modified or collected until the runtime is destroyed. These may be

Просмотреть файл

@ -159,7 +159,7 @@ class SavedFrame : public NativeObject {
private:
static SavedFrame* create(JSContext* cx);
static MOZ_MUST_USE bool finishSavedFrameInit(JSContext* cx, HandleObject ctor, HandleObject proto);
void initFromLookup(HandleLookup lookup);
void initFromLookup(JSContext* cx, HandleLookup lookup);
void initSource(JSAtom* source);
void initLine(uint32_t line);
void initColumn(uint32_t column);

Просмотреть файл

@ -489,8 +489,19 @@ SavedFrame::initParent(SavedFrame* maybeParent)
}
void
SavedFrame::initFromLookup(SavedFrame::HandleLookup lookup)
SavedFrame::initFromLookup(JSContext* cx, SavedFrame::HandleLookup lookup)
{
// Make sure any atoms used in the lookup are marked in the current zone.
// Normally we would try to keep these mark bits up to date around the
// points where the context moves between compartments, but Lookups live on
// the stack (where the atoms are kept alive regardless) and this is a
// more convenient pinchpoint.
cx->markAtom(lookup->source);
if (lookup->functionDisplayName)
cx->markAtom(lookup->functionDisplayName);
if (lookup->asyncCause)
cx->markAtom(lookup->asyncCause);
initSource(lookup->source);
initLine(lookup->line);
initColumn(lookup->column);
@ -729,14 +740,17 @@ GetSavedFrameSource(JSContext* cx, HandleObject savedFrame, MutableHandleString
CHECK_REQUEST(cx);
MOZ_RELEASE_ASSERT(cx->compartment());
AutoMaybeEnterFrameCompartment ac(cx, savedFrame);
bool skippedAsync;
js::RootedSavedFrame frame(cx, UnwrapSavedFrame(cx, savedFrame, selfHosted, skippedAsync));
if (!frame) {
sourcep.set(cx->runtime()->emptyString);
return SavedFrameResult::AccessDenied;
{
AutoMaybeEnterFrameCompartment ac(cx, savedFrame);
bool skippedAsync;
js::RootedSavedFrame frame(cx, UnwrapSavedFrame(cx, savedFrame, selfHosted, skippedAsync));
if (!frame) {
sourcep.set(cx->runtime()->emptyString);
return SavedFrameResult::AccessDenied;
}
sourcep.set(frame->getSource());
}
sourcep.set(frame->getSource());
cx->markAtom(sourcep);
return SavedFrameResult::Ok;
}
@ -788,14 +802,18 @@ GetSavedFrameFunctionDisplayName(JSContext* cx, HandleObject savedFrame, Mutable
CHECK_REQUEST(cx);
MOZ_RELEASE_ASSERT(cx->compartment());
AutoMaybeEnterFrameCompartment ac(cx, savedFrame);
bool skippedAsync;
js::RootedSavedFrame frame(cx, UnwrapSavedFrame(cx, savedFrame, selfHosted, skippedAsync));
if (!frame) {
namep.set(nullptr);
return SavedFrameResult::AccessDenied;
{
AutoMaybeEnterFrameCompartment ac(cx, savedFrame);
bool skippedAsync;
js::RootedSavedFrame frame(cx, UnwrapSavedFrame(cx, savedFrame, selfHosted, skippedAsync));
if (!frame) {
namep.set(nullptr);
return SavedFrameResult::AccessDenied;
}
namep.set(frame->getFunctionDisplayName());
}
namep.set(frame->getFunctionDisplayName());
if (namep)
cx->markAtom(namep);
return SavedFrameResult::Ok;
}
@ -807,22 +825,26 @@ GetSavedFrameAsyncCause(JSContext* cx, HandleObject savedFrame, MutableHandleStr
CHECK_REQUEST(cx);
MOZ_RELEASE_ASSERT(cx->compartment());
AutoMaybeEnterFrameCompartment ac(cx, savedFrame);
bool skippedAsync;
// This function is always called with self-hosted frames excluded by
// GetValueIfNotCached in dom/bindings/Exceptions.cpp. However, we want
// to include them because our Promise implementation causes us to have
// the async cause on a self-hosted frame. So we just ignore the
// parameter and always include self-hosted frames.
js::RootedSavedFrame frame(cx, UnwrapSavedFrame(cx, savedFrame, SavedFrameSelfHosted::Include,
skippedAsync));
if (!frame) {
asyncCausep.set(nullptr);
return SavedFrameResult::AccessDenied;
{
AutoMaybeEnterFrameCompartment ac(cx, savedFrame);
bool skippedAsync;
// This function is always called with self-hosted frames excluded by
// GetValueIfNotCached in dom/bindings/Exceptions.cpp. However, we want
// to include them because our Promise implementation causes us to have
// the async cause on a self-hosted frame. So we just ignore the
// parameter and always include self-hosted frames.
js::RootedSavedFrame frame(cx, UnwrapSavedFrame(cx, savedFrame, SavedFrameSelfHosted::Include,
skippedAsync));
if (!frame) {
asyncCausep.set(nullptr);
return SavedFrameResult::AccessDenied;
}
asyncCausep.set(frame->getAsyncCause());
if (!asyncCausep && skippedAsync)
asyncCausep.set(cx->names().Async);
}
asyncCausep.set(frame->getAsyncCause());
if (!asyncCausep && skippedAsync)
asyncCausep.set(cx->names().Async);
if (asyncCausep)
cx->markAtom(asyncCausep);
return SavedFrameResult::Ok;
}
@ -1492,7 +1514,7 @@ SavedStacks::createFrameFromLookup(JSContext* cx, SavedFrame::HandleLookup looku
RootedSavedFrame frame(cx, SavedFrame::create(cx));
if (!frame)
return nullptr;
frame->initFromLookup(lookup);
frame->initFromLookup(cx, lookup);
if (!FreezeObject(cx, frame))
return nullptr;

Просмотреть файл

@ -128,6 +128,7 @@ CreateEnvironmentShape(ExclusiveContext* cx, BindingIter& bi, const Class* cls,
BindingLocation loc = bi.location();
if (loc.kind() == BindingLocation::Kind::Environment) {
name = bi.name();
cx->markAtom(name);
shape = NextEnvironmentShape(cx, name, bi.kind(), loc.slot(), stackBase, shape);
if (!shape)
return nullptr;
@ -141,6 +142,16 @@ template <typename ConcreteScope>
static UniquePtr<typename ConcreteScope::Data>
CopyScopeData(ExclusiveContext* cx, Handle<typename ConcreteScope::Data*> data)
{
// Make sure the binding names are marked in the context's zone, if we are
// copying data from another zone.
BindingName* names = nullptr;
uint32_t length = 0;
ConcreteScope::getDataNamesAndLength(data, &names, &length);
for (size_t i = 0; i < length; i++) {
if (JSAtom* name = names[i].name())
cx->markAtom(name);
}
size_t dataSize = ConcreteScope::sizeOfData(data->length);
uint8_t* copyBytes = cx->zone()->pod_malloc<uint8_t>(dataSize);
if (!copyBytes) {

Просмотреть файл

@ -374,6 +374,11 @@ class LexicalScope : public Scope
return sizeof(Data) + (length ? length - 1 : 0) * sizeof(BindingName);
}
// Expose this scope data's binding-name array and its length, so generic
// code (e.g. CopyScopeData) can iterate the names without knowing the
// concrete scope type.
static void getDataNamesAndLength(Data* data, BindingName** names, uint32_t* length) {
    *names = data->names;
    *length = data->length;
}
static LexicalScope* create(ExclusiveContext* cx, ScopeKind kind, Handle<Data*> data,
uint32_t firstFrameSlot, HandleScope enclosing);
@ -486,6 +491,11 @@ class FunctionScope : public Scope
return sizeof(Data) + (length ? length - 1 : 0) * sizeof(BindingName);
}
// Expose this scope data's binding-name array and its length, so generic
// code (e.g. CopyScopeData) can iterate the names without knowing the
// concrete scope type.
static void getDataNamesAndLength(Data* data, BindingName** names, uint32_t* length) {
    *names = data->names;
    *length = data->length;
}
static FunctionScope* create(ExclusiveContext* cx, Handle<Data*> data,
bool hasParameterExprs, bool needsEnvironment,
HandleFunction fun, HandleScope enclosing);
@ -579,6 +589,11 @@ class VarScope : public Scope
return sizeof(Data) + (length ? length - 1 : 0) * sizeof(BindingName);
}
// Expose this scope data's binding-name array and its length, so generic
// code (e.g. CopyScopeData) can iterate the names without knowing the
// concrete scope type.
static void getDataNamesAndLength(Data* data, BindingName** names, uint32_t* length) {
    *names = data->names;
    *length = data->length;
}
static VarScope* create(ExclusiveContext* cx, ScopeKind kind, Handle<Data*> data,
uint32_t firstFrameSlot, bool needsEnvironment,
HandleScope enclosing);
@ -667,6 +682,11 @@ class GlobalScope : public Scope
return sizeof(Data) + (length ? length - 1 : 0) * sizeof(BindingName);
}
// Expose this scope data's binding-name array and its length, so generic
// code (e.g. CopyScopeData) can iterate the names without knowing the
// concrete scope type.
static void getDataNamesAndLength(Data* data, BindingName** names, uint32_t* length) {
    *names = data->names;
    *length = data->length;
}
static GlobalScope* create(ExclusiveContext* cx, ScopeKind kind, Handle<Data*> data);
static GlobalScope* createEmpty(ExclusiveContext* cx, ScopeKind kind) {
@ -766,6 +786,11 @@ class EvalScope : public Scope
return sizeof(Data) + (length ? length - 1 : 0) * sizeof(BindingName);
}
// Expose this scope data's binding-name array and its length, so generic
// code (e.g. CopyScopeData) can iterate the names without knowing the
// concrete scope type.
static void getDataNamesAndLength(Data* data, BindingName** names, uint32_t* length) {
    *names = data->names;
    *length = data->length;
}
static EvalScope* create(ExclusiveContext* cx, ScopeKind kind, Handle<Data*> data,
HandleScope enclosing);
@ -867,6 +892,11 @@ class ModuleScope : public Scope
return sizeof(Data) + (length ? length - 1 : 0) * sizeof(BindingName);
}
// Expose this scope data's binding-name array and its length, so generic
// code (e.g. CopyScopeData) can iterate the names without knowing the
// concrete scope type.
static void getDataNamesAndLength(Data* data, BindingName** names, uint32_t* length) {
    *names = data->names;
    *length = data->length;
}
static ModuleScope* create(ExclusiveContext* cx, Handle<Data*> data,
Handle<ModuleObject*> module, HandleScope enclosing);

Просмотреть файл

@ -15,6 +15,7 @@
#include "mozilla/TemplateLib.h"
#include "jsapi.h"
#include "jsatom.h"
#include "jsfriendapi.h"
#include "jspropertytree.h"
#include "jstypes.h"
@ -1406,6 +1407,8 @@ Shape::Shape(const StackShape& other, uint32_t nfixed)
MOZ_ASSERT_IF(allocKind == gc::AllocKind::SHAPE, !other.isAccessorShape());
#endif
MOZ_ASSERT_IF(!isEmptyShape(), AtomIsMarked(zone(), propid()));
MOZ_ASSERT_IF(attrs & (JSPROP_GETTER | JSPROP_SETTER), attrs & JSPROP_SHARED);
kids.setNull();
}

Просмотреть файл

@ -2250,6 +2250,7 @@ JSStructuredCloneReader::readSavedFrame(uint32_t principalsTag)
if (!atomName)
return nullptr;
}
savedFrame->initFunctionDisplayName(atomName);
RootedValue cause(context());

Просмотреть файл

@ -47,8 +47,13 @@ Symbol::new_(ExclusiveContext* cx, JS::SymbolCode code, JSString* description)
// Lock to allocate. If symbol allocation becomes a bottleneck, this can
// probably be replaced with an assertion that we're on the main thread.
AutoLockForExclusiveAccess lock(cx);
AutoCompartment ac(cx, cx->atomsCompartment(lock), &lock);
return newInternal(cx, code, cx->compartment()->randomHashCode(), atom, lock);
Symbol* sym;
{
AutoCompartment ac(cx, cx->atomsCompartment(lock), &lock);
sym = newInternal(cx, code, cx->compartment()->randomHashCode(), atom, lock);
}
cx->markAtom(sym);
return sym;
}
Symbol*
@ -62,21 +67,27 @@ Symbol::for_(js::ExclusiveContext* cx, HandleString description)
SymbolRegistry& registry = cx->symbolRegistry(lock);
SymbolRegistry::AddPtr p = registry.lookupForAdd(atom);
if (p)
if (p) {
cx->markAtom(*p);
return *p;
AutoCompartment ac(cx, cx->atomsCompartment(lock), &lock);
Symbol* sym = newInternal(cx, SymbolCode::InSymbolRegistry, atom->hash(), atom, lock);
if (!sym)
return nullptr;
// p is still valid here because we have held the lock since the
// lookupForAdd call, and newInternal can't GC.
if (!registry.add(p, sym)) {
// SystemAllocPolicy does not report OOM.
ReportOutOfMemory(cx);
return nullptr;
}
Symbol* sym;
{
AutoCompartment ac(cx, cx->atomsCompartment(lock), &lock);
sym = newInternal(cx, SymbolCode::InSymbolRegistry, atom->hash(), atom, lock);
if (!sym)
return nullptr;
// p is still valid here because we have held the lock since the
// lookupForAdd call, and newInternal can't GC.
if (!registry.add(p, sym)) {
// SystemAllocPolicy does not report OOM.
ReportOutOfMemory(cx);
return nullptr;
}
}
cx->markAtom(sym);
return sym;
}

Просмотреть файл

@ -4466,7 +4466,8 @@ Zone::addSizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf,
size_t* typePool,
size_t* baselineStubsOptimized,
size_t* uniqueIdMap,
size_t* shapeTables)
size_t* shapeTables,
size_t* atomsMarkBitmaps)
{
*typePool += types.typeLifoAlloc.sizeOfExcludingThis(mallocSizeOf);
if (jitZone()) {
@ -4476,6 +4477,7 @@ Zone::addSizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf,
*uniqueIdMap += uniqueIds_.sizeOfExcludingThis(mallocSizeOf);
*shapeTables += baseShapes.sizeOfExcludingThis(mallocSizeOf)
+ initialShapes.sizeOfExcludingThis(mallocSizeOf);
*atomsMarkBitmaps += markedAtoms.sizeOfExcludingThis(mallocSizeOf);
}
TypeZone::TypeZone(Zone* zone)

Просмотреть файл

@ -1314,6 +1314,8 @@ mozJSComponentLoader::ImportInto(const nsACString& aLocation,
JSAutoCompartment target_ac(cx, targetObj);
JS_MarkCrossZoneId(cx, symbolId);
if (!JS_WrapValue(cx, &value) ||
!JS_SetPropertyById(cx, targetObj, symbolId, value)) {
JSAutoByteString bytes;

Просмотреть файл

@ -408,9 +408,12 @@ ExportFunction(JSContext* cx, HandleValue vfunction, HandleValue vscope, HandleV
RootedString funName(cx, JS_GetFunctionId(fun));
if (!funName)
funName = JS_AtomizeAndPinString(cx, "");
JS_MarkCrossZoneIdValue(cx, StringValue(funName));
if (!JS_StringToId(cx, funName, &id))
return false;
} else {
JS_MarkCrossZoneId(cx, id);
}
MOZ_ASSERT(JSID_IS_STRING(id));
@ -473,6 +476,8 @@ CreateObjectIn(JSContext* cx, HandleValue vobj, CreateObjectInOptions& options,
RootedObject obj(cx);
{
JSAutoCompartment ac(cx, scope);
JS_MarkCrossZoneId(cx, options.defineAs);
obj = JS_NewPlainObject(cx);
if (!obj)
return false;

Просмотреть файл

@ -197,6 +197,7 @@ SandboxImport(JSContext* cx, unsigned argc, Value* vp)
return false;
}
}
JS_MarkCrossZoneIdValue(cx, StringValue(funname));
RootedId id(cx);
if (!JS_StringToId(cx, funname, &id))

Просмотреть файл

@ -2370,6 +2370,10 @@ ReportJSRuntimeExplicitTreeStats(const JS::RuntimeStats& rtStats,
KIND_HEAP, rtStats.runtime.atomsTable,
"The atoms table.");
RREPORT_BYTES(rtPath + NS_LITERAL_CSTRING("runtime/atoms-mark-bitmaps"),
KIND_HEAP, rtStats.runtime.atomsMarkBitmaps,
"Mark bitmaps for atoms held by each zone.");
RREPORT_BYTES(rtPath + NS_LITERAL_CSTRING("runtime/contexts"),
KIND_HEAP, rtStats.runtime.contexts,
"JSContext objects and structures that belong to them.");

Просмотреть файл

@ -318,6 +318,7 @@ bool JSXrayTraits::getOwnPropertyFromWrapperIfSafe(JSContext* cx,
RootedObject target(cx, getTargetObject(wrapper));
{
JSAutoCompartment ac(cx, target);
JS_MarkCrossZoneId(cx, id);
if (!getOwnPropertyFromTargetIfSafe(cx, target, wrapper, id, outDesc))
return false;
}
@ -348,6 +349,7 @@ bool JSXrayTraits::getOwnPropertyFromTargetIfSafe(JSContext* cx,
// Disallow accessor properties.
if (desc.hasGetterOrSetter()) {
JSAutoCompartment ac(cx, wrapper);
JS_MarkCrossZoneId(cx, id);
return ReportWrapperDenial(cx, id, WrapperDenialForXray, "property has accessor");
}
@ -359,6 +361,7 @@ bool JSXrayTraits::getOwnPropertyFromTargetIfSafe(JSContext* cx,
// Disallow non-subsumed objects.
if (!AccessCheck::subsumes(target, propObj)) {
JSAutoCompartment ac(cx, wrapper);
JS_MarkCrossZoneId(cx, id);
return ReportWrapperDenial(cx, id, WrapperDenialForXray, "value not same-origin with target");
}
@ -366,12 +369,14 @@ bool JSXrayTraits::getOwnPropertyFromTargetIfSafe(JSContext* cx,
XrayType xrayType = GetXrayType(propObj);
if (xrayType == NotXray || xrayType == XrayForOpaqueObject) {
JSAutoCompartment ac(cx, wrapper);
JS_MarkCrossZoneId(cx, id);
return ReportWrapperDenial(cx, id, WrapperDenialForXray, "value not Xrayable");
}
// Disallow callables.
if (JS::IsCallable(propObj)) {
JSAutoCompartment ac(cx, wrapper);
JS_MarkCrossZoneId(cx, id);
return ReportWrapperDenial(cx, id, WrapperDenialForXray, "value is callable");
}
}
@ -379,6 +384,7 @@ bool JSXrayTraits::getOwnPropertyFromTargetIfSafe(JSContext* cx,
// Disallow any property that shadows something on its (Xrayed)
// prototype chain.
JSAutoCompartment ac2(cx, wrapper);
JS_MarkCrossZoneId(cx, id);
RootedObject proto(cx);
bool foundOnProto = false;
if (!JS_GetPrototype(cx, wrapper, &proto) ||
@ -554,6 +560,7 @@ JSXrayTraits::resolveOwnProperty(JSContext* cx, const Wrapper& jsWrapper,
Rooted<PropertyDescriptor> innerDesc(cx);
{
JSAutoCompartment ac(cx, target);
JS_MarkCrossZoneId(cx, id);
if (!JS_GetOwnPropertyDescriptorById(cx, target, id, &innerDesc))
return false;
}
@ -576,6 +583,8 @@ JSXrayTraits::resolveOwnProperty(JSContext* cx, const Wrapper& jsWrapper,
return true;
} else if (id == GetJSIDByIndex(cx, XPCJSContext::IDX_NAME)) {
RootedString fname(cx, JS_GetFunctionId(JS_GetObjectFunction(target)));
if (fname)
JS_MarkCrossZoneIdValue(cx, StringValue(fname));
FillPropertyDescriptor(desc, wrapper, JSPROP_PERMANENT | JSPROP_READONLY,
fname ? StringValue(fname) : JS_GetEmptyStringValue(cx));
} else {
@ -706,6 +715,7 @@ JSXrayTraits::delete_(JSContext* cx, HandleObject wrapper, HandleId id, ObjectOp
if (isObjectOrArrayInstance) {
RootedObject target(cx, getTargetObject(wrapper));
JSAutoCompartment ac(cx, target);
JS_MarkCrossZoneId(cx, id);
Rooted<PropertyDescriptor> desc(cx);
if (!getOwnPropertyFromTargetIfSafe(cx, target, wrapper, id, &desc))
return false;
@ -763,6 +773,7 @@ JSXrayTraits::defineProperty(JSContext* cx, HandleObject wrapper, HandleId id,
Rooted<PropertyDescriptor> wrappedDesc(cx, desc);
JSAutoCompartment ac(cx, target);
JS_MarkCrossZoneId(cx, id);
if (!JS_WrapPropertyDescriptor(cx, &wrappedDesc) ||
!JS_DefinePropertyById(cx, target, id, wrappedDesc, result))
{
@ -781,6 +792,7 @@ JSXrayTraits::defineProperty(JSContext* cx, HandleObject wrapper, HandleId id,
{
RootedObject target(cx, getTargetObject(wrapper));
JSAutoCompartment ac(cx, target);
JS_MarkCrossZoneId(cx, id);
if (!JS_DefinePropertyById(cx, target, id, desc, result))
return false;
*defined = true;
@ -859,6 +871,8 @@ JSXrayTraits::enumerateNames(JSContext* cx, HandleObject wrapper, unsigned flags
props.infallibleAppend(id);
}
}
for (size_t i = 0; i < props.length(); ++i)
JS_MarkCrossZoneId(cx, props[i]);
return true;
} else if (IsTypedArrayKey(key)) {
uint32_t length = JS_GetTypedArrayLength(target);
@ -1491,6 +1505,7 @@ XrayTraits::resolveOwnProperty(JSContext* cx, const Wrapper& jsWrapper,
bool found = false;
if (expando) {
JSAutoCompartment ac(cx, expando);
JS_MarkCrossZoneId(cx, id);
if (!JS_GetOwnPropertyDescriptorById(cx, expando, id, desc))
return false;
found = !!desc.object();
@ -1586,6 +1601,7 @@ XPCWrappedNativeXrayTraits::enumerateNames(JSContext* cx, HandleObject wrapper,
return false;
for (size_t n = 0; n < wnProps.length(); ++n) {
RootedId id(cx, wnProps[n]);
JS_MarkCrossZoneId(cx, id);
bool hasProp;
if (JS_HasPropertyById(cx, wrapper, id, &hasProp) && hasProp)
props.infallibleAppend(id);
@ -2179,6 +2195,7 @@ XrayWrapper<Base, Traits>::defineProperty(JSContext* cx, HandleObject wrapper,
// compartment, so we need to enter it.
RootedObject target(cx, Traits::singleton.getTargetObject(wrapper));
JSAutoCompartment ac(cx, target);
JS_MarkCrossZoneId(cx, id);
// Grab the relevant expando object.
RootedObject expandoObject(cx, Traits::singleton.ensureExpandoObject(cx, wrapper,
@ -2222,6 +2239,7 @@ XrayWrapper<Base, Traits>::delete_(JSContext* cx, HandleObject wrapper,
if (expando) {
JSAutoCompartment ac(cx, expando);
JS_MarkCrossZoneId(cx, id);
bool hasProp;
if (!JS_HasPropertyById(cx, expando, id, &hasProp)) {
return false;
@ -2472,6 +2490,8 @@ XrayWrapper<Base, Traits>::getPropertyKeys(JSContext* cx, HandleObject wrapper,
if (!js::GetPropertyKeys(cx, expando, flags, &props))
return false;
}
for (size_t i = 0; i < props.length(); ++i)
JS_MarkCrossZoneId(cx, props[i]);
return Traits::singleton.enumerateNames(cx, wrapper, flags, props);
}