Mirror of https://github.com/mozilla/gecko-dev.git

Bug 1295967 - Share Shapes and BaseShapes across compartments. r=jonco,fitzgen,njn

This commit is contained in:
Parent: 864b069141
Commit: d8047f2b2f
@@ -1035,6 +1035,52 @@ struct TwoByteString::HashPolicy {
    }
};

// Returns whether `edge` should be included in a heap snapshot of
// `compartments`. The optional `policy` out-param is set to INCLUDE_EDGES
// if we want to include the referent's edges, or EXCLUDE_EDGES if we don't
// want to include them.
static bool
ShouldIncludeEdge(JS::CompartmentSet* compartments,
                  const ubi::Node& origin, const ubi::Edge& edge,
                  CoreDumpWriter::EdgePolicy* policy = nullptr)
{
    if (policy) {
        *policy = CoreDumpWriter::INCLUDE_EDGES;
    }

    if (!compartments) {
        // We aren't targeting a particular set of compartments, so serialize all the
        // things!
        return true;
    }

    // We are targeting a particular set of compartments. If this node is in our target
    // set, serialize it and all of its edges. If this node is _not_ in our
    // target set, we also serialize under the assumption that it is a shared
    // resource being used by something in our target compartments since we reached it
    // by traversing the heap graph. However, we do not serialize its outgoing
    // edges and we abandon further traversal from this node.
    //
    // If the node does not belong to any compartment, we also serialize its outgoing
    // edges. This case is relevant for Shapes: they don't belong to a specific
    // compartment and contain edges to parent/kids Shapes we want to include. Note
    // that these Shapes may contain pointers into our target compartment (the
    // Shape's getter/setter JSObjects). However, we do not serialize nodes in other
    // compartments that are reachable from these non-compartment nodes.

    JSCompartment* compartment = edge.referent.compartment();

    if (!compartment || compartments->has(compartment)) {
        return true;
    }

    if (policy) {
        *policy = CoreDumpWriter::EXCLUDE_EDGES;
    }

    return !!origin.compartment();
}

// A `CoreDumpWriter` that serializes nodes to protobufs and writes them to the
// given `ZeroCopyOutputStream`.
class MOZ_STACK_CLASS StreamWriter : public CoreDumpWriter
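Editor's note (not part of the patch): the inclusion rule above boils down to three cases — no target compartment set, a referent inside or outside the target set, and compartment-less referents such as the newly zone-shared Shapes. The standalone C++ sketch below restates that logic with invented stand-in types (sketch::Compartment, sketch::Node, sketch::Edge); it is an illustration under those assumptions, not the real ubi:: API.

// Minimal sketch, assuming simplified stand-in types instead of ubi::Node/Edge.
#include <set>
#include <cassert>

namespace sketch {

using Compartment = int;                     // stand-in for JSCompartment*
using CompartmentSet = std::set<Compartment>;

struct Node { const Compartment* compartment; };   // nullptr = no compartment
struct Edge { Node referent; };

enum class EdgePolicy { IncludeEdges, ExcludeEdges };

bool
ShouldIncludeEdge(const CompartmentSet* compartments,
                  const Node& origin, const Edge& edge,
                  EdgePolicy* policy = nullptr)
{
    if (policy)
        *policy = EdgePolicy::IncludeEdges;

    // No target set: serialize everything, edges included.
    if (!compartments)
        return true;

    const Compartment* c = edge.referent.compartment;

    // Referent is compartment-less (e.g. a shared Shape) or inside the target
    // set: include it and keep following its edges.
    if (!c || compartments->count(*c))
        return true;

    // Referent lives in a foreign compartment: include it as a leaf (it may be
    // a shared resource) but do not follow its edges...
    if (policy)
        *policy = EdgePolicy::ExcludeEdges;

    // ...and only if the edge originates from a node that has a compartment;
    // foreign nodes reached through compartment-less nodes are dropped.
    return origin.compartment != nullptr;
}

} // namespace sketch

int main() {
    using namespace sketch;
    CompartmentSet targets{1};
    Compartment one = 1, two = 2;
    Node inTarget{&one}, foreign{&two}, shared{nullptr};

    EdgePolicy policy;
    assert(ShouldIncludeEdge(&targets, inTarget, Edge{foreign}, &policy) &&
           policy == EdgePolicy::ExcludeEdges);
    assert(ShouldIncludeEdge(&targets, inTarget, Edge{shared}, &policy) &&
           policy == EdgePolicy::IncludeEdges);
    assert(!ShouldIncludeEdge(&targets, shared, Edge{foreign}));
    return 0;
}

The final `return origin.compartment != nullptr;` mirrors the patch's `return !!origin.compartment();`: foreign-compartment nodes are kept only when reached directly from a node that itself has a compartment.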
@@ -1057,6 +1103,8 @@ class MOZ_STACK_CLASS StreamWriter : public CoreDumpWriter

    ::google::protobuf::io::ZeroCopyOutputStream& stream;

    JS::CompartmentSet* compartments;

    bool writeMessage(const ::google::protobuf::MessageLite& message) {
        // We have to create a new CodedOutputStream when writing each message so
        // that the 64MB size limit used by Coded{Output,Input}Stream to prevent
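Editor's note (not part of the patch): the truncated comment above refers to the usual length-delimited protobuf framing, where a fresh CodedOutputStream is constructed per message so the 64MB Coded{Output,Input}Stream safety limit applies per message rather than to the whole core dump. A minimal sketch of that idiom; the protobuf method names here are recalled from the C++ API rather than copied from this file, and WriteDelimited is a hypothetical helper, not the file's writeMessage().

#include <cstdint>
#include <google/protobuf/io/coded_stream.h>
#include <google/protobuf/io/zero_copy_stream.h>
#include <google/protobuf/message_lite.h>

static bool
WriteDelimited(::google::protobuf::io::ZeroCopyOutputStream& out,
               const ::google::protobuf::MessageLite& message)
{
    ::google::protobuf::io::CodedOutputStream coded(&out);             // scoped per message
    coded.WriteVarint32(static_cast<uint32_t>(message.ByteSize()));    // length prefix
    message.SerializeWithCachedSizes(&coded);                          // payload
    return !coded.HadError();
}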
@@ -1187,13 +1235,15 @@ class MOZ_STACK_CLASS StreamWriter : public CoreDumpWriter
  public:
    StreamWriter(JSContext* cx,
                 ::google::protobuf::io::ZeroCopyOutputStream& stream,
                 bool wantNames)
                 bool wantNames,
                 JS::CompartmentSet* compartments)
        : cx(cx)
        , wantNames(wantNames)
        , framesAlreadySerialized(cx)
        , twoByteStringsAlreadySerialized(cx)
        , oneByteStringsAlreadySerialized(cx)
        , stream(stream)
        , compartments(compartments)
    { }

    bool init() {

@@ -1240,6 +1290,9 @@ public:

        for ( ; !edges->empty(); edges->popFront()) {
            ubi::Edge& ubiEdge = edges->front();
            if (!ShouldIncludeEdge(compartments, ubiNode, ubiEdge)) {
                continue;
            }

            protobuf::Edge* protobufEdge = protobufNode.add_edges();
            if (NS_WARN_IF(!protobufEdge)) {

@@ -1329,29 +1382,16 @@ public:
        if (!first)
            return true;

        CoreDumpWriter::EdgePolicy policy;
        if (!ShouldIncludeEdge(compartments, origin, edge, &policy))
            return true;

        nodeCount++;

        const JS::ubi::Node& referent = edge.referent;

        if (!compartments)
            // We aren't targeting a particular set of compartments, so serialize all the
            // things!
            return writer.writeNode(referent, CoreDumpWriter::INCLUDE_EDGES);

        // We are targeting a particular set of compartments. If this node is in our target
        // set, serialize it and all of its edges. If this node is _not_ in our
        // target set, we also serialize under the assumption that it is a shared
        // resource being used by something in our target compartments since we reached it
        // by traversing the heap graph. However, we do not serialize its outgoing
        // edges and we abandon further traversal from this node.

        JSCompartment* compartment = referent.compartment();

        if (compartments->has(compartment))
            return writer.writeNode(referent, CoreDumpWriter::INCLUDE_EDGES);

        if (policy == CoreDumpWriter::EXCLUDE_EDGES)
            traversal.abandonReferent();
        return writer.writeNode(referent, CoreDumpWriter::EXCLUDE_EDGES);

        return writer.writeNode(edge.referent, policy);
    }
};

@@ -1533,11 +1573,6 @@ ThreadSafeChromeUtils::SaveHeapSnapshot(GlobalObject& global,
    ::google::protobuf::io::GzipOutputStream gzipStream(&zeroCopyStream);

    JSContext* cx = global.Context();
    StreamWriter writer(cx, gzipStream, wantNames);
    if (NS_WARN_IF(!writer.init())) {
        rv.Throw(NS_ERROR_OUT_OF_MEMORY);
        return;
    }

    {
        Maybe<AutoCheckCannotGC> maybeNoGC;

@@ -1545,6 +1580,13 @@ ThreadSafeChromeUtils::SaveHeapSnapshot(GlobalObject& global,
        if (!EstablishBoundaries(cx, rv, boundaries, rootList, compartments))
            return;

        StreamWriter writer(cx, gzipStream, wantNames,
                            compartments.initialized() ? &compartments : nullptr);
        if (NS_WARN_IF(!writer.init())) {
            rv.Throw(NS_ERROR_OUT_OF_MEMORY);
            return;
        }

        MOZ_ASSERT(maybeNoGC.isSome());
        ubi::Node roots(&rootList);
@@ -58,8 +58,9 @@ DEF_TEST(DoesCrossCompartmentBoundaries, {
    // different compartment than A.
    ExpectWriteNode(writer, nodeC);

    // However, should not serialize nodeD because nodeB doesn't belong to one
    // of our target compartments and so its edges are excluded from serialization.
    // Should serialize nodeD because it's reachable via B and both nodes B and D
    // don't belong to a specific compartment.
    ExpectWriteNode(writer, nodeD);

    JS::AutoCheckCannotGC noGC(cx);
@@ -172,14 +172,7 @@ struct ClassInfo
    macro(Objects, NonHeap, objectsNonHeapElementsAsmJS) \
    macro(Objects, NonHeap, objectsNonHeapElementsShared) \
    macro(Objects, NonHeap, objectsNonHeapCodeAsmJS) \
    macro(Objects, MallocHeap, objectsMallocHeapMisc) \
    \
    macro(Other, GCHeapUsed, shapesGCHeapTree) \
    macro(Other, GCHeapUsed, shapesGCHeapDict) \
    macro(Other, GCHeapUsed, shapesGCHeapBase) \
    macro(Other, MallocHeap, shapesMallocHeapTreeTables) \
    macro(Other, MallocHeap, shapesMallocHeapDictTables) \
    macro(Other, MallocHeap, shapesMallocHeapTreeKids)
    macro(Objects, MallocHeap, objectsMallocHeapMisc)

    ClassInfo()
      : FOR_EACH_SIZE(ZERO_SIZE)
@@ -225,6 +218,55 @@ struct ClassInfo
#undef FOR_EACH_SIZE
};

struct ShapeInfo
{
#define FOR_EACH_SIZE(macro) \
    macro(Other, GCHeapUsed, shapesGCHeapTree) \
    macro(Other, GCHeapUsed, shapesGCHeapDict) \
    macro(Other, GCHeapUsed, shapesGCHeapBase) \
    macro(Other, MallocHeap, shapesMallocHeapTreeTables) \
    macro(Other, MallocHeap, shapesMallocHeapDictTables) \
    macro(Other, MallocHeap, shapesMallocHeapTreeKids)

    ShapeInfo()
      : FOR_EACH_SIZE(ZERO_SIZE)
        dummy()
    {}

    void add(const ShapeInfo& other) {
        FOR_EACH_SIZE(ADD_OTHER_SIZE)
    }

    void subtract(const ShapeInfo& other) {
        FOR_EACH_SIZE(SUB_OTHER_SIZE)
    }

    size_t sizeOfAllThings() const {
        size_t n = 0;
        FOR_EACH_SIZE(ADD_SIZE_TO_N)
        return n;
    }

    size_t sizeOfLiveGCThings() const {
        size_t n = 0;
        FOR_EACH_SIZE(ADD_SIZE_TO_N_IF_LIVE_GC_THING)
        return n;
    }

    void addToTabSizes(TabSizes* sizes) const {
        FOR_EACH_SIZE(ADD_TO_TAB_SIZES)
    }

    void addToServoSizes(ServoSizes *sizes) const {
        FOR_EACH_SIZE(ADD_TO_SERVO_SIZES)
    }

    FOR_EACH_SIZE(DECL_SIZE)
    int dummy; // present just to absorb the trailing comma from FOR_EACH_SIZE(ZERO_SIZE)

#undef FOR_EACH_SIZE
};

/**
 * Holds data about a notable class (one whose combined object and shape
 * instances use more than a certain amount of memory) so we can report it
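Editor's note (not part of the patch): ShapeInfo reuses the X-macro pattern already used by ClassInfo and ZoneStats — a single FOR_EACH_SIZE list drives field declaration, zeroing and aggregation, so adding a measurement means editing one line. A compilable toy version of the pattern, with made-up names (MyInfo, sizeA, sizeB):

#include <cstddef>
#include <cstdio>

struct MyInfo
{
#define FOR_EACH_SIZE(macro) \
    macro(sizeA)             \
    macro(sizeB)

#define DECL_SIZE(name)      size_t name;
#define ZERO_SIZE(name)      name = 0;
#define ADD_OTHER_SIZE(name) name += other.name;
#define ADD_TO_N(name)       n += name;

    FOR_EACH_SIZE(DECL_SIZE)              // declares size_t sizeA; size_t sizeB;

    MyInfo() { FOR_EACH_SIZE(ZERO_SIZE) } // zeroes every field

    void add(const MyInfo& other) { FOR_EACH_SIZE(ADD_OTHER_SIZE) }

    size_t total() const {
        size_t n = 0;
        FOR_EACH_SIZE(ADD_TO_N)
        return n;
    }

#undef FOR_EACH_SIZE
#undef DECL_SIZE
#undef ZERO_SIZE
#undef ADD_OTHER_SIZE
#undef ADD_TO_N
};

int main() {
    MyInfo a, b;
    a.sizeA = 16; b.sizeB = 32;
    a.add(b);
    std::printf("%zu\n", a.total());  // prints 48
    return 0;
}

The real ShapeInfo keeps the extra `dummy` member because ZERO_SIZE expands with a trailing comma inside the constructor's initializer list; the toy above sidesteps that by assigning in the constructor body instead.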
@@ -597,12 +639,14 @@ struct ZoneStats
    macro(Other, MallocHeap, objectGroupsMallocHeap) \
    macro(Other, MallocHeap, typePool) \
    macro(Other, MallocHeap, baselineStubsOptimized) \
    macro(Other, MallocHeap, uniqueIdMap)
    macro(Other, MallocHeap, uniqueIdMap) \
    macro(Other, MallocHeap, shapeTables)

    ZoneStats()
      : FOR_EACH_SIZE(ZERO_SIZE)
        unusedGCThings(),
        stringInfo(),
        shapeInfo(),
        extra(),
        allStrings(nullptr),
        notableStrings(),

@@ -613,6 +657,7 @@ struct ZoneStats
      : FOR_EACH_SIZE(COPY_OTHER_SIZE)
        unusedGCThings(mozilla::Move(other.unusedGCThings)),
        stringInfo(mozilla::Move(other.stringInfo)),
        shapeInfo(mozilla::Move(other.shapeInfo)),
        extra(other.extra),
        allStrings(other.allStrings),
        notableStrings(mozilla::Move(other.notableStrings)),

@@ -636,6 +681,7 @@ struct ZoneStats
        FOR_EACH_SIZE(ADD_OTHER_SIZE)
        unusedGCThings.addSizes(other.unusedGCThings);
        stringInfo.add(other.stringInfo);
        shapeInfo.add(other.shapeInfo);
    }

    size_t sizeOfLiveGCThings() const {

@@ -643,6 +689,7 @@ struct ZoneStats
        size_t n = 0;
        FOR_EACH_SIZE(ADD_SIZE_TO_N_IF_LIVE_GC_THING)
        n += stringInfo.sizeOfLiveGCThings();
        n += shapeInfo.sizeOfLiveGCThings();
        return n;
    }

@@ -651,6 +698,7 @@ struct ZoneStats
        FOR_EACH_SIZE(ADD_TO_TAB_SIZES)
        unusedGCThings.addToTabSizes(sizes);
        stringInfo.addToTabSizes(sizes);
        shapeInfo.addToTabSizes(sizes);
    }

    void addToServoSizes(JS::ServoSizes *sizes) const {

@@ -658,6 +706,7 @@ struct ZoneStats
        FOR_EACH_SIZE(ADD_TO_SERVO_SIZES)
        unusedGCThings.addToServoSizes(sizes);
        stringInfo.addToServoSizes(sizes);
        shapeInfo.addToServoSizes(sizes);
    }

    // These string measurements are initially for all strings. At the end,

@@ -667,6 +716,7 @@ struct ZoneStats
    FOR_EACH_SIZE(DECL_SIZE)
    UnusedGCThingSizes unusedGCThings;
    StringInfo stringInfo;
    ShapeInfo shapeInfo;
    void* extra; // This field can be used by embedders.

    typedef js::HashMap<JSString*, StringInfo,
@@ -1211,6 +1211,13 @@ js::ObjectGroup::traceChildren(JSTracer* trc)
    if (proto().isObject())
        TraceEdge(trc, &proto(), "group_proto");

    if (trc->isMarkingTracer())
        compartment()->mark();

    if (JSObject* global = compartment()->unsafeUnbarrieredMaybeGlobal())
        TraceManuallyBarrieredEdge(trc, &global, "group_global");

    if (newScript())
        newScript()->trace(trc);
@@ -179,16 +179,7 @@ JS::TraceIncomingCCWs(JSTracer* trc, const JS::CompartmentSet& compartments)
void
gc::TraceCycleCollectorChildren(JS::CallbackTracer* trc, Shape* shape)
{
    // We need to mark the global, but it's OK to only do this once instead of
    // doing it for every Shape in our lineage, since it's always the same
    // global.
    JSObject* global = shape->compartment()->unsafeUnbarrieredMaybeGlobal();
    MOZ_ASSERT(global);
    DoCallback(trc, &global, "global");

    do {
        MOZ_ASSERT(global == shape->compartment()->unsafeUnbarrieredMaybeGlobal());

        MOZ_ASSERT(shape->base());
        shape->base()->assertConsistency();
@@ -36,6 +36,9 @@ JS::Zone::Zone(JSRuntime* rt)
    gcMallocGCTriggered(false),
    usage(&rt->gc.usage),
    gcDelayBytes(0),
    propertyTree(this),
    baseShapes(this, BaseShapeSet()),
    initialShapes(this, InitialShapeSet()),
    data(nullptr),
    isSystem(false),
    usedByExclusiveThread(false),

@@ -346,6 +349,21 @@ Zone::nextZone() const
    return listNext_;
}

void
Zone::clearTables()
{
    if (baseShapes.initialized())
        baseShapes.clear();
    if (initialShapes.initialized())
        initialShapes.clear();
}

void
Zone::fixupAfterMovingGC()
{
    fixupInitialShapeTable();
}

ZoneList::ZoneList()
  : head(nullptr), tail(nullptr)
{}
@@ -148,7 +148,8 @@ struct Zone : public JS::shadow::Zone,
    void addSizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf,
                                size_t* typePool,
                                size_t* baselineStubsOptimized,
                                size_t* uniqueIdMap);
                                size_t* uniqueIdMap,
                                size_t* shapeTables);

    void resetGCMallocBytes();
    void setGCMaxMallocBytes(size_t value);

@@ -297,6 +298,8 @@ struct Zone : public JS::shadow::Zone,
    DebuggerVector* getDebuggers() const { return debuggers; }
    DebuggerVector* getOrCreateDebuggers(JSContext* cx);

    void clearTables();

    /*
     * When true, skip calling the metadata callback. We use this:
     * - to avoid invoking the callback recursively;

@@ -380,6 +383,22 @@ struct Zone : public JS::shadow::Zone,
    // the current GC.
    size_t gcDelayBytes;

    // Shared Shape property tree.
    js::PropertyTree propertyTree;

    // Set of all unowned base shapes in the Zone.
    JS::WeakCache<js::BaseShapeSet> baseShapes;

    // Set of initial shapes in the Zone.
    JS::WeakCache<js::InitialShapeSet> initialShapes;

#ifdef JSGC_HASH_TABLE_CHECKS
    void checkInitialShapesTableAfterMovingGC();
    void checkBaseShapeTableAfterMovingGC();
#endif
    void fixupInitialShapeTable();
    void fixupAfterMovingGC();

    // Per-zone data for use by an embedder.
    void* data;
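Editor's note (not part of the patch): baseShapes and initialShapes are now per-zone JS::WeakCache wrappers around the existing hash sets, i.e. tables that the GC sweeps so entries whose referents died simply drop out. The toy below only illustrates that sweep-on-collection idea with an explicit liveness predicate; it is a standalone stand-in, not the engine's WeakCache.

#include <unordered_set>
#include <functional>

template <typename T>
class ToyWeakSet
{
    std::unordered_set<T*> entries_;

  public:
    void put(T* value) { entries_.insert(value); }
    bool has(T* value) const { return entries_.count(value) != 0; }

    // Called "after GC": drop every entry the collector found dead.
    void sweep(const std::function<bool(T*)>& isLive) {
        for (auto it = entries_.begin(); it != entries_.end(); ) {
            if (isLive(*it))
                ++it;
            else
                it = entries_.erase(it);
        }
    }
};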
@@ -35,9 +35,8 @@ Match.Pattern([{node: gc, edge: "fun_environment"},
  .assert(findPath(gc, o));
print(uneval(findPath(gc, o)));

Match.Pattern([{node: {}, edge: "shape"},
               {node: Match.Pattern.ANY, edge: "base"},
               {node: Match.Pattern.ANY, edge: "global"},
Match.Pattern([{node: {}, edge: "group"},
               {node: Match.Pattern.ANY, edge: "group_global"},
               {node: {}, edge: "o"}])
  .assert(findPath(o, o));
print(findPath(o, o).map((e) => e.edge).toString());
@@ -66,9 +66,6 @@ JSCompartment::JSCompartment(Zone* zone, const JS::CompartmentOptions& options =
    globalWriteBarriered(0),
    detachedTypedObjects(0),
    objectMetadataState(ImmediateMetadata()),
    propertyTree(thisForCtor()),
    baseShapes(zone, BaseShapeSet()),
    initialShapes(zone, InitialShapeSet()),
    selfHostingScriptSource(nullptr),
    objectMetadataTable(nullptr),
    innerViews(zone, InnerViewTable()),

@@ -850,7 +847,6 @@ void
JSCompartment::fixupAfterMovingGC()
{
    fixupGlobal();
    fixupInitialShapeTable();
    objectGroups.fixupTablesAfterMovingGC();
    dtoaCache.purge();
    fixupScriptMapsAfterMovingGC();

@@ -938,10 +934,6 @@ JSCompartment::clearTables()
    MOZ_ASSERT(regExps.empty());

    objectGroups.clearTables();
    if (baseShapes.initialized())
        baseShapes.clear();
    if (initialShapes.initialized())
        initialShapes.clear();
    if (savedStacks_.initialized())
        savedStacks_.clear();
}

@@ -1211,8 +1203,6 @@ JSCompartment::addSizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf,
                                      tiArrayTypeTables, tiObjectTypeTables,
                                      compartmentTables);
    wasm.addSizeOfExcludingThis(mallocSizeOf, compartmentTables);
    *compartmentTables += baseShapes.sizeOfExcludingThis(mallocSizeOf)
                        + initialShapes.sizeOfExcludingThis(mallocSizeOf);
    *innerViewsArg += innerViews.sizeOfExcludingThis(mallocSizeOf);
    if (lazyArrayBuffers)
        *lazyArrayBuffersArg += lazyArrayBuffers->sizeOfIncludingThis(mallocSizeOf);
@@ -477,24 +477,11 @@ struct JSCompartment
                                size_t* jitCompartment,
                                size_t* privateData);

    /*
     * Shared scope property tree, and arena-pool for allocating its nodes.
     */
    js::PropertyTree propertyTree;

    /* Set of all unowned base shapes in the compartment. */
    JS::WeakCache<js::BaseShapeSet> baseShapes;

    /* Set of initial shapes in the compartment. */
    JS::WeakCache<js::InitialShapeSet> initialShapes;

    // Object group tables and other state in the compartment.
    js::ObjectGroupCompartment objectGroups;

#ifdef JSGC_HASH_TABLE_CHECKS
    void checkInitialShapesTableAfterMovingGC();
    void checkWrapperMapAfterMovingGC();
    void checkBaseShapeTableAfterMovingGC();
    void checkScriptMapsAfterMovingGC();
#endif

@@ -636,7 +623,6 @@ struct JSCompartment
    void clearTables();

    static void fixupCrossCompartmentWrappersAfterMovingGC(JSTracer* trc);
    void fixupInitialShapeTable();
    void fixupAfterMovingGC();
    void fixupGlobal();
    void fixupScriptMapsAfterMovingGC();
@@ -2507,6 +2507,8 @@ GCRuntime::updatePointersToRelocatedCells(Zone* zone, AutoLockForExclusiveAccess
    gcstats::AutoPhase ap(stats, gcstats::PHASE_COMPACT_UPDATE);
    MovingTracer trc(rt);

    zone->fixupAfterMovingGC();

    // Fixup compartment global pointers as these get accessed during marking.
    for (CompartmentsInZoneIter comp(zone); !comp.done(); comp.next())
        comp->fixupAfterMovingGC();

@@ -6574,6 +6576,7 @@ gc::MergeCompartments(JSCompartment* source, JSCompartment* target)
    // meaningless after merging into the target compartment.

    source->clearTables();
    source->zone()->clearTables();
    source->unsetIsDebuggee();

    // The delazification flag indicates the presence of LazyScripts in a

@@ -6626,11 +6629,6 @@ gc::MergeCompartments(JSCompartment* source, JSCompartment* target)
        }
    }

    for (auto base = source->zone()->cellIter<BaseShape>(); !base.done(); base.next()) {
        MOZ_ASSERT(base->compartment() == source);
        base->compartment_ = target;
    }

    for (auto group = source->zone()->cellIter<ObjectGroup>(); !group.done(); group.next()) {
        group->setGeneration(target->zone()->types.generation);
        group->compartment_ = target;

@@ -7058,6 +7056,8 @@ js::gc::CheckHashTablesAfterMovingGC(JSRuntime* rt)
    rt->spsProfiler.checkStringsMapAfterMovingGC();
    for (ZonesIter zone(rt, SkipAtoms); !zone.done(); zone.next()) {
        zone->checkUniqueIdTableAfterMovingGC();
        zone->checkInitialShapesTableAfterMovingGC();
        zone->checkBaseShapeTableAfterMovingGC();

        for (auto baseShape = zone->cellIter<BaseShape>(); !baseShape.done(); baseShape.next()) {
            if (baseShape->hasTable())

@@ -7067,9 +7067,7 @@ js::gc::CheckHashTablesAfterMovingGC(JSRuntime* rt)
    for (CompartmentsIter c(rt, SkipAtoms); !c.done(); c.next()) {
        c->objectGroups.checkTablesAfterMovingGC();
        c->dtoaCache.checkCacheAfterMovingGC();
        c->checkInitialShapesTableAfterMovingGC();
        c->checkWrapperMapAfterMovingGC();
        c->checkBaseShapeTableAfterMovingGC();
        c->checkScriptMapsAfterMovingGC();
        if (c->debugScopes)
            c->debugScopes->checkHashTablesAfterMovingGC(rt);
@@ -521,7 +521,7 @@ js::SetIntegrityLevel(JSContext* cx, HandleObject obj, IntegrityLevel level)
            if (!JSID_IS_EMPTY(child.get().propid) && level == IntegrityLevel::Frozen)
                MarkTypePropertyNonWritable(cx, nobj, child.get().propid);

            last = cx->compartment()->propertyTree.getChild(cx, last, child);
            last = cx->zone()->propertyTree.getChild(cx, last, child);
            if (!last)
                return false;
        }

@@ -1353,7 +1353,7 @@ InitializePropertiesFromCompatibleNativeObject(JSContext* cx,

        for (Shape* shape : shapes) {
            Rooted<StackShape> child(cx, StackShape(shape));
            shape = cx->compartment()->propertyTree.getChild(cx, shape, child);
            shape = cx->zone()->propertyTree.getChild(cx, shape, child);
            if (!shape)
                return false;
        }
@@ -54,8 +54,8 @@ PropertyTree::insertChild(ExclusiveContext* cx, Shape* parent, Shape* child)
    MOZ_ASSERT(!parent->inDictionary());
    MOZ_ASSERT(!child->parent);
    MOZ_ASSERT(!child->inDictionary());
    MOZ_ASSERT(child->compartment() == parent->compartment());
    MOZ_ASSERT(cx->isInsideCurrentCompartment(this));
    MOZ_ASSERT(child->zone() == parent->zone());
    MOZ_ASSERT(cx->zone() == zone_);

    KidsPointer* kidp = &parent->kids;
@@ -72,7 +72,9 @@ class PropertyTree
{
    friend class ::JSFunction;

    JSCompartment* compartment_;
#ifdef DEBUG
    JS::Zone* zone_;
#endif

    bool insertChild(ExclusiveContext* cx, Shape* parent, Shape* child);

@@ -89,13 +91,13 @@ class PropertyTree
        MAX_HEIGHT_WITH_ELEMENTS_ACCESS = 128
    };

    explicit PropertyTree(JSCompartment* comp)
      : compartment_(comp)
    explicit PropertyTree(JS::Zone* zone)
#ifdef DEBUG
      : zone_(zone)
#endif
    {
    }

    JSCompartment* compartment() { return compartment_; }

    Shape* getChild(ExclusiveContext* cx, Shape* parent, JS::Handle<StackShape> child);
};
@@ -203,7 +203,7 @@ Bindings::initWithTemporaryStorage(ExclusiveContext* cx, MutableHandle<Bindings>
                        (bi->kind() == Binding::CONSTANT ? JSPROP_READONLY : 0);
        Rooted<StackShape> child(cx, StackShape(base, NameToId(bi->name()), slot, attrs, 0));

        shape = cx->compartment()->propertyTree.getChild(cx, shape, child);
        shape = cx->zone()->propertyTree.getChild(cx, shape, child);
        if (!shape)
            return false;
@@ -840,7 +840,7 @@ GlobalObject::addIntrinsicValue(JSContext* cx, Handle<GlobalObject*> global,

    RootedId id(cx, NameToId(name));
    Rooted<StackShape> child(cx, StackShape(base, id, slot, 0, 0));
    Shape* shape = cx->compartment()->propertyTree.getChild(cx, last, child);
    Shape* shape = cx->zone()->propertyTree.getChild(cx, last, child);
    if (!shape)
        return false;
@@ -322,7 +322,8 @@ StatsZoneCallback(JSRuntime* rt, void* data, Zone* zone)
    zone->addSizeOfIncludingThis(rtStats->mallocSizeOf_,
                                 &zStats.typePool,
                                 &zStats.baselineStubsOptimized,
                                 &zStats.uniqueIdMap);
                                 &zStats.uniqueIdMap,
                                 &zStats.shapeTables);
}

static void

@@ -546,49 +547,11 @@ StatsCellCallback(JSRuntime* rt, void* data, void* thing, JS::TraceKind traceKin
        break;

      case JS::TraceKind::BaseShape: {
        BaseShape* base = static_cast<BaseShape*>(thing);
        CompartmentStats& cStats = base->compartment()->compartmentStats();

        JS::ClassInfo info; // This zeroes all the sizes.
        JS::ShapeInfo info; // This zeroes all the sizes.
        info.shapesGCHeapBase += thingSize;
        // No malloc-heap measurements.

        cStats.classInfo.add(info);

        // XXX: This code is currently disabled because it occasionally causes
        // crashes (bug 1132502 and bug 1243529). The best theory as to why is
        // as follows.
        //
        // - XPCNativeScriptableShared have heap-allocated js::Class instances.
        //
        // - Once an XPCNativeScriptableShared is destroyed, its js::Class is
        //   freed, but we can still have a BaseShape with a clasp_ pointer
        //   that points to the freed js::Class.
        //
        // - This dangling pointer isn't used in normal execution, because the
        //   BaseShape is unreachable.
        //
        // - However, memory reporting inspects all GC cells, reachable or not,
        //   so we trace the dangling pointer and crash.
        //
        // One solution would be to mark BaseShapes whose js::Class is
        // heap-allocated, and skip this code just for them. However, that's a
        // non-trivial change, and heap-allocated js::Class instances are
        // likely to go away soon.
        //
        // So for now we just skip this code for all BaseShapes. The
        // consequence is that all BaseShapes will show up in about:memory
        // under "class(<non-notable classes>)" sub-trees, instead of the more
        // appropriate, class-specific "class(Foo)" sub-tree. But BaseShapes
        // typically don't take up that much memory so this isn't a big deal.
        //
        // XXX: once bug 1265271 is done this code should be re-enabled.
        //
        if (0) {
            const Class* clasp = base->clasp();
            const char* className = clasp->name;
            AddClassInfo(granularity, cStats, className, info);
        }
        zStats->shapeInfo.add(info);
        break;
      }

@@ -607,23 +570,14 @@ StatsCellCallback(JSRuntime* rt, void* data, void* thing, JS::TraceKind traceKin

      case JS::TraceKind::Shape: {
        Shape* shape = static_cast<Shape*>(thing);
        CompartmentStats& cStats = shape->compartment()->compartmentStats();
        JS::ClassInfo info; // This zeroes all the sizes.

        JS::ShapeInfo info; // This zeroes all the sizes.
        if (shape->inDictionary())
            info.shapesGCHeapDict += thingSize;
        else
            info.shapesGCHeapTree += thingSize;
        shape->addSizeOfExcludingThis(rtStats->mallocSizeOf_, &info);
        cStats.classInfo.add(info);

        // XXX: once bug 1265271 is done, this code should be
        // re-enabled. (See the big comment on the BaseShape case above.)
        if (0) {
            const BaseShape* base = shape->base();
            const Class* clasp = base->clasp();
            const char* className = clasp->name;
            AddClassInfo(granularity, cStats, className, info);
        }
        zStats->shapeInfo.add(info);
        break;
      }
@@ -294,7 +294,7 @@ NativeObject::setLastProperty(ExclusiveContext* cx, Shape* shape)
{
    MOZ_ASSERT(!inDictionaryMode());
    MOZ_ASSERT(!shape->inDictionary());
    MOZ_ASSERT(shape->compartment() == compartment());
    MOZ_ASSERT(shape->zone() == zone());
    MOZ_ASSERT(shape->numFixedSlots() == numFixedSlots());
    MOZ_ASSERT(shape->getObjectClass() == getClass());

@@ -318,7 +318,7 @@ NativeObject::setLastPropertyShrinkFixedSlots(Shape* shape)
{
    MOZ_ASSERT(!inDictionaryMode());
    MOZ_ASSERT(!shape->inDictionary());
    MOZ_ASSERT(shape->compartment() == compartment());
    MOZ_ASSERT(shape->zone() == zone());
    MOZ_ASSERT(lastProperty()->slotSpan() == shape->slotSpan());
    MOZ_ASSERT(shape->getObjectClass() == getClass());

@@ -338,7 +338,7 @@ NativeObject::setLastPropertyMakeNonNative(Shape* shape)
{
    MOZ_ASSERT(!inDictionaryMode());
    MOZ_ASSERT(!shape->getObjectClass()->isNative());
    MOZ_ASSERT(shape->compartment() == compartment());
    MOZ_ASSERT(shape->zone() == zone());
    MOZ_ASSERT(shape->slotSpan() == 0);
    MOZ_ASSERT(shape->numFixedSlots() == 0);
@@ -1,3 +1,4 @@

/* -*- Mode: C++; tab-width: 8; indent-tabs-mode: nil; c-basic-offset: 4 -*-
 * vim: set ts=8 sts=4 et sw=4 tw=99:
 * This Source Code Form is subject to the terms of the Mozilla Public

@@ -26,8 +27,7 @@ namespace js {
inline
StackBaseShape::StackBaseShape(ExclusiveContext* cx, const Class* clasp, uint32_t objectFlags)
  : flags(objectFlags),
    clasp(clasp),
    compartment(cx->compartment_)
    clasp(clasp)
{}

inline Shape*
@@ -99,7 +99,7 @@ Shape::insertIntoDictionary(GCPtrShape* dictp)

    MOZ_ASSERT_IF(*dictp, (*dictp)->inDictionary());
    MOZ_ASSERT_IF(*dictp, (*dictp)->listp == dictp);
    MOZ_ASSERT_IF(*dictp, compartment() == (*dictp)->compartment());
    MOZ_ASSERT_IF(*dictp, zone() == (*dictp)->zone());

    setParent(dictp->get());
    if (parent)

@@ -112,7 +112,7 @@ bool
Shape::makeOwnBaseShape(ExclusiveContext* cx)
{
    MOZ_ASSERT(!base()->isOwned());
    assertSameCompartmentDebugOnly(cx, compartment());
    MOZ_ASSERT(cx->zone() == zone());

    BaseShape* nbase = Allocate<BaseShape, NoGC>(cx);
    if (!nbase)

@@ -373,7 +373,7 @@ Shape::replaceLastProperty(ExclusiveContext* cx, StackBaseShape& base,
    Rooted<StackShape> child(cx, StackShape(shape));
    child.setBase(nbase);

    return cx->compartment()->propertyTree.getChild(cx, shape->parent, child);
    return cx->zone()->propertyTree.getChild(cx, shape->parent, child);
}

/*

@@ -443,7 +443,7 @@ NativeObject::getChildProperty(ExclusiveContext* cx,
    Shape* shape = getChildPropertyOnDictionary(cx, obj, parent, child);

    if (!obj->inDictionaryMode()) {
        shape = cx->compartment()->propertyTree.getChild(cx, parent, child);
        shape = cx->zone()->propertyTree.getChild(cx, parent, child);
        if (!shape)
            return nullptr;
        //MOZ_ASSERT(shape->parent == parent);

@@ -690,7 +690,7 @@ js::ReshapeForAllocKind(JSContext* cx, Shape* shape, TaggedProto proto,
        }

        Rooted<StackShape> child(cx, StackShape(nbase, id, i, JSPROP_ENUMERATE, 0));
        newShape = cx->compartment()->propertyTree.getChild(cx, newShape, child);
        newShape = cx->zone()->propertyTree.getChild(cx, newShape, child);
        if (!newShape)
            return nullptr;
    }

@@ -1118,7 +1118,7 @@ Shape*
NativeObject::replaceWithNewEquivalentShape(ExclusiveContext* cx, Shape* oldShape, Shape* newShape,
                                            bool accessorShape)
{
    MOZ_ASSERT(cx->isInsideCurrentCompartment(oldShape));
    MOZ_ASSERT(cx->isInsideCurrentZone(oldShape));
    MOZ_ASSERT_IF(oldShape != lastProperty(),
                  inDictionaryMode() && lookup(cx, oldShape->propidRef()) == oldShape);

@@ -1261,7 +1261,6 @@ StackBaseShape::match(ReadBarriered<UnownedBaseShape*> key, const Lookup& lookup
inline
BaseShape::BaseShape(const StackBaseShape& base)
  : clasp_(base.clasp),
    compartment_(base.compartment),
    flags(base.flags),
    slotSpan_(0),
    unowned_(nullptr),

@@ -1274,7 +1273,6 @@ BaseShape::copyFromUnowned(BaseShape& dest, UnownedBaseShape& src)
{
    dest.clasp_ = src.clasp_;
    dest.slotSpan_ = src.slotSpan_;
    dest.compartment_ = src.compartment_;
    dest.unowned_ = &src;
    dest.flags = src.flags | OWNED_SHAPE;
}

@@ -1299,7 +1297,7 @@ BaseShape::adoptUnowned(UnownedBaseShape* other)
/* static */ UnownedBaseShape*
BaseShape::getUnowned(ExclusiveContext* cx, StackBaseShape& base)
{
    auto& table = cx->compartment()->baseShapes;
    auto& table = cx->zone()->baseShapes;

    if (!table.initialized() && !table.init()) {
        ReportOutOfMemory(cx);
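Editor's note (not part of the patch): getUnowned() is the canonicalization path the per-zone table serves — lazily create the table, then look the key up and only allocate a new UnownedBaseShape on a miss, so equivalent StackBaseShapes from every compartment in the zone now share one entry. A standalone sketch of that lookup-or-add shape, with invented stand-in types (Key, ToyZone):

#include <unordered_set>
#include <functional>
#include <memory>
#include <new>

struct Key { int clasp; unsigned flags; };

struct KeyHash {
    size_t operator()(const Key& k) const {
        return std::hash<int>()(k.clasp) ^ (std::hash<unsigned>()(k.flags) << 1);
    }
};
struct KeyEq {
    bool operator()(const Key& a, const Key& b) const {
        return a.clasp == b.clasp && a.flags == b.flags;
    }
};

class ToyZone {
    // Lazily-initialized table, mirroring `if (!table.initialized() && !table.init())`.
    std::unique_ptr<std::unordered_set<Key, KeyHash, KeyEq>> baseShapes_;

  public:
    // Returns the canonical entry for `key`, creating the table and/or the
    // entry on demand; returns nullptr only on (simulated) OOM.
    const Key* getUnowned(const Key& key) {
        if (!baseShapes_) {
            baseShapes_.reset(new (std::nothrow) std::unordered_set<Key, KeyHash, KeyEq>());
            if (!baseShapes_)
                return nullptr;   // ReportOutOfMemory(cx) in the real code
        }
        auto p = baseShapes_->insert(key);   // lookup-or-add in one step
        return &*p.first;
    }
};

In the real code the miss path also allocates a GC cell and checks for OOM separately; the sketch collapses that into std::unordered_set::insert.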
@@ -1345,16 +1343,9 @@ BaseShape::traceChildren(JSTracer* trc)
void
BaseShape::traceChildrenSkipShapeTable(JSTracer* trc)
{
    if (trc->isMarkingTracer())
        compartment()->mark();

    if (isOwned())
        TraceEdge(trc, &unowned_, "base");

    JSObject* global = compartment()->unsafeUnbarrieredMaybeGlobal();
    if (global)
        TraceManuallyBarrieredEdge(trc, &global, "global");

    assertConsistency();
}

@@ -1390,7 +1381,7 @@ BaseShape::canSkipMarkingShapeTable(Shape* lastShape)
#ifdef JSGC_HASH_TABLE_CHECKS

void
JSCompartment::checkBaseShapeTableAfterMovingGC()
Zone::checkBaseShapeTableAfterMovingGC()
{
    if (!baseShapes.initialized())
        return;

@@ -1453,7 +1444,7 @@ InitialShapeEntry::match(const InitialShapeEntry& key, const Lookup& lookup)
#ifdef JSGC_HASH_TABLE_CHECKS

void
JSCompartment::checkInitialShapesTableAfterMovingGC()
Zone::checkInitialShapesTableAfterMovingGC()
{
    if (!initialShapes.initialized())
        return;

@@ -1502,7 +1493,7 @@ EmptyShape::getInitialShape(ExclusiveContext* cx, const Class* clasp, TaggedProt
{
    MOZ_ASSERT_IF(proto.isObject(), cx->isInsideCurrentCompartment(proto.toObject()));

    auto& table = cx->compartment()->initialShapes;
    auto& table = cx->zone()->initialShapes;

    if (!table.initialized() && !table.init()) {
        ReportOutOfMemory(cx);

@@ -1550,7 +1541,6 @@ NewObjectCache::invalidateEntriesForShape(JSContext* cx, HandleShape shape, Hand
    if (CanBeFinalizedInBackground(kind, clasp))
        kind = GetBackgroundAllocKind(kind);

    Rooted<GlobalObject*> global(cx, shape->compartment()->unsafeUnbarrieredMaybeGlobal());
    RootedObjectGroup group(cx, ObjectGroup::defaultNewGroup(cx, clasp, TaggedProto(proto)));
    if (!group) {
        purge();

@@ -1559,8 +1549,12 @@ NewObjectCache::invalidateEntriesForShape(JSContext* cx, HandleShape shape, Hand
    }

    EntryIndex entry;
    for (CompartmentsInZoneIter comp(shape->zone()); !comp.done(); comp.next()) {
        if (GlobalObject* global = comp->unsafeUnbarrieredMaybeGlobal()) {
            if (lookupGlobal(clasp, global, kind, &entry))
                PodZero(&entries[entry]);
        }
    }
    if (!proto->is<GlobalObject>() && lookupProto(clasp, proto, kind, &entry))
        PodZero(&entries[entry]);
    if (lookupGroup(group, kind, &entry))

@@ -1573,7 +1567,7 @@ EmptyShape::insertInitialShape(ExclusiveContext* cx, HandleShape shape, HandleOb
    InitialShapeEntry::Lookup lookup(shape->getObjectClass(), TaggedProto(proto),
                                     shape->numFixedSlots(), shape->getObjectFlags());

    InitialShapeSet::Ptr p = cx->compartment()->initialShapes.lookup(lookup);
    InitialShapeSet::Ptr p = cx->zone()->initialShapes.lookup(lookup);
    MOZ_ASSERT(p);

    InitialShapeEntry& entry = const_cast<InitialShapeEntry&>(*p);

@@ -1609,7 +1603,7 @@ EmptyShape::insertInitialShape(ExclusiveContext* cx, HandleShape shape, HandleOb
}

void
JSCompartment::fixupInitialShapeTable()
Zone::fixupInitialShapeTable()
{
    if (!initialShapes.initialized())
        return;
@@ -375,7 +375,6 @@ class BaseShape : public gc::TenuredCell

  private:
    const Class* clasp_;        /* Class of referring object. */
    JSCompartment* compartment_; /* Compartment shape belongs to. */
    uint32_t flags;             /* Vector of above flags. */
    uint32_t slotSpan_;         /* Object slot span for BaseShapes at
                                 * dictionary last properties. */

@@ -386,6 +385,11 @@ class BaseShape : public gc::TenuredCell
    /* For owned BaseShapes, the shape's shape table. */
    ShapeTable* table_;

#if JS_BITS_PER_WORD == 32
    // Ensure sizeof(BaseShape) is a multiple of gc::CellSize.
    uint32_t padding_;
#endif

    BaseShape(const BaseShape& base) = delete;
    BaseShape& operator=(const BaseShape& other) = delete;

@@ -398,7 +402,6 @@ class BaseShape : public gc::TenuredCell
        mozilla::PodZero(this);
        this->clasp_ = clasp;
        this->flags = objectFlags;
        this->compartment_ = comp;
    }

    explicit inline BaseShape(const StackBaseShape& base);

@@ -427,9 +430,6 @@ class BaseShape : public gc::TenuredCell
    uint32_t slotSpan() const { MOZ_ASSERT(isOwned()); return slotSpan_; }
    void setSlotSpan(uint32_t slotSpan) { MOZ_ASSERT(isOwned()); slotSpan_ = slotSpan; }

    JSCompartment* compartment() const { return compartment_; }
    JSCompartment* maybeCompartment() const { return compartment(); }

    /*
     * Lookup base shapes from the compartment's baseShapes table, adding if
     * not already found.

@@ -498,12 +498,10 @@ struct StackBaseShape : public DefaultHasher<ReadBarriered<UnownedBaseShape*>>
{
    uint32_t flags;
    const Class* clasp;
    JSCompartment* compartment;

    explicit StackBaseShape(BaseShape* base)
      : flags(base->flags & BaseShape::OBJECT_FLAG_MASK),
        clasp(base->clasp_),
        compartment(base->compartment())
        clasp(base->clasp_)
    {}

    inline StackBaseShape(ExclusiveContext* cx, const Class* clasp, uint32_t objectFlags);

@@ -637,7 +635,7 @@ class Shape : public gc::TenuredCell
    ShapeTable& table() const { return base()->table(); }

    void addSizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf,
                                JS::ClassInfo* info) const
                                JS::ShapeInfo* info) const
    {
        if (hasTable()) {
            if (inDictionary())

@@ -660,8 +658,6 @@ class Shape : public gc::TenuredCell
    }

    const GCPtrShape& previous() const { return parent; }
    JSCompartment* compartment() const { return base()->compartment(); }
    JSCompartment* maybeCompartment() const { return compartment(); }

    template <AllowGC allowGC>
    class Range {

@@ -1011,8 +1007,7 @@ class AccessorShape : public Shape
inline
StackBaseShape::StackBaseShape(Shape* shape)
  : flags(shape->getObjectFlags()),
    clasp(shape->getObjectClass()),
    compartment(shape->compartment())
    clasp(shape->getObjectClass())
{}

class MOZ_RAII AutoRooterGetterSetter

@@ -1466,9 +1461,9 @@ namespace JS {
namespace ubi {

template<>
class Concrete<js::Shape> : TracerConcreteWithCompartment<js::Shape> {
class Concrete<js::Shape> : TracerConcrete<js::Shape> {
  protected:
    explicit Concrete(js::Shape *ptr) : TracerConcreteWithCompartment<js::Shape>(ptr) { }
    explicit Concrete(js::Shape *ptr) : TracerConcrete<js::Shape>(ptr) { }

  public:
    static void construct(void *storage, js::Shape *ptr) { new (storage) Concrete(ptr); }

@@ -1480,9 +1475,9 @@ class Concrete<js::Shape> : TracerConcreteWithCompartment<js::Shape> {
};

template<>
class Concrete<js::BaseShape> : TracerConcreteWithCompartment<js::BaseShape> {
class Concrete<js::BaseShape> : TracerConcrete<js::BaseShape> {
  protected:
    explicit Concrete(js::BaseShape *ptr) : TracerConcreteWithCompartment<js::BaseShape>(ptr) { }
    explicit Concrete(js::BaseShape *ptr) : TracerConcrete<js::BaseShape>(ptr) { }

  public:
    static void construct(void *storage, js::BaseShape *ptr) { new (storage) Concrete(ptr); }
@@ -4337,7 +4337,8 @@ void
Zone::addSizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf,
                             size_t* typePool,
                             size_t* baselineStubsOptimized,
                             size_t* uniqueIdMap)
                             size_t* uniqueIdMap,
                             size_t* shapeTables)
{
    *typePool += types.typeLifoAlloc.sizeOfExcludingThis(mallocSizeOf);
    if (jitZone()) {

@@ -4345,6 +4346,8 @@ Zone::addSizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf,
            jitZone()->optimizedStubSpace()->sizeOfExcludingThis(mallocSizeOf);
    }
    *uniqueIdMap += uniqueIds_.sizeOfExcludingThis(mallocSizeOf);
    *shapeTables += baseShapes.sizeOfExcludingThis(mallocSizeOf)
                  + initialShapes.sizeOfExcludingThis(mallocSizeOf);
}

TypeZone::TypeZone(Zone* zone)
@@ -341,8 +341,6 @@ TracerConcreteWithCompartment<Referent>::compartment() const
}

template JSCompartment* TracerConcreteWithCompartment<JSScript>::compartment() const;
template JSCompartment* TracerConcreteWithCompartment<js::Shape>::compartment() const;
template JSCompartment* TracerConcreteWithCompartment<js::BaseShape>::compartment() const;

bool
Concrete<JSObject>::hasAllocationStack() const
@@ -498,7 +498,7 @@ UnboxedLayout::makeNativeGroup(JSContext* cx, ObjectGroup* group)

        Rooted<StackShape> child(cx, StackShape(shape->base()->unowned(), NameToId(property.name),
                                                i, JSPROP_ENUMERATE, 0));
        shape = cx->compartment()->propertyTree.getChild(cx, shape, child);
        shape = cx->zone()->propertyTree.getChild(cx, shape, child);
        if (!shape)
            return false;
    }
@@ -1964,6 +1964,10 @@ ReportZoneStats(const JS::ZoneStats& zStats,
                      zStats.uniqueIdMap,
                      "Address-independent cell identities.");

    ZCREPORT_BYTES(pathPrefix + NS_LITERAL_CSTRING("shape-tables"),
                   zStats.shapeTables,
                   "Tables storing shape information.");

    ZCREPORT_GC_BYTES(pathPrefix + NS_LITERAL_CSTRING("lazy-scripts/gc-heap"),
                      zStats.lazyScriptsGCHeap,
                      "Scripts that haven't executed yet.");

@@ -2118,6 +2122,43 @@ ReportZoneStats(const JS::ZoneStats& zStats,
                     "is refreshed.");
    }

    const JS::ShapeInfo& shapeInfo = zStats.shapeInfo;
    if (shapeInfo.shapesGCHeapTree > 0) {
        REPORT_GC_BYTES(pathPrefix + NS_LITERAL_CSTRING("shapes/gc-heap/tree"),
                        shapeInfo.shapesGCHeapTree,
                        "Shapes in a property tree.");
    }

    if (shapeInfo.shapesGCHeapDict > 0) {
        REPORT_GC_BYTES(pathPrefix + NS_LITERAL_CSTRING("shapes/gc-heap/dict"),
                        shapeInfo.shapesGCHeapDict,
                        "Shapes in dictionary mode.");
    }

    if (shapeInfo.shapesGCHeapBase > 0) {
        REPORT_GC_BYTES(pathPrefix + NS_LITERAL_CSTRING("shapes/gc-heap/base"),
                        shapeInfo.shapesGCHeapBase,
                        "Base shapes, which collate data common to many shapes.");
    }

    if (shapeInfo.shapesMallocHeapTreeTables > 0) {
        REPORT_BYTES(pathPrefix + NS_LITERAL_CSTRING("shapes/malloc-heap/tree-tables"),
                     KIND_HEAP, shapeInfo.shapesMallocHeapTreeTables,
                     "Property tables of shapes in a property tree.");
    }

    if (shapeInfo.shapesMallocHeapDictTables > 0) {
        REPORT_BYTES(pathPrefix + NS_LITERAL_CSTRING("shapes/malloc-heap/dict-tables"),
                     KIND_HEAP, shapeInfo.shapesMallocHeapDictTables,
                     "Property tables of shapes in dictionary mode.");
    }

    if (shapeInfo.shapesMallocHeapTreeKids > 0) {
        REPORT_BYTES(pathPrefix + NS_LITERAL_CSTRING("shapes/malloc-heap/tree-kids"),
                     KIND_HEAP, shapeInfo.shapesMallocHeapTreeKids,
                     "Kid hashes of shapes in a property tree.");
    }

    if (sundriesGCHeap > 0) {
        // We deliberately don't use ZCREPORT_GC_BYTES here.
        REPORT_GC_BYTES(pathPrefix + NS_LITERAL_CSTRING("sundries/gc-heap"),

@@ -2144,7 +2185,7 @@ ReportZoneStats(const JS::ZoneStats& zStats,

static nsresult
ReportClassStats(const ClassInfo& classInfo, const nsACString& path,
                 const nsACString& shapesPath, nsIHandleReportCallback* cb,
                 nsIHandleReportCallback* cb,
                 nsISupports* closure, size_t& gcTotal)
{
    // We deliberately don't use ZCREPORT_BYTES, so that these per-class values

@@ -2214,42 +2255,6 @@ ReportClassStats(const ClassInfo& classInfo, const nsACString& path,
                     "Miscellaneous object data.");
    }

    if (classInfo.shapesGCHeapTree > 0) {
        REPORT_GC_BYTES(shapesPath + NS_LITERAL_CSTRING("shapes/gc-heap/tree"),
                        classInfo.shapesGCHeapTree,
                        "Shapes in a property tree.");
    }

    if (classInfo.shapesGCHeapDict > 0) {
        REPORT_GC_BYTES(shapesPath + NS_LITERAL_CSTRING("shapes/gc-heap/dict"),
                        classInfo.shapesGCHeapDict,
                        "Shapes in dictionary mode.");
    }

    if (classInfo.shapesGCHeapBase > 0) {
        REPORT_GC_BYTES(shapesPath + NS_LITERAL_CSTRING("shapes/gc-heap/base"),
                        classInfo.shapesGCHeapBase,
                        "Base shapes, which collate data common to many shapes.");
    }

    if (classInfo.shapesMallocHeapTreeTables > 0) {
        REPORT_BYTES(shapesPath + NS_LITERAL_CSTRING("shapes/malloc-heap/tree-tables"),
                     KIND_HEAP, classInfo.shapesMallocHeapTreeTables,
                     "Property tables of shapes in a property tree.");
    }

    if (classInfo.shapesMallocHeapDictTables > 0) {
        REPORT_BYTES(shapesPath + NS_LITERAL_CSTRING("shapes/malloc-heap/dict-tables"),
                     KIND_HEAP, classInfo.shapesMallocHeapDictTables,
                     "Property tables of shapes in dictionary mode.");
    }

    if (classInfo.shapesMallocHeapTreeKids > 0) {
        REPORT_BYTES(shapesPath + NS_LITERAL_CSTRING("shapes/malloc-heap/tree-kids"),
                     KIND_HEAP, classInfo.shapesMallocHeapTreeKids,
                     "Kid hashes of shapes in a property tree.");
    }

    return NS_OK;
}

@@ -2293,12 +2298,8 @@ ReportCompartmentStats(const JS::CompartmentStats& cStats,
        ? NS_LITERAL_CSTRING("classes/")
        : NS_LITERAL_CSTRING("classes/class(<non-notable classes>)/");

    // XXX: shapes need special treatment until bug 1265271 is fixed.
    nsCString shapesPath = cJSPathPrefix;
    shapesPath += NS_LITERAL_CSTRING("classes/");

    rv = ReportClassStats(cStats.classInfo, nonNotablePath, shapesPath, cb,
                          closure, gcTotal);
    rv = ReportClassStats(cStats.classInfo, nonNotablePath, cb, closure,
                          gcTotal);
    NS_ENSURE_SUCCESS(rv, rv);

    for (size_t i = 0; i < cStats.notableClasses.length(); i++) {

@@ -2308,8 +2309,7 @@ ReportCompartmentStats(const JS::CompartmentStats& cStats,
        nsCString classPath = cJSPathPrefix +
            nsPrintfCString("classes/class(%s)/", classInfo.className_);

        rv = ReportClassStats(classInfo, classPath, shapesPath, cb, closure,
                              gcTotal);
        rv = ReportClassStats(classInfo, classPath, cb, closure, gcTotal);
        NS_ENSURE_SUCCESS(rv, rv);
    }

@@ -2364,7 +2364,7 @@ ReportCompartmentStats(const JS::CompartmentStats& cStats,

    ZCREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("compartment-tables"),
                   cStats.compartmentTables,
                   "Compartment-wide tables storing shape and type object information.");
                   "Compartment-wide tables storing object group information and wasm instances.");

    ZCREPORT_BYTES(cJSPathPrefix + NS_LITERAL_CSTRING("inner-views"),
                   cStats.innerViewsTable,

@@ -3043,11 +3043,11 @@ JSReporter::CollectReports(WindowPaths* windowPaths,

    MREPORT_BYTES(NS_LITERAL_CSTRING("js-main-runtime-gc-heap-committed/used/gc-things/shapes"),
                  KIND_OTHER,
                  rtStats.cTotals.classInfo.shapesGCHeapTree + rtStats.cTotals.classInfo.shapesGCHeapDict,
                  rtStats.zTotals.shapeInfo.shapesGCHeapTree + rtStats.zTotals.shapeInfo.shapesGCHeapDict,
                  "Used shape cells.");

    MREPORT_BYTES(NS_LITERAL_CSTRING("js-main-runtime-gc-heap-committed/used/gc-things/base-shapes"),
                  KIND_OTHER, rtStats.cTotals.classInfo.shapesGCHeapBase,
                  KIND_OTHER, rtStats.zTotals.shapeInfo.shapesGCHeapBase,
                  "Used base shape cells.");

    MREPORT_BYTES(NS_LITERAL_CSTRING("js-main-runtime-gc-heap-committed/used/gc-things/object-groups"),