Bug 1345177 - Make RegExpShared a GC thing r=sfink

This commit is contained in:
Jon Coppeard 2017-03-27 10:38:29 +01:00
Родитель 42d667debc
Коммит f7a6438226
29 изменённых файлов: 386 добавлений и 414 удалений

Просмотреть файл

@ -578,7 +578,8 @@ struct UnusedGCThingSizes
macro(Other, GCHeapUnused, string) \
macro(Other, GCHeapUnused, symbol) \
macro(Other, GCHeapUnused, jitcode) \
macro(Other, GCHeapUnused, scope)
macro(Other, GCHeapUnused, scope) \
macro(Other, GCHeapUnused, regExpShared)
UnusedGCThingSizes()
: FOR_EACH_SIZE(ZERO_SIZE)
@ -592,16 +593,17 @@ struct UnusedGCThingSizes
void addToKind(JS::TraceKind kind, intptr_t n) {
switch (kind) {
case JS::TraceKind::Object: object += n; break;
case JS::TraceKind::String: string += n; break;
case JS::TraceKind::Symbol: symbol += n; break;
case JS::TraceKind::Script: script += n; break;
case JS::TraceKind::Shape: shape += n; break;
case JS::TraceKind::BaseShape: baseShape += n; break;
case JS::TraceKind::JitCode: jitcode += n; break;
case JS::TraceKind::LazyScript: lazyScript += n; break;
case JS::TraceKind::ObjectGroup: objectGroup += n; break;
case JS::TraceKind::Scope: scope += n; break;
case JS::TraceKind::Object: object += n; break;
case JS::TraceKind::String: string += n; break;
case JS::TraceKind::Symbol: symbol += n; break;
case JS::TraceKind::Script: script += n; break;
case JS::TraceKind::Shape: shape += n; break;
case JS::TraceKind::BaseShape: baseShape += n; break;
case JS::TraceKind::JitCode: jitcode += n; break;
case JS::TraceKind::LazyScript: lazyScript += n; break;
case JS::TraceKind::ObjectGroup: objectGroup += n; break;
case JS::TraceKind::Scope: scope += n; break;
case JS::TraceKind::RegExpShared: regExpShared += n; break;
default:
MOZ_CRASH("Bad trace kind for UnusedGCThingSizes");
}
@ -643,6 +645,8 @@ struct ZoneStats
macro(Other, MallocHeap, objectGroupsMallocHeap) \
macro(Other, GCHeapUsed, scopesGCHeap) \
macro(Other, MallocHeap, scopesMallocHeap) \
macro(Other, GCHeapUsed, regExpSharedsGCHeap) \
macro(Other, MallocHeap, regExpSharedsMallocHeap) \
macro(Other, MallocHeap, typePool) \
macro(Other, MallocHeap, baselineStubsOptimized) \
macro(Other, MallocHeap, uniqueIdMap) \

Просмотреть файл

@ -32,7 +32,8 @@ using JS::PrivateValue;
using JS::PropertyDescriptor;
using JS::Value;
class RegExpGuard;
using RegExpGuard = JS::Rooted<RegExpShared*>;
class JS_FRIEND_API(Wrapper);
/*

Просмотреть файл

@ -16,6 +16,7 @@ namespace js {
class BaseShape;
class LazyScript;
class ObjectGroup;
class RegExpShared;
class Shape;
class Scope;
namespace jit {
@ -59,13 +60,15 @@ enum class TraceKind
BaseShape = 0x0F,
JitCode = 0x1F,
LazyScript = 0x2F,
Scope = 0x3F
Scope = 0x3F,
RegExpShared = 0x4F
};
const static uintptr_t OutOfLineTraceKindMask = 0x07;
static_assert(uintptr_t(JS::TraceKind::BaseShape) & OutOfLineTraceKindMask, "mask bits are set");
static_assert(uintptr_t(JS::TraceKind::JitCode) & OutOfLineTraceKindMask, "mask bits are set");
static_assert(uintptr_t(JS::TraceKind::LazyScript) & OutOfLineTraceKindMask, "mask bits are set");
static_assert(uintptr_t(JS::TraceKind::Scope) & OutOfLineTraceKindMask, "mask bits are set");
static_assert(uintptr_t(JS::TraceKind::RegExpShared) & OutOfLineTraceKindMask, "mask bits are set");
// When this header is imported inside SpiderMonkey, the class definitions are
// available and we can query those definitions to find the correct kind
@ -88,7 +91,8 @@ struct MapTypeToTraceKind {
D(Script, JSScript, true) \
D(Shape, js::Shape, true) \
D(String, JSString, false) \
D(Symbol, JS::Symbol, false)
D(Symbol, JS::Symbol, false) \
D(RegExpShared, js::RegExpShared, true)
// Map from all public types to their trace kind.
#define JS_EXPAND_DEF(name, type, _) \

Просмотреть файл

@ -164,6 +164,9 @@ class JS_PUBLIC_API(CallbackTracer) : public JSTracer
virtual void onScopeEdge(js::Scope** scopep) {
onChild(JS::GCCellPtr(*scopep, JS::TraceKind::Scope));
}
// Edge hook for the new RegExpShared GC-thing kind. The default, like the
// other onXxxEdge hooks above (e.g. onScopeEdge), funnels into the generic
// onChild callback with the matching JS::TraceKind.
virtual void onRegExpSharedEdge(js::RegExpShared** sharedp) {
onChild(JS::GCCellPtr(*sharedp, JS::TraceKind::RegExpShared));
}
// Override this method to receive notification when a node in the GC
// heap graph is visited.
@ -234,6 +237,7 @@ class JS_PUBLIC_API(CallbackTracer) : public JSTracer
void dispatchToOnEdge(js::jit::JitCode** codep) { onJitCodeEdge(codep); }
void dispatchToOnEdge(js::LazyScript** lazyp) { onLazyScriptEdge(lazyp); }
void dispatchToOnEdge(js::Scope** scopep) { onScopeEdge(scopep); }
void dispatchToOnEdge(js::RegExpShared** sharedp) { onRegExpSharedEdge(sharedp); }
protected:
void setTraceWeakEdges(bool value) {

Просмотреть файл

@ -120,6 +120,7 @@ struct MovingTracer : JS::CallbackTracer
void onLazyScriptEdge(LazyScript** lazyp) override;
void onBaseShapeEdge(BaseShape** basep) override;
void onScopeEdge(Scope** basep) override;
void onRegExpSharedEdge(RegExpShared** sharedp) override;
void onChild(const JS::GCCellPtr& thing) override {
MOZ_ASSERT(!RelocationOverlay::isCellForwarded(thing.asCell()));
}
@ -127,6 +128,10 @@ struct MovingTracer : JS::CallbackTracer
#ifdef DEBUG
TracerKind getTracerKind() const override { return TracerKind::Moving; }
#endif
private:
template <typename T>
void updateEdge(T** thingp);
};
// Structure for counting how many times objects in a particular group have

Просмотреть файл

@ -769,6 +769,8 @@ class GCRuntime
bool isCompactingGCEnabled() const;
bool isShrinkingGC() const { return invocationKind == GC_SHRINK; }
void setGrayRootsTracer(JSTraceDataOp traceOp, void* data);
MOZ_MUST_USE bool addBlackRootsTracer(JSTraceDataOp traceOp, void* data);
void removeBlackRootsTracer(JSTraceDataOp traceOp, void* data);

Просмотреть файл

@ -109,6 +109,7 @@ enum class AllocKind {
SYMBOL,
JITCODE,
SCOPE,
REGEXP_SHARED,
LIMIT,
LAST = LIMIT - 1
};
@ -116,38 +117,39 @@ enum class AllocKind {
// Macro to enumerate the different allocation kinds supplying information about
// the trace kind, C++ type and allocation size.
#define FOR_EACH_OBJECT_ALLOCKIND(D) \
/* AllocKind TraceKind TypeName SizedType */ \
D(FUNCTION, Object, JSObject, JSFunction) \
D(FUNCTION_EXTENDED, Object, JSObject, FunctionExtended) \
D(OBJECT0, Object, JSObject, JSObject_Slots0) \
D(OBJECT0_BACKGROUND, Object, JSObject, JSObject_Slots0) \
D(OBJECT2, Object, JSObject, JSObject_Slots2) \
D(OBJECT2_BACKGROUND, Object, JSObject, JSObject_Slots2) \
D(OBJECT4, Object, JSObject, JSObject_Slots4) \
D(OBJECT4_BACKGROUND, Object, JSObject, JSObject_Slots4) \
D(OBJECT8, Object, JSObject, JSObject_Slots8) \
D(OBJECT8_BACKGROUND, Object, JSObject, JSObject_Slots8) \
D(OBJECT12, Object, JSObject, JSObject_Slots12) \
D(OBJECT12_BACKGROUND, Object, JSObject, JSObject_Slots12) \
D(OBJECT16, Object, JSObject, JSObject_Slots16) \
D(OBJECT16_BACKGROUND, Object, JSObject, JSObject_Slots16)
/* AllocKind TraceKind TypeName SizedType */ \
D(FUNCTION, Object, JSObject, JSFunction) \
D(FUNCTION_EXTENDED, Object, JSObject, FunctionExtended) \
D(OBJECT0, Object, JSObject, JSObject_Slots0) \
D(OBJECT0_BACKGROUND, Object, JSObject, JSObject_Slots0) \
D(OBJECT2, Object, JSObject, JSObject_Slots2) \
D(OBJECT2_BACKGROUND, Object, JSObject, JSObject_Slots2) \
D(OBJECT4, Object, JSObject, JSObject_Slots4) \
D(OBJECT4_BACKGROUND, Object, JSObject, JSObject_Slots4) \
D(OBJECT8, Object, JSObject, JSObject_Slots8) \
D(OBJECT8_BACKGROUND, Object, JSObject, JSObject_Slots8) \
D(OBJECT12, Object, JSObject, JSObject_Slots12) \
D(OBJECT12_BACKGROUND, Object, JSObject, JSObject_Slots12) \
D(OBJECT16, Object, JSObject, JSObject_Slots16) \
D(OBJECT16_BACKGROUND, Object, JSObject, JSObject_Slots16)
#define FOR_EACH_NONOBJECT_ALLOCKIND(D) \
/* AllocKind TraceKind TypeName SizedType */ \
D(SCRIPT, Script, JSScript, JSScript) \
D(LAZY_SCRIPT, LazyScript, js::LazyScript, js::LazyScript) \
D(SHAPE, Shape, js::Shape, js::Shape) \
D(ACCESSOR_SHAPE, Shape, js::AccessorShape, js::AccessorShape) \
D(BASE_SHAPE, BaseShape, js::BaseShape, js::BaseShape) \
D(OBJECT_GROUP, ObjectGroup, js::ObjectGroup, js::ObjectGroup) \
D(FAT_INLINE_STRING, String, JSFatInlineString, JSFatInlineString) \
D(STRING, String, JSString, JSString) \
D(EXTERNAL_STRING, String, JSExternalString, JSExternalString) \
D(FAT_INLINE_ATOM, String, js::FatInlineAtom, js::FatInlineAtom) \
D(ATOM, String, js::NormalAtom, js::NormalAtom) \
D(SYMBOL, Symbol, JS::Symbol, JS::Symbol) \
D(JITCODE, JitCode, js::jit::JitCode, js::jit::JitCode) \
D(SCOPE, Scope, js::Scope, js::Scope)
/* AllocKind TraceKind TypeName SizedType */ \
D(SCRIPT, Script, JSScript, JSScript) \
D(LAZY_SCRIPT, LazyScript, js::LazyScript, js::LazyScript) \
D(SHAPE, Shape, js::Shape, js::Shape) \
D(ACCESSOR_SHAPE, Shape, js::AccessorShape, js::AccessorShape) \
D(BASE_SHAPE, BaseShape, js::BaseShape, js::BaseShape) \
D(OBJECT_GROUP, ObjectGroup, js::ObjectGroup, js::ObjectGroup) \
D(FAT_INLINE_STRING, String, JSFatInlineString, JSFatInlineString) \
D(STRING, String, JSString, JSString) \
D(EXTERNAL_STRING, String, JSExternalString, JSExternalString) \
D(FAT_INLINE_ATOM, String, js::FatInlineAtom, js::FatInlineAtom) \
D(ATOM, String, js::NormalAtom, js::NormalAtom) \
D(SYMBOL, Symbol, JS::Symbol, JS::Symbol) \
D(JITCODE, JitCode, js::jit::JitCode, js::jit::JitCode) \
D(SCOPE, Scope, js::Scope, js::Scope) \
D(REGEXP_SHARED, RegExpShared, js::RegExpShared, js::RegExpShared)
#define FOR_EACH_ALLOCKIND(D) \
FOR_EACH_OBJECT_ALLOCKIND(D) \
@ -313,6 +315,9 @@ class TenuredCell : public Cell
static MOZ_ALWAYS_INLINE void writeBarrierPost(void* cellp, TenuredCell* prior,
TenuredCell* next);
// Default implementation for kinds that don't require finalization.
void finalize(FreeOp* fop) {}
// Default implementation for kinds that don't require fixup.
void fixupAfterMovingGC() {}

Просмотреть файл

@ -442,6 +442,14 @@ js::TraceNullableEdge(JSTracer* trc, WriteBarrieredBase<T>* thingp, const char*
DispatchToTracer(trc, ConvertToBase(thingp->unsafeUnbarrieredForTracing()), name);
}
// Trace an edge stored in a ReadBarriered<T> wrapper, tolerating null /
// non-markable values. The pointer is read with unbarrieredGet() because
// tracing must observe the raw edge without triggering the read barrier.
template <typename T>
void
js::TraceNullableEdge(JSTracer* trc, ReadBarriered<T>* thingp, const char* name)
{
if (InternalBarrierMethods<T>::isMarkable(thingp->unbarrieredGet()))
DispatchToTracer(trc, ConvertToBase(thingp->unsafeGet()), name);
}
template <typename T>
JS_PUBLIC_API(void)
JS::TraceEdge(JSTracer* trc, JS::Heap<T>* thingp, const char* name)
@ -559,6 +567,7 @@ js::TraceRootRange(JSTracer* trc, size_t len, T* vec, const char* name)
template void js::TraceEdge<type>(JSTracer*, WriteBarrieredBase<type>*, const char*); \
template void js::TraceEdge<type>(JSTracer*, ReadBarriered<type>*, const char*); \
template void js::TraceNullableEdge<type>(JSTracer*, WriteBarrieredBase<type>*, const char*); \
template void js::TraceNullableEdge<type>(JSTracer*, ReadBarriered<type>*, const char*); \
template void js::TraceManuallyBarrieredEdge<type>(JSTracer*, type*, const char*); \
template void js::TraceWeakEdge<type>(JSTracer*, WeakRef<type>*, const char*); \
template void js::TraceRoot<type>(JSTracer*, type*, const char*); \
@ -875,6 +884,7 @@ js::GCMarker::markAndTraceChildren(T* thing)
namespace js {
template <> void GCMarker::traverse(BaseShape* thing) { markAndTraceChildren(thing); }
template <> void GCMarker::traverse(JS::Symbol* thing) { markAndTraceChildren(thing); }
template <> void GCMarker::traverse(RegExpShared* thing) { markAndTraceChildren(thing); }
} // namespace js
// Strings, LazyScripts, Shapes, and Scopes are extremely common, but have

Просмотреть файл

@ -86,6 +86,7 @@ class JitCode;
D(js::PlainObject*) \
D(js::PropertyName*) \
D(js::RegExpObject*) \
D(js::RegExpShared*) \
D(js::SavedFrame*) \
D(js::Scope*) \
D(js::ScriptSourceObject*) \

Просмотреть файл

@ -184,6 +184,7 @@ static const PhaseInfo phases[] = {
{ PHASE_SWEEP_STRING, "Sweep String", PHASE_SWEEP, 34 },
{ PHASE_SWEEP_SCRIPT, "Sweep Script", PHASE_SWEEP, 35 },
{ PHASE_SWEEP_SCOPE, "Sweep Scope", PHASE_SWEEP, 59 },
{ PHASE_SWEEP_REGEXP_SHARED, "Sweep RegExpShared", PHASE_SWEEP, 61 },
{ PHASE_SWEEP_SHAPE, "Sweep Shape", PHASE_SWEEP, 36 },
{ PHASE_SWEEP_JITCODE, "Sweep JIT code", PHASE_SWEEP, 37 },
{ PHASE_FINALIZE_END, "Finalize End Callback", PHASE_SWEEP, 38 },
@ -211,9 +212,9 @@ static const PhaseInfo phases[] = {
{ PHASE_MARK_COMPARTMENTS, "Mark Compartments", PHASE_MARK_ROOTS, 54 },
{ PHASE_PURGE_SHAPE_TABLES, "Purge ShapeTables", PHASE_NO_PARENT, 60 },
{ PHASE_LIMIT, nullptr, PHASE_NO_PARENT, 60 }
{ PHASE_LIMIT, nullptr, PHASE_NO_PARENT, 61 }
// Current number of telemetryBuckets is 60. If you insert new phases
// Current number of telemetryBuckets is 61. If you insert new phases
// somewhere, start at that number and count up. Do not change any existing
// numbers.
};

Просмотреть файл

@ -69,6 +69,7 @@ enum Phase : uint8_t {
PHASE_SWEEP_STRING,
PHASE_SWEEP_SCRIPT,
PHASE_SWEEP_SCOPE,
PHASE_SWEEP_REGEXP_SHARED,
PHASE_SWEEP_SHAPE,
PHASE_SWEEP_JITCODE,
PHASE_FINALIZE_END,

Просмотреть файл

@ -65,6 +65,10 @@ template <typename T>
void
TraceNullableEdge(JSTracer* trc, WriteBarrieredBase<T>* thingp, const char* name);
template <typename T>
void
TraceNullableEdge(JSTracer* trc, ReadBarriered<T>* thingp, const char* name);
// Trace through a "root" edge. These edges are the initial edges in the object
// graph traversal. Root edges are asserted to only be traversed in the initial
// phase of a GC.

Просмотреть файл

@ -6101,7 +6101,7 @@ JS_GetRegExpFlags(JSContext* cx, HandleObject obj)
RegExpGuard shared(cx);
if (!RegExpToShared(cx, obj, &shared))
return false;
return shared.re()->getFlags();
return shared->getFlags();
}
JS_PUBLIC_API(JSString*)
@ -6113,7 +6113,7 @@ JS_GetRegExpSource(JSContext* cx, HandleObject obj)
RegExpGuard shared(cx);
if (!RegExpToShared(cx, obj, &shared))
return nullptr;
return shared.re()->getSource();
return shared->getSource();
}
/************************************************************************/

Просмотреть файл

@ -67,7 +67,7 @@ JSCompartment::JSCompartment(Zone* zone, const JS::CompartmentOptions& options =
data(nullptr),
allocationMetadataBuilder(nullptr),
lastAnimationTime(0),
regExps(runtime_),
regExps(zone),
globalWriteBarriered(0),
detachedTypedObjects(0),
objectMetadataState(ImmediateMetadata()),
@ -228,6 +228,13 @@ JSCompartment::ensureJitCompartmentExists(JSContext* cx)
}
#ifdef JSGC_HASH_TABLE_CHECKS
// Out-of-line definition of the DtoaCache sanity check (moved out of the
// header, see jscompartment.h): after a moving GC the cached string |s|
// must either be null or point at the cell itself, never at a forwarding
// pointer left behind by compaction.
void
js::DtoaCache::checkCacheAfterMovingGC()
{
MOZ_ASSERT(!s || !IsForwarded(s));
}
namespace {
struct CheckGCThingAfterMovingGCFunctor {
template <class T> void operator()(T* t) { CheckGCThingAfterMovingGC(*t); }
@ -250,7 +257,8 @@ JSCompartment::checkWrapperMapAfterMovingGC()
MOZ_RELEASE_ASSERT(ptr.found() && &*ptr == &e.front());
}
}
#endif
#endif // JSGC_HASH_TABLE_CHECKS
bool
JSCompartment::putWrapper(JSContext* cx, const CrossCompartmentKey& wrapped,

Просмотреть файл

@ -66,7 +66,7 @@ class DtoaCache {
}
#ifdef JSGC_HASH_TABLE_CHECKS
void checkCacheAfterMovingGC() { MOZ_ASSERT(!s || !IsForwarded(s)); }
void checkCacheAfterMovingGC();
#endif
};

Просмотреть файл

@ -1163,7 +1163,7 @@ extern JS_FRIEND_API(unsigned)
GetEnterCompartmentDepth(JSContext* cx);
#endif
class RegExpGuard;
using RegExpGuard = JS::Rooted<RegExpShared*>;
extern JS_FRIEND_API(bool)
RegExpToSharedNonInline(JSContext* cx, JS::HandleObject regexp, RegExpGuard* shared);

Просмотреть файл

@ -367,7 +367,12 @@ static const FinalizePhase BackgroundFinalizePhases[] = {
},
{
gcstats::PHASE_SWEEP_SCOPE, {
AllocKind::SCOPE
AllocKind::SCOPE,
}
},
{
gcstats::PHASE_SWEEP_REGEXP_SHARED, {
AllocKind::REGEXP_SHARED,
}
},
{
@ -1722,7 +1727,6 @@ static const AllocKind AllocKindsToRelocate[] = {
AllocKind::OBJECT16_BACKGROUND,
AllocKind::SCRIPT,
AllocKind::LAZY_SCRIPT,
AllocKind::SCOPE,
AllocKind::SHAPE,
AllocKind::ACCESSOR_SHAPE,
AllocKind::BASE_SHAPE,
@ -1730,7 +1734,9 @@ static const AllocKind AllocKindsToRelocate[] = {
AllocKind::STRING,
AllocKind::EXTERNAL_STRING,
AllocKind::FAT_INLINE_ATOM,
AllocKind::ATOM
AllocKind::ATOM,
AllocKind::SCOPE,
AllocKind::REGEXP_SHARED
};
Arena*
@ -2054,61 +2060,23 @@ GCRuntime::relocateArenas(Zone* zone, JS::gcreason::Reason reason, Arena*& reloc
return true;
}
void
MovingTracer::onObjectEdge(JSObject** objp)
template <typename T>
inline void
MovingTracer::updateEdge(T** thingp)
{
JSObject* obj = *objp;
if (obj->runtimeFromAnyThread() == runtime() && IsForwarded(obj))
*objp = Forwarded(obj);
auto thing = *thingp;
if (thing->runtimeFromAnyThread() == runtime() && IsForwarded(thing))
*thingp = Forwarded(thing);
}
void
MovingTracer::onShapeEdge(Shape** shapep)
{
Shape* shape = *shapep;
if (shape->runtimeFromAnyThread() == runtime() && IsForwarded(shape))
*shapep = Forwarded(shape);
}
void
MovingTracer::onStringEdge(JSString** stringp)
{
JSString* string = *stringp;
if (string->runtimeFromAnyThread() == runtime() && IsForwarded(string))
*stringp = Forwarded(string);
}
void
MovingTracer::onScriptEdge(JSScript** scriptp)
{
JSScript* script = *scriptp;
if (script->runtimeFromAnyThread() == runtime() && IsForwarded(script))
*scriptp = Forwarded(script);
}
void
MovingTracer::onLazyScriptEdge(LazyScript** lazyp)
{
LazyScript* lazy = *lazyp;
if (lazy->runtimeFromAnyThread() == runtime() && IsForwarded(lazy))
*lazyp = Forwarded(lazy);
}
void
MovingTracer::onBaseShapeEdge(BaseShape** basep)
{
BaseShape* base = *basep;
if (base->runtimeFromAnyThread() == runtime() && IsForwarded(base))
*basep = Forwarded(base);
}
void
MovingTracer::onScopeEdge(Scope** scopep)
{
Scope* scope = *scopep;
if (scope->runtimeFromAnyThread() == runtime() && IsForwarded(scope))
*scopep = Forwarded(scope);
}
// MovingTracer edge hooks: every relocatable kind forwards to the shared
// updateEdge<T> helper, which rewrites *thingp to the cell's new address
// when it was moved by compacting GC. onRegExpSharedEdge is new here, now
// that RegExpShared is a GC thing.
void MovingTracer::onObjectEdge(JSObject** objp) { updateEdge(objp); }
void MovingTracer::onShapeEdge(Shape** shapep) { updateEdge(shapep); }
void MovingTracer::onStringEdge(JSString** stringp) { updateEdge(stringp); }
void MovingTracer::onScriptEdge(JSScript** scriptp) { updateEdge(scriptp); }
void MovingTracer::onLazyScriptEdge(LazyScript** lazyp) { updateEdge(lazyp); }
void MovingTracer::onBaseShapeEdge(BaseShape** basep) { updateEdge(basep); }
void MovingTracer::onScopeEdge(Scope** scopep) { updateEdge(scopep); }
void MovingTracer::onRegExpSharedEdge(RegExpShared** sharedp) { updateEdge(sharedp); }
void
Zone::prepareForCompacting()

Просмотреть файл

@ -121,6 +121,7 @@ IsNurseryAllocable(AllocKind kind)
false, /* AllocKind::SYMBOL */
false, /* AllocKind::JITCODE */
false, /* AllocKind::SCOPE */
false, /* AllocKind::REGEXP_SHARED */
};
JS_STATIC_ASSERT(JS_ARRAY_LENGTH(map) == size_t(AllocKind::LIMIT));
return map[size_t(kind)];
@ -159,6 +160,7 @@ IsBackgroundFinalized(AllocKind kind)
true, /* AllocKind::SYMBOL */
false, /* AllocKind::JITCODE */
true, /* AllocKind::SCOPE */
true, /* AllocKind::REGEXP_SHARED */
};
JS_STATIC_ASSERT(JS_ARRAY_LENGTH(map) == size_t(AllocKind::LIMIT));
return map[size_t(kind)];
@ -1136,109 +1138,29 @@ class RelocationOverlay
// to allow slots to be accessed.
template <typename T>
struct MightBeForwarded
{
static_assert(mozilla::IsBaseOf<Cell, T>::value,
"T must derive from Cell");
static_assert(!mozilla::IsSame<Cell, T>::value && !mozilla::IsSame<TenuredCell, T>::value,
"T must not be Cell or TenuredCell");
static const bool value = mozilla::IsBaseOf<JSObject, T>::value ||
mozilla::IsBaseOf<Shape, T>::value ||
mozilla::IsBaseOf<BaseShape, T>::value ||
mozilla::IsBaseOf<JSString, T>::value ||
mozilla::IsBaseOf<JSScript, T>::value ||
mozilla::IsBaseOf<js::LazyScript, T>::value ||
mozilla::IsBaseOf<js::Scope, T>::value;
};
inline bool IsForwarded(T* t);
inline bool IsForwarded(const JS::Value& value);
template <typename T>
inline bool
IsForwarded(T* t)
{
RelocationOverlay* overlay = RelocationOverlay::fromCell(t);
if (!MightBeForwarded<T>::value) {
MOZ_ASSERT(!overlay->isForwarded());
return false;
}
inline T* Forwarded(T* t);
return overlay->isForwarded();
}
struct IsForwardedFunctor : public BoolDefaultAdaptor<Value, false> {
template <typename T> bool operator()(T* t) { return IsForwarded(t); }
};
inline bool
IsForwarded(const JS::Value& value)
{
return DispatchTyped(IsForwardedFunctor(), value);
}
inline Value Forwarded(const JS::Value& value);
template <typename T>
inline T*
Forwarded(T* t)
{
RelocationOverlay* overlay = RelocationOverlay::fromCell(t);
MOZ_ASSERT(overlay->isForwarded());
return reinterpret_cast<T*>(overlay->forwardingAddress());
}
struct ForwardedFunctor : public IdentityDefaultAdaptor<Value> {
template <typename T> inline Value operator()(T* t) {
return js::gc::RewrapTaggedPointer<Value, T>::wrap(Forwarded(t));
}
};
inline Value
Forwarded(const JS::Value& value)
{
return DispatchTyped(ForwardedFunctor(), value);
}
template <typename T>
inline T
MaybeForwarded(T t)
{
if (IsForwarded(t))
t = Forwarded(t);
MakeAccessibleAfterMovingGC(t);
return t;
}
inline T MaybeForwarded(T t);
#ifdef JSGC_HASH_TABLE_CHECKS
template <typename T>
inline bool
IsGCThingValidAfterMovingGC(T* t)
{
return !IsInsideNursery(t) && !RelocationOverlay::isCellForwarded(t);
}
inline bool IsGCThingValidAfterMovingGC(T* t);
template <typename T>
inline void
CheckGCThingAfterMovingGC(T* t)
{
if (t)
MOZ_RELEASE_ASSERT(IsGCThingValidAfterMovingGC(t));
}
inline void CheckGCThingAfterMovingGC(T* t);
template <typename T>
inline void
CheckGCThingAfterMovingGC(const ReadBarriered<T*>& t)
{
CheckGCThingAfterMovingGC(t.unbarrieredGet());
}
inline void CheckGCThingAfterMovingGC(const ReadBarriered<T*>& t);
struct CheckValueAfterMovingGCFunctor : public VoidDefaultAdaptor<Value> {
template <typename T> void operator()(T* t) { CheckGCThingAfterMovingGC(t); }
};
inline void
CheckValueAfterMovingGC(const JS::Value& value)
{
DispatchTyped(CheckValueAfterMovingGCFunctor(), value);
}
inline void CheckValueAfterMovingGC(const JS::Value& value);
#endif // JSGC_HASH_TABLE_CHECKS

Просмотреть файл

@ -488,6 +488,114 @@ RelocationOverlay::forwardTo(Cell* cell)
newLocation_ = cell;
}
// Compile-time predicate: can a cell of type T be relocated by compacting
// GC, and therefore be found holding a forwarding pointer? The listed base
// types mirror the AllocKindsToRelocate table (js::RegExpShared is newly
// relocatable in this patch); all other kinds never move.
template <typename T>
struct MightBeForwarded
{
static_assert(mozilla::IsBaseOf<Cell, T>::value,
"T must derive from Cell");
static_assert(!mozilla::IsSame<Cell, T>::value && !mozilla::IsSame<TenuredCell, T>::value,
"T must not be Cell or TenuredCell");
static const bool value = mozilla::IsBaseOf<JSObject, T>::value ||
mozilla::IsBaseOf<Shape, T>::value ||
mozilla::IsBaseOf<BaseShape, T>::value ||
mozilla::IsBaseOf<JSString, T>::value ||
mozilla::IsBaseOf<JSScript, T>::value ||
mozilla::IsBaseOf<js::LazyScript, T>::value ||
mozilla::IsBaseOf<js::Scope, T>::value ||
mozilla::IsBaseOf<js::RegExpShared, T>::value;
};
// Returns true if |t| was moved by compacting GC and its old location now
// holds a forwarding pointer. For kinds that can never move this folds to
// a static false, with a debug assert that the overlay agrees.
template <typename T>
inline bool
IsForwarded(T* t)
{
RelocationOverlay* overlay = RelocationOverlay::fromCell(t);
if (!MightBeForwarded<T>::value) {
MOZ_ASSERT(!overlay->isForwarded());
return false;
}
return overlay->isForwarded();
}
// Functor dispatching IsForwarded over whatever GC thing a Value holds;
// non-GC-thing values use the adaptor's default of false.
struct IsForwardedFunctor : public BoolDefaultAdaptor<Value, false> {
template <typename T> bool operator()(T* t) { return IsForwarded(t); }
};
// Value overload: unwraps the tagged pointer and tests the pointee.
inline bool
IsForwarded(const JS::Value& value)
{
return DispatchTyped(IsForwardedFunctor(), value);
}
// Returns the new address of a cell that is known to be forwarded (asserts
// this); callers should test IsForwarded first or use MaybeForwarded.
template <typename T>
inline T*
Forwarded(T* t)
{
RelocationOverlay* overlay = RelocationOverlay::fromCell(t);
MOZ_ASSERT(overlay->isForwarded());
return reinterpret_cast<T*>(overlay->forwardingAddress());
}
// Functor that follows the forwarding pointer for the GC thing in a Value
// and re-wraps the new address back into a Value of the same tag.
struct ForwardedFunctor : public IdentityDefaultAdaptor<Value> {
template <typename T> inline Value operator()(T* t) {
return js::gc::RewrapTaggedPointer<Value, T>::wrap(Forwarded(t));
}
};
// Value overload: non-GC-thing values pass through unchanged (identity).
inline Value
Forwarded(const JS::Value& value)
{
return DispatchTyped(ForwardedFunctor(), value);
}
// Follow the forwarding pointer if |t| has moved, otherwise return it
// unchanged. MakeAccessibleAfterMovingGC then re-enables access to the
// (possibly relocated) thing — presumably undoing debug-mode protection of
// moved cells; confirm against its definition.
template <typename T>
inline T
MaybeForwarded(T t)
{
if (IsForwarded(t))
t = Forwarded(t);
MakeAccessibleAfterMovingGC(t);
return t;
}
#ifdef JSGC_HASH_TABLE_CHECKS
// Debug-build helpers used to validate hash table contents after a moving
// GC: a stored pointer is valid only if it is neither a nursery address nor
// a stale pre-relocation address.
template <typename T>
inline bool
IsGCThingValidAfterMovingGC(T* t)
{
return !IsInsideNursery(t) && !RelocationOverlay::isCellForwarded(t);
}
// Release-asserts validity of a possibly-null pointer.
template <typename T>
inline void
CheckGCThingAfterMovingGC(T* t)
{
if (t)
MOZ_RELEASE_ASSERT(IsGCThingValidAfterMovingGC(t));
}
// ReadBarriered overload: check the raw pointer without triggering the
// read barrier.
template <typename T>
inline void
CheckGCThingAfterMovingGC(const ReadBarriered<T*>& t)
{
CheckGCThingAfterMovingGC(t.unbarrieredGet());
}
// Dispatch the pointer check over whatever GC thing a Value holds.
struct CheckValueAfterMovingGCFunctor : public VoidDefaultAdaptor<Value> {
template <typename T> void operator()(T* t) { CheckGCThingAfterMovingGC(t); }
};
inline void
CheckValueAfterMovingGC(const JS::Value& value)
{
DispatchTyped(CheckValueAfterMovingGCFunctor(), value);
}
#endif // JSGC_HASH_TABLE_CHECKS
} /* namespace gc */
} /* namespace js */

Просмотреть файл

@ -460,15 +460,14 @@ CrossCompartmentWrapper::fun_toString(JSContext* cx, HandleObject wrapper, unsig
bool
CrossCompartmentWrapper::regexp_toShared(JSContext* cx, HandleObject wrapper, RegExpGuard* g) const
{
RegExpGuard wrapperGuard(cx);
RegExpGuard re(cx);
{
AutoCompartment call(cx, wrappedObject(wrapper));
if (!Wrapper::regexp_toShared(cx, wrapper, &wrapperGuard))
if (!Wrapper::regexp_toShared(cx, wrapper, &re))
return false;
}
// Get an equivalent RegExpShared associated with the current compartment.
RegExpShared* re = wrapperGuard.re();
cx->markAtom(re->getSource());
return cx->compartment()->regExps.get(cx, re->getSource(), re->getFlags(), g);
}

Просмотреть файл

@ -13,8 +13,6 @@
namespace js {
class RegExpGuard;
/*
* Dispatch point for handlers that executes the appropriate C++ or scripted traps.
*

Просмотреть файл

@ -19,6 +19,8 @@
#include "jit/JitFrames.h"
#include "vm/StringBuffer.h"
#include "jsgcinlines.h"
using namespace js;
using mozilla::DebugOnly;

Просмотреть файл

@ -598,6 +598,13 @@ StatsCellCallback(JSRuntime* rt, void* data, void* thing, JS::TraceKind traceKin
break;
}
case JS::TraceKind::RegExpShared: {
auto regexp = static_cast<RegExpShared*>(thing);
zStats->regExpSharedsGCHeap += thingSize;
zStats->regExpSharedsMallocHeap += regexp->sizeOfExcludingThis(rtStats->mallocSizeOf_);
break;
}
default:
MOZ_CRASH("invalid traceKind in StatsCellCallback");
}

Просмотреть файл

@ -9,6 +9,7 @@
#include "mozilla/MemoryReporting.h"
#include "mozilla/PodOperations.h"
#include "jshashutil.h"
#include "jsstr.h"
#ifdef DEBUG
#include "jsutil.h"
@ -119,25 +120,11 @@ VectorMatchPairs::allocOrExpandArray(size_t pairCount)
/* RegExpObject */
static inline void
RegExpSharedReadBarrier(JSContext* cx, RegExpShared* shared)
{
Zone* zone = cx->zone();
if (zone->needsIncrementalBarrier())
shared->trace(zone->barrierTracer());
if (shared->isMarkedGray())
shared->unmarkGray();
}
/* static */ bool
RegExpObject::getShared(JSContext* cx, Handle<RegExpObject*> regexp, RegExpGuard* g)
{
if (RegExpShared* shared = regexp->maybeShared()) {
// Fetching a RegExpShared from an object requires a read
// barrier, as the shared pointer might be weak.
RegExpSharedReadBarrier(cx, shared);
g->init(*shared);
if (regexp->hasShared()) {
g->set(regexp->sharedRef());
return true;
}
@ -174,26 +161,32 @@ RegExpObject::isOriginalFlagGetter(JSNative native, RegExpFlag* mask)
/* static */ void
RegExpObject::trace(JSTracer* trc, JSObject* obj)
{
RegExpShared* shared = obj->as<RegExpObject>().maybeShared();
if (!shared)
return;
obj->as<RegExpObject>().trace(trc);
}
// When tracing through the object normally, we have the option of
// unlinking the object from its RegExpShared so that the RegExpShared may
// be collected. To detect this we need to test all the following
// conditions, since:
static inline bool
IsMarkingTrace(JSTracer* trc)
{
// Determine whether tracing is happening during normal marking. We need to
// test all the following conditions, since:
//
// 1. During TraceRuntime, CurrentThreadIsHeapBusy() is true, but the
// tracer might not be a marking tracer.
// 2. When a write barrier executes, IsMarkingTracer is true, but
// CurrentThreadIsHeapBusy() will be false.
if (JS::CurrentThreadIsHeapCollecting() &&
trc->isMarkingTracer() &&
!obj->asTenured().zone()->isPreservingCode())
{
obj->as<RegExpObject>().NativeObject::setPrivate(nullptr);
} else {
shared->trace(trc);
}
return JS::CurrentThreadIsHeapCollecting() && trc->isMarkingTracer();
}
// Trace the object's edge to its RegExpShared. Under normal marking (and
// when the zone is not preserving jit code) the edge is deliberately
// dropped first, making it effectively weak so the RegExpShared can be
// collected; otherwise the edge is traced as usual.
void
RegExpObject::trace(JSTracer* trc)
{
// When marking the object normally we have the option of unlinking the
// object from its RegExpShared so that the RegExpShared may be collected.
if (IsMarkingTrace(trc) && !zone()->isPreservingCode())
sharedRef() = nullptr;
TraceNullableEdge(trc, &sharedRef(), "RegExpObject shared");
}
static JSObject*
@ -282,7 +275,7 @@ RegExpObject::create(JSContext* cx, HandleAtom source, RegExpFlag flags,
/* static */ bool
RegExpObject::createShared(JSContext* cx, Handle<RegExpObject*> regexp, RegExpGuard* g)
{
MOZ_ASSERT(!regexp->maybeShared());
MOZ_ASSERT(!regexp->hasShared());
if (!cx->compartment()->regExps.get(cx, regexp->getSource(), regexp->getFlags(), g))
return false;
@ -898,7 +891,7 @@ RegExpObject::dumpBytecode(JSContext* cx, Handle<RegExpObject*> regexp,
if (!getShared(cx, regexp, &g))
return false;
return g.re()->dumpBytecode(cx, match_only, input);
return g->dumpBytecode(cx, match_only, input);
}
#endif
@ -947,7 +940,7 @@ js::StringHasRegExpMetaChars(JSLinearString* str)
/* RegExpShared */
RegExpShared::RegExpShared(JSAtom* source, RegExpFlag flags)
: source(source), flags(flags), parenCount(0), canStringMatch(false), marked_(false)
: source(source), flags(flags), canStringMatch(false), parenCount(0)
{}
RegExpShared::~RegExpShared()
@ -957,37 +950,22 @@ RegExpShared::~RegExpShared()
}
void
RegExpShared::trace(JSTracer* trc)
RegExpShared::traceChildren(JSTracer* trc)
{
if (trc->isMarkingTracer())
marked_ = true;
// Discard code to avoid holding onto ExecutablePools.
if (IsMarkingTrace(trc) && trc->runtime()->gc.isShrinkingGC())
discardJitCode();
TraceNullableEdge(trc, &source, "RegExpShared source");
for (auto& comp : compilationArray)
TraceNullableEdge(trc, &comp.jitCode, "RegExpShared code");
}
bool
RegExpShared::isMarkedGray() const
{
if (source && source->isMarked(gc::GRAY))
return true;
for (const auto& comp : compilationArray) {
if (comp.jitCode && comp.jitCode->isMarked(gc::GRAY))
return true;
}
return false;
}
void
RegExpShared::unmarkGray()
RegExpShared::discardJitCode()
{
if (source)
JS::UnmarkGrayGCThingRecursively(JS::GCCellPtr(source));
for (const auto& comp : compilationArray) {
if (comp.jitCode)
JS::UnmarkGrayGCThingRecursively(JS::GCCellPtr(comp.jitCode.get()));
}
for (auto& comp : compilationArray)
comp.jitCode = nullptr;
}
bool
@ -1184,9 +1162,9 @@ RegExpShared::execute(JSContext* cx, HandleLinearString input, size_t start,
}
size_t
RegExpShared::sizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf)
RegExpShared::sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf)
{
size_t n = mallocSizeOf(this);
size_t n = 0;
for (size_t i = 0; i < ArrayLength(compilationArray); i++) {
const RegExpCompilation& compilation = compilationArray[i];
@ -1203,8 +1181,8 @@ RegExpShared::sizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf)
/* RegExpCompartment */
RegExpCompartment::RegExpCompartment(JSRuntime* rt)
: set_(rt),
RegExpCompartment::RegExpCompartment(Zone* zone)
: set_(zone, Set(zone->runtimeFromActiveCooperatingThread())),
matchResultTemplateObject_(nullptr),
optimizableRegExpPrototypeShape_(nullptr),
optimizableRegExpInstanceShape_(nullptr)
@ -1212,14 +1190,7 @@ RegExpCompartment::RegExpCompartment(JSRuntime* rt)
RegExpCompartment::~RegExpCompartment()
{
// Because of stray mark bits being set (see RegExpCompartment::sweep)
// there might still be RegExpShared instances which haven't been deleted.
if (set_.initialized()) {
for (Set::Enum e(set_); !e.empty(); e.popFront()) {
RegExpShared* shared = e.front();
js_delete(shared);
}
}
MOZ_ASSERT_IF(set_.initialized(), set_.empty());
}
ArrayObject*
@ -1229,7 +1200,7 @@ RegExpCompartment::createMatchResultTemplateObject(JSContext* cx)
/* Create template array object */
RootedArrayObject templateObject(cx, NewDenseUnallocatedArray(cx, RegExpObject::MaxPairCount,
nullptr, TenuredObject));
nullptr, TenuredObject));
if (!templateObject)
return matchResultTemplateObject_; // = nullptr
@ -1285,59 +1256,9 @@ RegExpCompartment::init(JSContext* cx)
return true;
}
bool
RegExpShared::needsSweep(JSRuntime* rt)
{
// Sometimes RegExpShared instances are marked without the compartment
// being subsequently cleared. This can happen if a GC is restarted while
// in progress (i.e. performing a full GC in the middle of an incremental
// GC) or if a RegExpShared referenced via the stack is traced but is not
// in a zone being collected.
//
// Because of this we only treat the marked_ bit as a hint, and destroy the
// RegExpShared if it was accidentally marked earlier but wasn't marked by
// the current trace.
bool keep = marked() && IsMarked(rt, &source);
for (size_t i = 0; i < ArrayLength(compilationArray); i++) {
RegExpShared::RegExpCompilation& compilation = compilationArray[i];
if (compilation.jitCode && gc::IsAboutToBeFinalized(&compilation.jitCode))
keep = false;
}
MOZ_ASSERT(JS::CurrentThreadIsHeapMajorCollecting());
if (keep || rt->gc.isHeapCompacting()) {
clearMarked();
return false;
}
return true;
}
void
RegExpShared::discardJitCode()
{
for (size_t i = 0; i < ArrayLength(compilationArray); i++)
compilationArray[i].jitCode = nullptr;
}
void
RegExpCompartment::sweep(JSRuntime* rt)
{
if (!set_.initialized())
return;
for (Set::Enum e(set_); !e.empty(); e.popFront()) {
RegExpShared* shared = e.front();
if (shared->needsSweep(rt)) {
js_delete(shared);
e.removeFront();
} else {
// Discard code to avoid holding onto ExecutablePools.
if (rt->gc.isHeapCompacting())
shared->discardJitCode();
}
}
if (matchResultTemplateObject_ &&
IsAboutToBeFinalized(&matchResultTemplateObject_))
{
@ -1360,30 +1281,24 @@ RegExpCompartment::sweep(JSRuntime* rt)
bool
RegExpCompartment::get(JSContext* cx, JSAtom* source, RegExpFlag flags, RegExpGuard* g)
{
Key key(source, flags);
Set::AddPtr p = set_.lookupForAdd(key);
DependentAddPtr<Set> p(cx, set_.get(), Key(source, flags));
if (p) {
// Trigger a read barrier on existing RegExpShared instances fetched
// from the table (which only holds weak references).
RegExpSharedReadBarrier(cx, *p);
g->init(**p);
g->set(*p);
return true;
}
ScopedJSDeletePtr<RegExpShared> shared(cx->new_<RegExpShared>(source, flags));
auto shared = Allocate<RegExpShared>(cx);
if (!shared)
return false;
if (!set_.add(p, shared)) {
new (shared) RegExpShared(source, flags);
if (!p.add(cx, set_.get(), Key(source, flags), shared)) {
ReportOutOfMemory(cx);
return false;
}
// Trace RegExpShared instances created during an incremental GC.
RegExpSharedReadBarrier(cx, shared);
g->init(*shared.forget());
g->set(shared);
return true;
}
@ -1400,13 +1315,7 @@ RegExpCompartment::get(JSContext* cx, HandleAtom atom, JSString* opt, RegExpGuar
size_t
RegExpCompartment::sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf)
{
size_t n = 0;
n += set_.sizeOfExcludingThis(mallocSizeOf);
for (Set::Enum e(set_); !e.empty(); e.popFront()) {
RegExpShared* shared = e.front();
n += shared->sizeOfIncludingThis(mallocSizeOf);
}
return n;
return set_.sizeOfExcludingThis(mallocSizeOf);
}
/* Functions */
@ -1434,7 +1343,7 @@ js::CloneRegExpObject(JSContext* cx, JSObject* obj_)
return nullptr;
clone->initAndZeroLastIndex(source, g->getFlags(), cx);
clone->setShared(*g.re());
clone->setShared(*g);
return clone;
}
@ -1568,3 +1477,10 @@ js::RegExpToSharedNonInline(JSContext* cx, HandleObject obj, js::RegExpGuard* g)
{
return RegExpToShared(cx, obj, g);
}
JS::ubi::Node::Size
JS::ubi::Concrete<RegExpShared>::size(mozilla::MallocSizeOf mallocSizeOf) const
{
return js::gc::Arena::thingSize(gc::AllocKind::REGEXP_SHARED) +
get().sizeOfExcludingThis(mallocSizeOf);
}

Просмотреть файл

@ -46,7 +46,7 @@ class RegExpStatics;
namespace frontend { class TokenStream; }
enum RegExpFlag
enum RegExpFlag : uint8_t
{
IgnoreCaseFlag = 0x01,
GlobalFlag = 0x02,
@ -92,7 +92,7 @@ CloneRegExpObject(JSContext* cx, JSObject* regexp);
* objects when we are preserving jitcode in their zone, to avoid the same
* recompilation inefficiencies as normal Ion and baseline compilation.
*/
class RegExpShared
class RegExpShared : public gc::TenuredCell
{
public:
enum CompilationMode {
@ -113,7 +113,7 @@ class RegExpShared
struct RegExpCompilation
{
HeapPtr<jit::JitCode*> jitCode;
ReadBarriered<jit::JitCode*> jitCode;
uint8_t* byteCode;
RegExpCompilation() : byteCode(nullptr) {}
@ -125,12 +125,11 @@ class RegExpShared
};
/* Source to the RegExp, for lazy compilation. */
HeapPtr<JSAtom*> source;
HeapPtr<JSAtom*> source;
RegExpFlag flags;
size_t parenCount;
bool canStringMatch;
bool marked_;
size_t parenCount;
RegExpCompilation compilationArray[4];
@ -146,6 +145,8 @@ class RegExpShared
Vector<uint8_t*, 0, SystemAllocPolicy> tables;
/* Internal functions. */
RegExpShared(JSAtom* source, RegExpFlag flags);
bool compile(JSContext* cx, HandleLinearString input,
CompilationMode mode, ForceByteCodeEnum force);
bool compile(JSContext* cx, HandleAtom pattern, HandleLinearString input,
@ -163,7 +164,6 @@ class RegExpShared
}
public:
RegExpShared(JSAtom* source, RegExpFlag flags);
~RegExpShared();
// Execute this RegExp on input starting from searchIndex, filling in
@ -203,16 +203,9 @@ class RegExpShared
|| isCompiled(MatchOnly, true) || isCompiled(MatchOnly, false);
}
void trace(JSTracer* trc);
bool needsSweep(JSRuntime* rt);
void traceChildren(JSTracer* trc);
void discardJitCode();
bool marked() const { return marked_; }
void clearMarked() { marked_ = false; }
bool isMarkedGray() const;
void unmarkGray();
static size_t offsetOfSource() {
return offsetof(RegExpShared, source);
}
@ -236,60 +229,13 @@ class RegExpShared
+ offsetof(RegExpCompilation, jitCode);
}
size_t sizeOfIncludingThis(mozilla::MallocSizeOf mallocSizeOf);
size_t sizeOfExcludingThis(mozilla::MallocSizeOf mallocSizeOf);
#ifdef DEBUG
bool dumpBytecode(JSContext* cx, bool match_only, HandleLinearString input);
#endif
};
/*
* Extend the lifetime of a given RegExpShared to at least the lifetime of
* the guard object. See Regular Expression comment at the top.
*/
class RegExpGuard : public JS::CustomAutoRooter
{
RegExpShared* re_;
RegExpGuard(const RegExpGuard&) = delete;
void operator=(const RegExpGuard&) = delete;
public:
explicit RegExpGuard(JSContext* cx)
: CustomAutoRooter(cx), re_(nullptr)
{}
RegExpGuard(JSContext* cx, RegExpShared& re)
: CustomAutoRooter(cx), re_(nullptr)
{
init(re);
}
~RegExpGuard() {
release();
}
public:
void init(RegExpShared& re) {
MOZ_ASSERT(!initialized());
re_ = &re;
}
void release() {
re_ = nullptr;
}
virtual void trace(JSTracer* trc) {
if (re_)
re_->trace(trc);
}
bool initialized() const { return !!re_; }
RegExpShared* re() const { MOZ_ASSERT(initialized()); return re_; }
RegExpShared* operator->() { return re(); }
RegExpShared& operator*() { return *re(); }
};
class RegExpCompartment
{
struct Key {
@ -300,8 +246,9 @@ class RegExpCompartment
Key(JSAtom* atom, RegExpFlag flag)
: atom(atom), flag(flag)
{ }
MOZ_IMPLICIT Key(RegExpShared* shared)
: atom(shared->getSource()), flag(shared->getFlags())
MOZ_IMPLICIT Key(const ReadBarriered<RegExpShared*>& shared)
: atom(shared.unbarrieredGet()->getSource()),
flag(shared.unbarrieredGet()->getFlags())
{ }
typedef Key Lookup;
@ -317,8 +264,8 @@ class RegExpCompartment
* The set of all RegExpShareds in the compartment. On every GC, every
* RegExpShared that was not marked is deleted and removed from the set.
*/
typedef HashSet<RegExpShared*, Key, RuntimeAllocPolicy> Set;
Set set_;
using Set = GCHashSet<ReadBarriered<RegExpShared*>, Key, RuntimeAllocPolicy>;
JS::WeakCache<Set> set_;
/*
* This is the template object where the result of re.exec() is based on,
@ -351,7 +298,7 @@ class RegExpCompartment
ArrayObject* createMatchResultTemplateObject(JSContext* cx);
public:
explicit RegExpCompartment(JSRuntime* rt);
explicit RegExpCompartment(Zone* zone);
~RegExpCompartment();
bool init(JSContext* cx);
@ -486,12 +433,17 @@ class RegExpObject : public NativeObject
static MOZ_MUST_USE bool getShared(JSContext* cx, Handle<RegExpObject*> regexp,
RegExpGuard* g);
bool hasShared() {
return !!sharedRef();
}
void setShared(RegExpShared& shared) {
MOZ_ASSERT(!maybeShared());
NativeObject::setPrivate(&shared);
MOZ_ASSERT(!hasShared());
sharedRef() = &shared;
}
static void trace(JSTracer* trc, JSObject* obj);
void trace(JSTracer* trc);
void initIgnoringLastIndex(HandleAtom source, RegExpFlag flags);
@ -512,8 +464,10 @@ class RegExpObject : public NativeObject
*/
static MOZ_MUST_USE bool createShared(JSContext* cx, Handle<RegExpObject*> regexp,
RegExpGuard* g);
RegExpShared* maybeShared() const {
return static_cast<RegExpShared*>(NativeObject::getPrivate(PRIVATE_SLOT));
ReadBarriered<RegExpShared*>& sharedRef() {
auto& ref = NativeObject::privateRef(PRIVATE_SLOT);
return reinterpret_cast<ReadBarriered<RegExpShared*>&>(ref);
}
/* Call setShared in preference to setPrivate. */
@ -559,4 +513,29 @@ StringHasRegExpMetaChars(JSLinearString* str);
} /* namespace js */
namespace JS {
namespace ubi {
template <>
class Concrete<js::RegExpShared> : TracerConcrete<js::RegExpShared>
{
protected:
explicit Concrete(js::RegExpShared* ptr) : TracerConcrete<js::RegExpShared>(ptr) { }
public:
static void construct(void* storage, js::RegExpShared* ptr) {
new (storage) Concrete(ptr);
}
CoarseType coarseType() const final { return CoarseType::Other; }
Size size(mozilla::MallocSizeOf mallocSizeOf) const override;
const char16_t* typeName() const override { return concreteTypeName; }
static const char16_t concreteTypeName[];
};
} // namespace ubi
} // namespace JS
#endif /* vm_RegExpObject_h */

Просмотреть файл

@ -19,6 +19,7 @@
#include "jsatominlines.h"
#include "jscntxtinlines.h"
#include "jsgcinlines.h"
namespace js {

Просмотреть файл

@ -312,6 +312,7 @@ template JS::Zone* TracerConcrete<js::LazyScript>::zone() const;
template JS::Zone* TracerConcrete<js::Shape>::zone() const;
template JS::Zone* TracerConcrete<js::BaseShape>::zone() const;
template JS::Zone* TracerConcrete<js::ObjectGroup>::zone() const;
template JS::Zone* TracerConcrete<js::RegExpShared>::zone() const;
template JS::Zone* TracerConcrete<js::Scope>::zone() const;
template JS::Zone* TracerConcrete<JS::Symbol>::zone() const;
template JS::Zone* TracerConcrete<JSString>::zone() const;
@ -334,6 +335,7 @@ template UniquePtr<EdgeRange> TracerConcrete<js::LazyScript>::edges(JSContext* c
template UniquePtr<EdgeRange> TracerConcrete<js::Shape>::edges(JSContext* cx, bool wantNames) const;
template UniquePtr<EdgeRange> TracerConcrete<js::BaseShape>::edges(JSContext* cx, bool wantNames) const;
template UniquePtr<EdgeRange> TracerConcrete<js::ObjectGroup>::edges(JSContext* cx, bool wantNames) const;
template UniquePtr<EdgeRange> TracerConcrete<js::RegExpShared>::edges(JSContext* cx, bool wantNames) const;
template UniquePtr<EdgeRange> TracerConcrete<js::Scope>::edges(JSContext* cx, bool wantNames) const;
template UniquePtr<EdgeRange> TracerConcrete<JS::Symbol>::edges(JSContext* cx, bool wantNames) const;
template UniquePtr<EdgeRange> TracerConcrete<JSString>::edges(JSContext* cx, bool wantNames) const;
@ -398,6 +400,7 @@ const char16_t Concrete<js::Shape>::concreteTypeName[] = u"js::Shape";
const char16_t Concrete<js::BaseShape>::concreteTypeName[] = u"js::BaseShape";
const char16_t Concrete<js::ObjectGroup>::concreteTypeName[] = u"js::ObjectGroup";
const char16_t Concrete<js::Scope>::concreteTypeName[] = u"js::Scope";
const char16_t Concrete<js::RegExpShared>::concreteTypeName[] = u"js::RegExpShared";
namespace JS {
namespace ubi {

Просмотреть файл

@ -1944,6 +1944,14 @@ ReportZoneStats(const JS::ZoneStats& zStats,
zStats.scopesMallocHeap,
"Arrays of binding names and other binding-related data.");
ZCREPORT_GC_BYTES(pathPrefix + NS_LITERAL_CSTRING("regexp-shareds/gc-heap"),
zStats.regExpSharedsGCHeap,
"Shared compiled regexp data.");
ZCREPORT_BYTES(pathPrefix + NS_LITERAL_CSTRING("regexp-shareds/malloc-heap"),
zStats.regExpSharedsMallocHeap,
"Shared compiled regexp data.");
ZCREPORT_BYTES(pathPrefix + NS_LITERAL_CSTRING("type-pool"),
zStats.typePool,
"Type sets and related data.");
@ -2978,6 +2986,10 @@ JSReporter::CollectReports(WindowPaths* windowPaths,
KIND_OTHER, rtStats.zTotals.unusedGCThings.jitcode,
"Unused jitcode cells within non-empty arenas.");
REPORT_BYTES(NS_LITERAL_CSTRING("js-main-runtime-gc-heap-committed/unused/gc-things/regexp-shareds"),
KIND_OTHER, rtStats.zTotals.unusedGCThings.regExpShared,
"Unused regexpshared cells within non-empty arenas.");
REPORT_BYTES(NS_LITERAL_CSTRING("js-main-runtime-gc-heap-committed/used/chunk-admin"),
KIND_OTHER, rtStats.gcHeapChunkAdmin,
"The same as 'explicit/js-non-window/gc-heap/chunk-admin'.");
@ -3029,6 +3041,10 @@ JSReporter::CollectReports(WindowPaths* windowPaths,
KIND_OTHER, rtStats.zTotals.jitCodesGCHeap,
"Used jitcode cells.");
MREPORT_BYTES(NS_LITERAL_CSTRING("js-main-runtime-gc-heap-committed/used/gc-things/regexp-shareds"),
KIND_OTHER, rtStats.zTotals.regExpSharedsGCHeap,
"Used regexpshared cells.");
MOZ_ASSERT(gcThingTotal == rtStats.gcHeapGCThings);
// Report xpconnect.

Просмотреть файл

@ -483,7 +483,10 @@ void TraceScriptHolder(nsISupports* aHolder, JSTracer* aTracer);
// Returns true if the JS::TraceKind is one the cycle collector cares about.
inline bool AddToCCKind(JS::TraceKind aKind)
{
return aKind == JS::TraceKind::Object || aKind == JS::TraceKind::Script || aKind == JS::TraceKind::Scope;
return aKind == JS::TraceKind::Object ||
aKind == JS::TraceKind::Script ||
aKind == JS::TraceKind::Scope ||
aKind == JS::TraceKind::RegExpShared;
}
bool