Bug 1182730 - Mark the JitcodeGlobalTable unconditionally when minor collecting. (r=terrence)

Shu-yu Guo 2015-07-22 10:49:42 -07:00
Parent 2917ac0397
Commit eb3728f68f
6 changed files with 72 additions and 27 deletions
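In short, the patch does three things: GCRuntime::markRuntime now traces the JitcodeGlobalTable unconditionally whenever the heap is being minor-collected; the markIfUnmarked family of entry methods is folded into a single mark<ShouldMarkProvider>() template parameterized on Unconditionally and IfUnmarked policies; and the evictNursery() workaround for bug 1176511 in UniqueTrackedTypes::getIndexOf is removed. Below is a minimal, self-contained sketch of the policy-template idea; Edge and markEdge are toy stand-ins invented for illustration, not SpiderMonkey types.

#include <cstdio>

// Toy stand-in for a traced edge; in the real patch the edges are fields like
// jitcode_ and script_, traced via TraceManuallyBarrieredEdge.
struct Edge { bool marked = false; };

// Minor GC policy: trace every edge, regardless of mark state.
struct Unconditionally {
    template <typename T> static bool ShouldMark(T*) { return true; }
};

// Major GC policy: only trace edges whose targets are not yet marked.
struct IfUnmarked {
    template <typename T> static bool ShouldMark(T* e) { return !e->marked; }
};

template <class ShouldMarkProvider>
bool markEdge(Edge* e) {
    if (!ShouldMarkProvider::ShouldMark(e))
        return false;
    e->marked = true;   // stands in for the actual tracing call
    return true;
}

int main() {
    Edge jitcode;
    jitcode.marked = true;                                    // marked in an earlier slice
    std::printf("%d\n", markEdge<IfUnmarked>(&jitcode));      // 0: already marked, skipped
    std::printf("%d\n", markEdge<Unconditionally>(&jitcode)); // 1: traced regardless
}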

View file

@@ -394,6 +394,9 @@ js::gc::GCRuntime::markRuntime(JSTracer* trc,
}
}
if (rt->isHeapMinorCollecting())
jit::JitRuntime::MarkJitcodeGlobalTableUnconditionally(trc);
for (ContextIter acx(rt); !acx.done(); acx.next())
acx->mark(trc);

View file

@@ -620,6 +620,16 @@ JitRuntime::Mark(JSTracer* trc)
}
}
/* static */ void
JitRuntime::MarkJitcodeGlobalTableUnconditionally(JSTracer* trc)
{
if (trc->runtime()->hasJitRuntime() &&
trc->runtime()->jitRuntime()->hasJitcodeGlobalTable())
{
trc->runtime()->jitRuntime()->getJitcodeGlobalTable()->markUnconditionally(trc);
}
}
/* static */ bool
JitRuntime::MarkJitcodeGlobalTableIteratively(JSTracer* trc)
{

View file

@@ -254,6 +254,7 @@ class JitRuntime
void freeOsrTempData();
static void Mark(JSTracer* trc);
static void MarkJitcodeGlobalTableUnconditionally(JSTracer* trc);
static bool MarkJitcodeGlobalTableIteratively(JSTracer* trc);
static void SweepJitcodeGlobalTable(JSRuntime* rt);

View file

@@ -748,6 +748,35 @@ JitcodeGlobalTable::setAllEntriesAsExpired(JSRuntime* rt)
r.front()->setAsExpired();
}
struct Unconditionally
{
template <typename T>
static bool ShouldMark(T* thingp) { return true; }
};
void
JitcodeGlobalTable::markUnconditionally(JSTracer* trc)
{
// Mark all entries unconditionally. This is done during minor collection
// to account for tenuring.
AutoSuppressProfilerSampling suppressSampling(trc->runtime());
for (Range r(*this); !r.empty(); r.popFront())
r.front()->mark<Unconditionally>(trc);
}
struct IfUnmarked
{
template <typename T>
static bool ShouldMark(T* thingp) { return !IsMarkedUnbarriered(thingp); }
};
template <>
bool IfUnmarked::ShouldMark<TypeSet::Type>(TypeSet::Type* type)
{
return !TypeSet::IsTypeMarked(type);
}
bool
JitcodeGlobalTable::markIteratively(JSTracer* trc)
{
@@ -773,6 +802,8 @@ JitcodeGlobalTable::markIteratively(JSTracer* trc)
// The approach above obviates the need for read barriers. The assumption
// above is checked in JitcodeGlobalTable::lookupForSampler.
MOZ_ASSERT(!trc->runtime()->isHeapMinorCollecting());
AutoSuppressProfilerSampling suppressSampling(trc->runtime());
uint32_t gen = trc->runtime()->profilerSampleBufferGen();
uint32_t lapCount = trc->runtime()->profilerSampleBufferLapCount();
@@ -802,7 +833,7 @@ JitcodeGlobalTable::markIteratively(JSTracer* trc)
if (!entry->zone()->isCollecting() || entry->zone()->isGCFinished())
continue;
markedAny |= entry->markIfUnmarked(trc);
markedAny |= entry->mark<IfUnmarked>(trc);
}
return markedAny;
@@ -825,10 +856,11 @@ JitcodeGlobalTable::sweep(JSRuntime* rt)
}
}
template <class ShouldMarkProvider>
bool
JitcodeGlobalEntry::BaseEntry::markJitcodeIfUnmarked(JSTracer* trc)
JitcodeGlobalEntry::BaseEntry::markJitcode(JSTracer* trc)
{
if (!IsMarkedUnbarriered(&jitcode_)) {
if (ShouldMarkProvider::ShouldMark(&jitcode_)) {
TraceManuallyBarrieredEdge(trc, &jitcode_, "jitcodglobaltable-baseentry-jitcode");
return true;
}
@@ -848,10 +880,11 @@ JitcodeGlobalEntry::BaseEntry::isJitcodeAboutToBeFinalized()
return IsAboutToBeFinalizedUnbarriered(&jitcode_);
}
template <class ShouldMarkProvider>
bool
JitcodeGlobalEntry::BaselineEntry::markIfUnmarked(JSTracer* trc)
JitcodeGlobalEntry::BaselineEntry::mark(JSTracer* trc)
{
if (!IsMarkedUnbarriered(&script_)) {
if (ShouldMarkProvider::ShouldMark(&script_)) {
TraceManuallyBarrieredEdge(trc, &script_, "jitcodeglobaltable-baselineentry-script");
return true;
}
@@ -871,13 +904,14 @@ JitcodeGlobalEntry::BaselineEntry::isMarkedFromAnyThread()
script_->arenaHeader()->allocatedDuringIncremental;
}
template <class ShouldMarkProvider>
bool
JitcodeGlobalEntry::IonEntry::markIfUnmarked(JSTracer* trc)
JitcodeGlobalEntry::IonEntry::mark(JSTracer* trc)
{
bool markedAny = false;
for (unsigned i = 0; i < numScripts(); i++) {
if (!IsMarkedUnbarriered(&sizedScriptList()->pairs[i].script)) {
if (ShouldMarkProvider::ShouldMark(&sizedScriptList()->pairs[i].script)) {
TraceManuallyBarrieredEdge(trc, &sizedScriptList()->pairs[i].script,
"jitcodeglobaltable-ionentry-script");
markedAny = true;
@@ -890,15 +924,15 @@ JitcodeGlobalEntry::IonEntry::markIfUnmarked(JSTracer* trc)
for (IonTrackedTypeWithAddendum* iter = optsAllTypes_->begin();
iter != optsAllTypes_->end(); iter++)
{
if (!TypeSet::IsTypeMarked(&iter->type)) {
if (ShouldMarkProvider::ShouldMark(&iter->type)) {
TypeSet::MarkTypeUnbarriered(trc, &iter->type, "jitcodeglobaltable-ionentry-type");
markedAny = true;
}
if (iter->hasAllocationSite() && !IsMarkedUnbarriered(&iter->script)) {
if (iter->hasAllocationSite() && ShouldMarkProvider::ShouldMark(&iter->script)) {
TraceManuallyBarrieredEdge(trc, &iter->script,
"jitcodeglobaltable-ionentry-type-addendum-script");
markedAny = true;
} else if (iter->hasConstructor() && !IsMarkedUnbarriered(&iter->constructor)) {
} else if (iter->hasConstructor() && ShouldMarkProvider::ShouldMark(&iter->constructor)) {
TraceManuallyBarrieredEdge(trc, &iter->constructor,
"jitcodeglobaltable-ionentry-type-addendum-constructor");
markedAny = true;
@@ -957,12 +991,13 @@ JitcodeGlobalEntry::IonEntry::isMarkedFromAnyThread()
return true;
}
template <class ShouldMarkProvider>
bool
JitcodeGlobalEntry::IonCacheEntry::markIfUnmarked(JSTracer* trc)
JitcodeGlobalEntry::IonCacheEntry::mark(JSTracer* trc)
{
JitcodeGlobalEntry entry;
RejoinEntry(trc->runtime(), *this, nativeStartAddr(), &entry);
return entry.markIfUnmarked(trc);
return entry.mark<ShouldMarkProvider>(trc);
}
void

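The comment in markUnconditionally above ("This is done during minor collection to account for tenuring") is the heart of the change: table entries hold edges to GC things that a minor collection can move out of the nursery, so every entry is traced during the minor GC and those edges end up pointing at the tenured copies. The following is a toy illustration of that forwarding step; Thing, ForwardingMap and traceEdge are invented for the example and are not the real Nursery or tracer machinery.

#include <cstdio>
#include <unordered_map>

struct Thing { int value; };

// Pretend relocation table built by the minor collector:
// nursery address -> tenured copy.
using ForwardingMap = std::unordered_map<Thing*, Thing*>;

// Stand-in for tracing an edge: if the target moved, update the edge in place.
void traceEdge(const ForwardingMap& forwarded, Thing** edge) {
    auto it = forwarded.find(*edge);
    if (it != forwarded.end())
        *edge = it->second;
}

int main() {
    Thing nurseryCopy{42};
    Thing tenuredCopy{42};
    ForwardingMap forwarded{{&nurseryCopy, &tenuredCopy}};

    Thing* tableEntryEdge = &nurseryCopy;   // what a table entry might hold
    traceEdge(forwarded, &tableEntryEdge);  // unconditional trace during minor GC
    std::printf("forwarded: %d\n", tableEntryEdge == &tenuredCopy);   // prints 1
}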
View file

@@ -207,7 +207,7 @@ class JitcodeGlobalEntry
return startsBelowPointer(ptr) && endsAbovePointer(ptr);
}
bool markJitcodeIfUnmarked(JSTracer* trc);
template <class ShouldMarkProvider> bool markJitcode(JSTracer* trc);
bool isJitcodeMarkedFromAnyThread();
bool isJitcodeAboutToBeFinalized();
};
@@ -368,7 +368,7 @@ class JitcodeGlobalEntry
void forEachOptimizationTypeInfo(JSRuntime* rt, uint8_t index,
IonTrackedOptimizationsTypeInfo::ForEachOpAdapter& op);
bool markIfUnmarked(JSTracer* trc);
template <class ShouldMarkProvider> bool mark(JSTracer* trc);
void sweep();
bool isMarkedFromAnyThread();
};
@@ -426,7 +426,7 @@ class JitcodeGlobalEntry
void youngestFrameLocationAtAddr(JSRuntime* rt, void* ptr,
JSScript** script, jsbytecode** pc) const;
bool markIfUnmarked(JSTracer* trc);
template <class ShouldMarkProvider> bool mark(JSTracer* trc);
void sweep();
bool isMarkedFromAnyThread();
};
@@ -475,7 +475,7 @@ class JitcodeGlobalEntry
void forEachOptimizationTypeInfo(JSRuntime* rt, uint8_t index,
IonTrackedOptimizationsTypeInfo::ForEachOpAdapter& op);
bool markIfUnmarked(JSTracer* trc);
template <class ShouldMarkProvider> bool mark(JSTracer* trc);
void sweep(JSRuntime* rt);
bool isMarkedFromAnyThread(JSRuntime* rt);
};
@@ -911,17 +911,18 @@ class JitcodeGlobalEntry
return baseEntry().jitcode()->zone();
}
bool markIfUnmarked(JSTracer* trc) {
bool markedAny = baseEntry().markJitcodeIfUnmarked(trc);
template <class ShouldMarkProvider>
bool mark(JSTracer* trc) {
bool markedAny = baseEntry().markJitcode<ShouldMarkProvider>(trc);
switch (kind()) {
case Ion:
markedAny |= ionEntry().markIfUnmarked(trc);
markedAny |= ionEntry().mark<ShouldMarkProvider>(trc);
break;
case Baseline:
markedAny |= baselineEntry().markIfUnmarked(trc);
markedAny |= baselineEntry().mark<ShouldMarkProvider>(trc);
break;
case IonCache:
markedAny |= ionCacheEntry().markIfUnmarked(trc);
markedAny |= ionCacheEntry().mark<ShouldMarkProvider>(trc);
case Dummy:
break;
default:
@@ -1052,6 +1053,7 @@ class JitcodeGlobalTable
void releaseEntry(JitcodeGlobalEntry& entry, JitcodeGlobalEntry** prevTower, JSRuntime* rt);
void setAllEntriesAsExpired(JSRuntime* rt);
void markUnconditionally(JSTracer* trc);
bool markIteratively(JSTracer* trc);
void sweep(JSRuntime* rt);

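The header now exposes a single templated mark() per entry kind in place of the parallel markIfUnmarked() methods, and JitcodeGlobalEntry::mark() dispatches on kind() before applying the chosen policy. A rough, self-contained sketch of that shape follows; Entry and Edge are toy types, not the real JitcodeGlobalEntry, and the real code traces through TraceManuallyBarrieredEdge rather than setting a flag.

#include <cstdio>

struct Edge { bool marked = false; };

struct Unconditionally { static bool ShouldMark(Edge*) { return true; } };
struct IfUnmarked      { static bool ShouldMark(Edge* e) { return !e->marked; } };

// Toy entry: every kind has a jitcode edge, Ion/Baseline entries also reference a script.
struct Entry {
    enum Kind { Ion, Baseline, Dummy } kind;
    Edge jitcode;
    Edge script;

    template <class ShouldMarkProvider>
    static bool markEdge(Edge* e) {
        if (!ShouldMarkProvider::ShouldMark(e))
            return false;
        e->marked = true;           // stands in for the actual tracing call
        return true;
    }

    // One templated mark() replaces the old per-policy markIfUnmarked methods.
    template <class ShouldMarkProvider>
    bool mark() {
        bool markedAny = markEdge<ShouldMarkProvider>(&jitcode);
        switch (kind) {
          case Ion:
          case Baseline:
            markedAny |= markEdge<ShouldMarkProvider>(&script);
            break;
          case Dummy:
            break;
        }
        return markedAny;
    }
};

int main() {
    Entry e{Entry::Ion};
    e.jitcode.marked = true;                          // jitcode already marked earlier
    std::printf("%d\n", e.mark<IfUnmarked>());        // 1: the script edge still needed marking
    std::printf("%d\n", e.mark<Unconditionally>());   // 1: everything re-traced
}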
View file

@@ -384,12 +384,6 @@ class jit::UniqueTrackedTypes
bool
UniqueTrackedTypes::getIndexOf(JSContext* cx, TypeSet::Type ty, uint8_t* indexp)
{
// FIXME bug 1176511. It is unduly onerous to make nursery things work
// correctly as keys of hash tables. Until then, since TypeSet::Types may
// be in the nursery, we evict the nursery before tracking types.
cx->runtime()->gc.evictNursery();
MOZ_ASSERT_IF(ty.isSingletonUnchecked(), !IsInsideNursery(ty.singleton()));
TypesMap::AddPtr p = map_.lookupForAdd(ty);
if (p) {
*indexp = p->value();