Bug 1154115 - Rewrite the JSAPI profiling API to use a FrameHandle, so as to avoid multiple lookups in JitcodeGlobalTable. (r=djvj)

This commit is contained in:
Shu-yu Guo 2015-05-11 14:16:44 -07:00
Parent 7147d5e0e3
Commit d91c9a82a2
6 changed files with 151 additions and 76 deletions

View file

@@ -10,11 +10,11 @@
#include "mozilla/Alignment.h"
#include "mozilla/Maybe.h"
#include <stdint.h>
#include "jsbytecode.h"
#include "js/Utility.h"
struct JSRuntime;
class JSScript;
namespace js {
class Activation;
@@ -28,6 +28,9 @@ namespace js {
namespace JS {
struct ForEachTrackedOptimizationAttemptOp;
struct ForEachTrackedOptimizationTypeInfoOp;
// This iterator can be used to walk the stack of a thread suspended at an
// arbitrary pc. To provide accurate results, profiling must have been enabled
// (via EnableRuntimeProfilingStack) before executing the callstack being
@@ -128,9 +131,6 @@ class JS_PUBLIC_API(ProfilingFrameIterator)
bool iteratorDone();
};
extern JS_PUBLIC_API(ProfilingFrameIterator::FrameKind)
GetProfilingFrameKindFromNativeAddr(JSRuntime* runtime, void* pc);
JS_FRIEND_API(bool)
IsProfilingEnabledForRuntime(JSRuntime* runtime);
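As context for these declarations, a minimal sketch, not part of this commit, of how an embedder's sampler might gate its stack walk on this friend API; the hook itself is hypothetical:

#include "js/ProfilingFrameIterator.h"

// Hypothetical sampler hook (illustrative only): skip the walk unless
// profiling was enabled (via EnableRuntimeProfilingStack) before the
// sampled code started running, since only then are results accurate.
static void MaybeSampleRuntime(JSRuntime* rt)
{
    if (!JS::IsProfilingEnabledForRuntime(rt))
        return;
    // ... capture register state and walk the stack here ...
}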
@@ -148,8 +148,40 @@ UpdateJSRuntimeProfilerSampleBufferGen(JSRuntime* runtime, uint32_t generation,
struct ForEachProfiledFrameOp
{
// A handle to the underlying JitcodeGlobalEntry, so as to avoid repeated
// lookups on JitcodeGlobalTable.
class MOZ_STACK_CLASS FrameHandle
{
friend JS_PUBLIC_API(void) JS::ForEachProfiledFrame(JSRuntime* rt, void* addr,
ForEachProfiledFrameOp& op);
JSRuntime* rt_;
js::jit::JitcodeGlobalEntry& entry_;
void* addr_;
void* canonicalAddr_;
const char* label_;
uint32_t depth_;
mozilla::Maybe<uint8_t> optsIndex_;
FrameHandle(JSRuntime* rt, js::jit::JitcodeGlobalEntry& entry, void* addr,
const char* label, uint32_t depth);
void updateHasTrackedOptimizations();
public:
const char* label() const { return label_; }
uint32_t depth() const { return depth_; }
bool hasTrackedOptimizations() const { return optsIndex_.isSome(); }
void* canonicalAddress() const { return canonicalAddr_; }
ProfilingFrameIterator::FrameKind frameKind() const;
void forEachOptimizationAttempt(ForEachTrackedOptimizationAttemptOp& op,
JSScript** scriptOut, jsbytecode** pcOut) const;
void forEachOptimizationTypeInfo(ForEachTrackedOptimizationTypeInfoOp& op) const;
};
// Called once per frame.
virtual void operator()(const char* label, bool mightHaveTrackedOptimizations) = 0;
virtual void operator()(const FrameHandle& frame) = 0;
};
JS_PUBLIC_API(void)
ForEachProfiledFrame(JSRuntime* rt, void* addr, ForEachProfiledFrameOp& op);
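To make the new shape concrete, a minimal consumer sketch follows; it is not part of this commit, and the class name and printing are illustrative. The handle gives the op everything the old (label, mightHaveTrackedOptimizations) callback carried, plus depth, kind, and canonical address, with no further JitcodeGlobalTable lookups:

#include <stdio.h>
#include "js/ProfilingFrameIterator.h"

// Hypothetical consumer of the new single-callback API. ForEachProfiledFrame
// invokes this once per inline frame; depth 0 is the youngest frame.
class StackDumpOp : public JS::ForEachProfiledFrameOp
{
  public:
    void operator()(const FrameHandle& frame) override {
        const char* kind =
            frame.frameKind() == JS::ProfilingFrameIterator::Frame_Baseline
            ? "baseline" : "ion";
        printf("%u: %s (%s)\n", (unsigned) frame.depth(), frame.label(), kind);
    }
};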

View file

@@ -182,11 +182,6 @@ struct ForEachTrackedOptimizationAttemptOp
virtual void operator()(TrackedStrategy strategy, TrackedOutcome outcome) = 0;
};
JS_PUBLIC_API(void)
ForEachTrackedOptimizationAttempt(JSRuntime* rt, void* addr, uint8_t index,
ForEachTrackedOptimizationAttemptOp& op,
JSScript** scriptOut, jsbytecode** pcOut);
struct ForEachTrackedOptimizationTypeInfoOp
{
// Called 0+ times per entry, once for each type in the type set that Ion
@@ -228,13 +223,6 @@ struct ForEachTrackedOptimizationTypeInfoOp
virtual void operator()(TrackedTypeSite site, const char* mirType) = 0;
};
JS_PUBLIC_API(void)
ForEachTrackedOptimizationTypeInfo(JSRuntime* rt, void* addr, uint8_t index,
ForEachTrackedOptimizationTypeInfoOp& op);
JS_PUBLIC_API(mozilla::Maybe<uint8_t>)
TrackedOptimizationIndexAtAddr(JSRuntime* rt, void* addr, void** entryAddr);
} // namespace JS
#endif // js_TrackedOptimizationInfo_h

View file

@@ -38,6 +38,14 @@ RegionAtAddr(const JitcodeGlobalEntry::IonEntry& entry, void* ptr,
return entry.regionTable()->regionEntry(regionIdx);
}
void*
JitcodeGlobalEntry::IonEntry::canonicalNativeAddrFor(JSRuntime* rt, void* ptr) const
{
uint32_t ptrOffset;
JitcodeRegionEntry region = RegionAtAddr(*this, ptr, &ptrOffset);
return (void*)(((uint8_t*) nativeStartAddr()) + region.nativeOffset());
}
bool
JitcodeGlobalEntry::IonEntry::callStackAtAddr(JSRuntime* rt, void* ptr,
BytecodeLocationVector& results,
@@ -148,6 +156,14 @@ JitcodeGlobalEntry::IonEntry::destroy()
optsAllTypes_ = nullptr;
}
void*
JitcodeGlobalEntry::BaselineEntry::canonicalNativeAddrFor(JSRuntime* rt, void* ptr) const
{
// TODO: We can't yet normalize Baseline addresses until we unify
// BaselineScript's PCMappingEntries with JitcodeGlobalMap.
return ptr;
}
bool
JitcodeGlobalEntry::BaselineEntry::callStackAtAddr(JSRuntime* rt, void* ptr,
BytecodeLocationVector& results,
@@ -209,6 +225,14 @@ RejoinEntry(JSRuntime* rt, const JitcodeGlobalEntry::IonCacheEntry& cache,
MOZ_ASSERT(entry->isIon());
}
void*
JitcodeGlobalEntry::IonCacheEntry::canonicalNativeAddrFor(JSRuntime* rt, void* ptr) const
{
JitcodeGlobalEntry entry;
RejoinEntry(rt, *this, ptr, &entry);
return entry.canonicalNativeAddrFor(rt, rejoinAddr());
}
bool
JitcodeGlobalEntry::IonCacheEntry::callStackAtAddr(JSRuntime* rt, void* ptr,
BytecodeLocationVector& results,
@@ -1528,17 +1552,46 @@ JitcodeIonTable::WriteIonTable(CompactBufferWriter& writer,
} // namespace jit
} // namespace js
JS_PUBLIC_API(JS::ProfilingFrameIterator::FrameKind)
JS::GetProfilingFrameKindFromNativeAddr(JSRuntime* rt, void* addr)
{
    js::jit::JitcodeGlobalTable* table = rt->jitRuntime()->getJitcodeGlobalTable();
    js::jit::JitcodeGlobalEntry entry;
    table->lookupInfallible(addr, &entry, rt);
    MOZ_ASSERT(entry.isIon() || entry.isIonCache() || entry.isBaseline());
    if (entry.isBaseline())
        return JS::ProfilingFrameIterator::Frame_Baseline;
    return JS::ProfilingFrameIterator::Frame_Ion;
}
JS::ForEachProfiledFrameOp::FrameHandle::FrameHandle(JSRuntime* rt, JitcodeGlobalEntry& entry,
                                                     void* addr, const char* label, uint32_t depth)
  : rt_(rt),
    entry_(entry),
    addr_(addr),
    canonicalAddr_(nullptr),
    label_(label),
    depth_(depth)
{
    updateHasTrackedOptimizations();
    if (!canonicalAddr_) {
        // If the entry has tracked optimizations, updateHasTrackedOptimizations
        // would have updated the canonical address.
        MOZ_ASSERT_IF(entry_.isIon(), !hasTrackedOptimizations());
        canonicalAddr_ = entry_.canonicalNativeAddrFor(rt_, addr_);
    }
}
JS::ProfilingFrameIterator::FrameKind
JS::ForEachProfiledFrameOp::FrameHandle::frameKind() const
{
    if (entry_.isBaseline())
        return JS::ProfilingFrameIterator::Frame_Baseline;
    return JS::ProfilingFrameIterator::Frame_Ion;
}
JS_PUBLIC_API(void)
JS::ForEachProfiledFrame(JSRuntime* rt, void* addr, ForEachProfiledFrameOp& op)
{
    js::jit::JitcodeGlobalTable* table = rt->jitRuntime()->getJitcodeGlobalTable();
    js::jit::JitcodeGlobalEntry entry;
    table->lookupInfallible(addr, &entry, rt);
    MOZ_ASSERT(entry.isIon() || entry.isIonCache() || entry.isBaseline());
    // Extract the stack for the entry. Assume maximum inlining depth is <64.
    const char* labels[64];
    uint32_t depth = entry.callStackAtAddr(rt, addr, labels, 64);
    MOZ_ASSERT(depth < 64);
    for (uint32_t i = depth; i != 0; i--) {
        JS::ForEachProfiledFrameOp::FrameHandle handle(rt, entry, addr, labels[i - 1], i - 1);
        op(handle);
    }
}
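A hedged sketch of a call site, reusing the hypothetical StackDumpOp from the header example above, makes the payoff visible: the infallible table lookup now happens once per sampled address, and every FrameHandle reuses it:

// Hypothetical call site (not in this patch). One lookupInfallible() per
// sampled pc; each FrameHandle borrows the same JitcodeGlobalEntry.
static void ResolveSampledAddress(JSRuntime* rt, void* sampledPC)
{
    StackDumpOp op;
    JS::ForEachProfiledFrame(rt, sampledPC, op);
}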

View file

@@ -318,6 +318,8 @@ class JitcodeGlobalEntry
return -1;
}
void* canonicalNativeAddrFor(JSRuntime* rt, void* ptr) const;
bool callStackAtAddr(JSRuntime* rt, void* ptr, BytecodeLocationVector& results,
uint32_t* depth) const;
@@ -405,6 +407,8 @@ class JitcodeGlobalEntry
void destroy();
void* canonicalNativeAddrFor(JSRuntime* rt, void* ptr) const;
bool callStackAtAddr(JSRuntime* rt, void* ptr, BytecodeLocationVector& results,
uint32_t* depth) const;
@@ -437,6 +441,8 @@ class JitcodeGlobalEntry
void destroy() {}
void* canonicalNativeAddrFor(JSRuntime* rt, void* ptr) const;
bool callStackAtAddr(JSRuntime* rt, void* ptr, BytecodeLocationVector& results,
uint32_t* depth) const;
@@ -462,6 +468,10 @@ class JitcodeGlobalEntry
void destroy() {}
void* canonicalNativeAddrFor(JSRuntime* rt, void* ptr) const {
return nullptr;
}
bool callStackAtAddr(JSRuntime* rt, void* ptr, BytecodeLocationVector& results,
uint32_t* depth) const
{
@@ -705,6 +715,22 @@ class JitcodeGlobalEntry
return query_;
}
void* canonicalNativeAddrFor(JSRuntime* rt, void* ptr) const {
switch (kind()) {
case Ion:
return ionEntry().canonicalNativeAddrFor(rt, ptr);
case Baseline:
return baselineEntry().canonicalNativeAddrFor(rt, ptr);
case IonCache:
return ionCacheEntry().canonicalNativeAddrFor(rt, ptr);
case Dummy:
return dummyEntry().canonicalNativeAddrFor(rt, ptr);
default:
MOZ_CRASH("Invalid JitcodeGlobalEntry kind.");
}
return nullptr;
}
// Read the inline call stack at a given point in the native code and append into
// the given vector. Innermost (script,pc) pair will be appended first, and
// outermost appended last.

View file

@@ -1132,18 +1132,6 @@ IonBuilder::trackInlineSuccessUnchecked(InliningStatus status)
trackOptimizationOutcome(TrackedOutcome::Inlined);
}
JS_PUBLIC_API(void)
JS::ForEachTrackedOptimizationAttempt(JSRuntime* rt, void* addr, uint8_t index,
ForEachTrackedOptimizationAttemptOp& op,
JSScript** scriptOut, jsbytecode** pcOut)
{
JitcodeGlobalTable* table = rt->jitRuntime()->getJitcodeGlobalTable();
JitcodeGlobalEntry entry;
table->lookupInfallible(addr, &entry, rt);
entry.youngestFrameLocationAtAddr(rt, addr, scriptOut, pcOut);
entry.trackedOptimizationAttempts(index).forEach(op);
}
static void
InterpretedFunctionFilenameAndLineNumber(JSFunction* fun, const char** filename,
Maybe<unsigned>* lineno)
@@ -1261,28 +1249,35 @@ IonTrackedOptimizationsTypeInfo::ForEachOpAdapter::operator()(JS::TrackedTypeSite
op_(site, StringFromMIRType(mirType));
}
JS_PUBLIC_API(void)
JS::ForEachTrackedOptimizationTypeInfo(JSRuntime* rt, void* addr, uint8_t index,
                                       ForEachTrackedOptimizationTypeInfoOp& op)
{
    JitcodeGlobalTable* table = rt->jitRuntime()->getJitcodeGlobalTable();
    JitcodeGlobalEntry entry;
    table->lookupInfallible(addr, &entry, rt);
    IonTrackedOptimizationsTypeInfo::ForEachOpAdapter adapter(op);
    entry.trackedOptimizationTypeInfo(index).forEach(adapter, entry.allTrackedTypes());
}
JS_PUBLIC_API(Maybe<uint8_t>)
JS::TrackedOptimizationIndexAtAddr(JSRuntime* rt, void* addr, void** entryAddr)
{
    JitcodeGlobalTable* table = rt->jitRuntime()->getJitcodeGlobalTable();
    JitcodeGlobalEntry entry;
    table->lookupInfallible(addr, &entry, rt);
    if (!entry.hasTrackedOptimizations())
        return Nothing();
    uint32_t entryOffset = 0;
    Maybe<uint8_t> index = entry.trackedOptimizationIndexAtAddr(addr, &entryOffset);
    if (index.isSome())
        *entryAddr = (void*)(((uint8_t*) entry.nativeStartAddr()) + entryOffset);
    return index;
}
typedef JS::ForEachProfiledFrameOp::FrameHandle FrameHandle;
void
FrameHandle::updateHasTrackedOptimizations()
{
    // All inlined frames will have the same optimization information by
    // virtue of sharing the JitcodeGlobalEntry, but such information is
    // only interpretable on the youngest frame.
    if (depth() != 0)
        return;
    if (!entry_.hasTrackedOptimizations())
        return;
    uint32_t entryOffset;
    optsIndex_ = entry_.trackedOptimizationIndexAtAddr(addr_, &entryOffset);
    if (optsIndex_.isSome())
        canonicalAddr_ = (void*)(((uint8_t*) entry_.nativeStartAddr()) + entryOffset);
}
void
FrameHandle::forEachOptimizationAttempt(ForEachTrackedOptimizationAttemptOp& op,
                                        JSScript** scriptOut, jsbytecode** pcOut) const
{
    entry_.trackedOptimizationAttempts(*optsIndex_).forEach(op);
    entry_.youngestFrameLocationAtAddr(rt_, addr_, scriptOut, pcOut);
}
void
FrameHandle::forEachOptimizationTypeInfo(ForEachTrackedOptimizationTypeInfoOp& op) const
{
    IonTrackedOptimizationsTypeInfo::ForEachOpAdapter adapter(op);
    entry_.trackedOptimizationTypeInfo(*optsIndex_).forEach(adapter, entry_.allTrackedTypes());
}
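With the free functions gone, optimization data is reached through the handle on the youngest frame. A sketch under that reading follows; the classes and counting are illustrative, and ForEachTrackedOptimizationTypeInfoOp's remaining pure-virtual hooks are elided:

#include <stdint.h>
#include "js/ProfilingFrameIterator.h"
#include "js/TrackedOptimizationInfo.h"

// Hypothetical attempt visitor: the only hook this op needs is the
// (strategy, outcome) overload shown in the header above.
class CountAttemptsOp : public JS::ForEachTrackedOptimizationAttemptOp
{
  public:
    uint32_t attempts = 0;
    void operator()(JS::TrackedStrategy strategy, JS::TrackedOutcome outcome) override {
        attempts++;  // a real profiler would record strategy and outcome
    }
};

// Hypothetical frame visitor: only depth-0 frames can report tracked
// optimizations, so the check below filters the older inline frames.
class OptReportOp : public JS::ForEachProfiledFrameOp
{
  public:
    void operator()(const FrameHandle& frame) override {
        if (!frame.hasTrackedOptimizations())
            return;
        CountAttemptsOp counter;
        JSScript* script;
        jsbytecode* pc;
        frame.forEachOptimizationAttempt(counter, &script, &pc);
    }
};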

View file

@@ -1954,22 +1954,3 @@ JS::ProfilingFrameIterator::isJit() const
{
return activation_->isJit();
}
JS_PUBLIC_API(void)
JS::ForEachProfiledFrame(JSRuntime* rt, void* addr, ForEachProfiledFrameOp& op)
{
jit::JitcodeGlobalTable* table = rt->jitRuntime()->getJitcodeGlobalTable();
jit::JitcodeGlobalEntry entry;
table->lookupInfallible(addr, &entry, rt);
// Extract the stack for the entry. Assume maximum inlining depth is <64
const char* labels[64];
uint32_t depth = entry.callStackAtAddr(rt, addr, labels, 64);
MOZ_ASSERT(depth < 64);
for (uint32_t i = depth; i != 0; i--) {
// All inlined frames will have the same optimization information by
// virtue of sharing the JitcodeGlobalEntry, but such information is
// only interpretable on the youngest frame.
op(labels[i - 1], i == 1 && entry.hasTrackedOptimizations());
}
}