From 870604424c029c61d2145e5ab371a2a6cace8eb6 Mon Sep 17 00:00:00 2001 From: Luke Wagner Date: Mon, 21 Jul 2014 11:05:44 -0500 Subject: [PATCH] Bug 1027885 - OdinMonkey: properly represent calls to builtin functions in the profiling stack (r=dougc) --- js/public/ProfilingFrameIterator.h | 3 +- js/src/jit-test/tests/asm.js/testProfiling.js | 33 ++++- js/src/jit/AsmJS.cpp | 105 ++++++++++++-- js/src/jit/AsmJSFrameIterator.cpp | 131 ++++++++++++------ js/src/jit/AsmJSFrameIterator.h | 73 ++++++++-- js/src/jit/AsmJSModule.cpp | 69 +++++++-- js/src/jit/AsmJSModule.h | 44 ++++-- js/src/jit/shared/Assembler-shared.h | 47 ++++--- js/src/shell/js.cpp | 5 + js/src/vm/Stack.cpp | 2 +- js/src/vm/Stack.h | 4 +- 11 files changed, 400 insertions(+), 116 deletions(-) diff --git a/js/public/ProfilingFrameIterator.h b/js/public/ProfilingFrameIterator.h index 457148d3d3bc..09a25832b0f3 100644 --- a/js/public/ProfilingFrameIterator.h +++ b/js/public/ProfilingFrameIterator.h @@ -57,7 +57,8 @@ class JS_PUBLIC_API(ProfilingFrameIterator) enum Kind { Function, - AsmJSTrampoline + AsmJSTrampoline, + CppFunction }; Kind kind() const; diff --git a/js/src/jit-test/tests/asm.js/testProfiling.js b/js/src/jit-test/tests/asm.js/testProfiling.js index 6409673bff39..af7ccdee1b86 100644 --- a/js/src/jit-test/tests/asm.js/testProfiling.js +++ b/js/src/jit-test/tests/asm.js/testProfiling.js @@ -48,12 +48,33 @@ assertEq(f(1), 2); var stacks = disableSingleStepProfiling(); assertEq(String(stacks), ",*,f*,g1f*,f*,*,,*,f*,g2f*,f*,*,"); -//TODO: next patch -//var f = asmLink(asmCompile('g', USE_ASM + "var sin=g.Math.sin; function f(d) { d=+d; return +sin(d) } return f"), this); -//enableSingleStepProfiling(); -//assertEq(f(.5), Math.sin(.5)); -//var stacks = disableSingleStepProfiling(); -//assertEq(String(stacks), ",*,f*,Math.sinf*,f*,*,"); +function testBuiltinD2D(name) { + var f = asmLink(asmCompile('g', USE_ASM + "var fun=g.Math." + name + "; function f(d) { d=+d; return +fun(d) } return f"), this); + enableSingleStepProfiling(); + assertEq(f(.1), eval("Math." + name + "(.1)")); + var stacks = disableSingleStepProfiling(); + assertEq(String(stacks), ",*,f*,Math." + name + "f*,f*,*,"); +} +for (name of ['sin', 'cos', 'tan', 'asin', 'acos', 'atan', 'ceil', 'floor', 'exp', 'log']) + testBuiltinD2D(name); +function testBuiltinF2F(name) { + var f = asmLink(asmCompile('g', USE_ASM + "var tof=g.Math.fround; var fun=g.Math." + name + "; function f(d) { d=tof(d); return tof(fun(d)) } return f"), this); + enableSingleStepProfiling(); + assertEq(f(.1), eval("Math.fround(Math." + name + "(Math.fround(.1)))")); + var stacks = disableSingleStepProfiling(); + assertEq(String(stacks), ",*,f*,Math." + name + "f*,f*,*,"); +} +for (name of ['ceil', 'floor']) + testBuiltinF2F(name); +function testBuiltinDD2D(name) { + var f = asmLink(asmCompile('g', USE_ASM + "var fun=g.Math." + name + "; function f(d, e) { d=+d; e=+e; return +fun(d,e) } return f"), this); + enableSingleStepProfiling(); + assertEq(f(.1, .2), eval("Math." + name + "(.1, .2)")); + var stacks = disableSingleStepProfiling(); + assertEq(String(stacks), ",*,f*,Math." 
+ name + "f*,f*,*,"); +} +for (name of ['atan2', 'pow']) + testBuiltinDD2D(name); // FFI tests: setJitCompilerOption("ion.usecount.trigger", 10); diff --git a/js/src/jit/AsmJS.cpp b/js/src/jit/AsmJS.cpp index 751ee0131f34..f92ffb7415a2 100644 --- a/js/src/jit/AsmJS.cpp +++ b/js/src/jit/AsmJS.cpp @@ -1503,6 +1503,11 @@ class MOZ_STACK_CLASS ModuleCompiler uint32_t end = masm_.currentOffset(); return module_->addInlineCodeRange(begin->offset(), end); } + bool finishGeneratingBuiltinThunk(AsmJSExit::BuiltinKind builtin, Label *begin, Label *pret) { + JS_ASSERT(finishedFunctionBodies_); + uint32_t end = masm_.currentOffset(); + return module_->addBuiltinThunkCodeRange(builtin, begin->offset(), pret->offset(), end); + } void buildCompilationTimeReport(bool storedInCache, ScopedJSFreePtr *out) { ScopedJSFreePtr slowFuns; @@ -6062,7 +6067,7 @@ FillArgumentArray(ModuleCompiler &m, const VarTypeVector &argTypes, MacroAssembler &masm = m.masm(); for (ABIArgTypeIter i(argTypes); !i.done(); i++) { - Address dstAddr = Address(StackPointer, offsetToArgs + i.index() * sizeof(Value)); + Address dstAddr(StackPointer, offsetToArgs + i.index() * sizeof(Value)); switch (i->kind()) { case ABIArg::GPR: masm.storeValue(JSVAL_TYPE_INT32, i->gpr(), dstAddr); @@ -6115,7 +6120,7 @@ GenerateFFIInterpExit(ModuleCompiler &m, const ModuleCompiler::ExitDescriptor &e unsigned framePushed = StackDecrementForCall(masm, offsetToArgv + argvBytes); Label begin; - GenerateAsmJSExitPrologue(masm, framePushed, AsmJSFFI, &begin); + GenerateAsmJSExitPrologue(masm, framePushed, AsmJSExit::FFI, &begin); // Fill the argument array. unsigned offsetToCallerStackArgs = sizeof(AsmJSFrame) + masm.framePushed(); @@ -6174,7 +6179,7 @@ GenerateFFIInterpExit(ModuleCompiler &m, const ModuleCompiler::ExitDescriptor &e } Label profilingReturn; - GenerateAsmJSExitEpilogue(masm, framePushed, AsmJSFFI, &profilingReturn); + GenerateAsmJSExitEpilogue(masm, framePushed, AsmJSExit::FFI, &profilingReturn); return m.finishGeneratingInterpExit(exitIndex, &begin, &profilingReturn) && !masm.oom(); } @@ -6229,7 +6234,7 @@ GenerateFFIIonExit(ModuleCompiler &m, const ModuleCompiler::ExitDescriptor &exit unsigned framePushed = Max(ionFrameSize, coerceFrameSize); Label begin; - GenerateAsmJSExitPrologue(masm, framePushed, AsmJSFFI, &begin); + GenerateAsmJSExitPrologue(masm, framePushed, AsmJSExit::FFI, &begin); // 1. Descriptor size_t argOffset = offsetToIonArgs; @@ -6389,7 +6394,7 @@ GenerateFFIIonExit(ModuleCompiler &m, const ModuleCompiler::ExitDescriptor &exit #endif Label profilingReturn; - GenerateAsmJSExitEpilogue(masm, framePushed, AsmJSFFI, &profilingReturn); + GenerateAsmJSExitEpilogue(masm, framePushed, AsmJSExit::FFI, &profilingReturn); if (oolConvert.used()) { masm.bind(&oolConvert); @@ -6452,6 +6457,85 @@ GenerateFFIExits(ModuleCompiler &m, const ModuleCompiler::ExitDescriptor &exit, return true; } +// Generate a thunk that updates fp before calling the given builtin so that +// both the builtin and the calling function show up in profiler stacks. (This +// thunk is dynamically patched in when profiling is enabled.) Since the thunk +// pushes an AsmJSFrame on the stack, that means we must rebuild the stack +// frame. Fortunately, these are low arity functions and everything is passed in +// regs on everything but x86 anyhow. 
+static bool +GenerateBuiltinThunk(ModuleCompiler &m, AsmJSExit::BuiltinKind builtin) +{ + MacroAssembler &masm = m.masm(); + JS_ASSERT(masm.framePushed() == 0); + + MIRTypeVector argTypes(m.cx()); + switch (builtin) { + case AsmJSExit::Builtin_ToInt32: + argTypes.infallibleAppend(MIRType_Int32); + break; +#if defined(JS_CODEGEN_ARM) + case AsmJSExit::Builtin_IDivMod: + case AsmJSExit::Builtin_UDivMod: + argTypes.infallibleAppend(MIRType_Int32); + argTypes.infallibleAppend(MIRType_Int32); + break; +#endif + case AsmJSExit::Builtin_SinD: + case AsmJSExit::Builtin_CosD: + case AsmJSExit::Builtin_TanD: + case AsmJSExit::Builtin_ASinD: + case AsmJSExit::Builtin_ACosD: + case AsmJSExit::Builtin_ATanD: + case AsmJSExit::Builtin_CeilD: + case AsmJSExit::Builtin_FloorD: + case AsmJSExit::Builtin_ExpD: + case AsmJSExit::Builtin_LogD: + argTypes.infallibleAppend(MIRType_Double); + break; + case AsmJSExit::Builtin_ModD: + case AsmJSExit::Builtin_PowD: + case AsmJSExit::Builtin_ATan2D: + argTypes.infallibleAppend(MIRType_Double); + argTypes.infallibleAppend(MIRType_Double); + break; + case AsmJSExit::Builtin_CeilF: + case AsmJSExit::Builtin_FloorF: + argTypes.infallibleAppend(MIRType_Float32); + break; + case AsmJSExit::Builtin_Limit: + MOZ_ASSUME_UNREACHABLE("Bad builtin"); + } + + uint32_t framePushed = StackDecrementForCall(masm, argTypes); + + Label begin; + GenerateAsmJSExitPrologue(masm, framePushed, AsmJSExit::Builtin(builtin), &begin); + + unsigned offsetToCallerStackArgs = sizeof(AsmJSFrame) + masm.framePushed(); + for (ABIArgMIRTypeIter i(argTypes); !i.done(); i++) { + if (i->kind() != ABIArg::Stack) + continue; + Address srcAddr(StackPointer, offsetToCallerStackArgs + i->offsetFromArgBase()); + Address dstAddr(StackPointer, i->offsetFromArgBase()); + if (i.mirType() == MIRType_Int32 || i.mirType() == MIRType_Float32) { + masm.load32(srcAddr, ABIArgGenerator::NonArgReturnVolatileReg0); + masm.store32(ABIArgGenerator::NonArgReturnVolatileReg0, dstAddr); + } else { + JS_ASSERT(i.mirType() == MIRType_Double); + masm.loadDouble(srcAddr, ScratchDoubleReg); + masm.storeDouble(ScratchDoubleReg, dstAddr); + } + } + + AssertStackAlignment(masm); + masm.call(BuiltinToImmKind(builtin)); + + Label profilingReturn; + GenerateAsmJSExitEpilogue(masm, framePushed, AsmJSExit::Builtin(builtin), &profilingReturn); + return m.finishGeneratingBuiltinThunk(builtin, &begin, &profilingReturn) && !masm.oom(); +} + static bool GenerateStackOverflowExit(ModuleCompiler &m, Label *throwLabel) { @@ -6616,14 +6700,14 @@ GenerateSyncInterruptExit(ModuleCompiler &m, Label *throwLabel) unsigned framePushed = StackDecrementForCall(masm, ShadowStackSpace); - GenerateAsmJSExitPrologue(masm, framePushed, AsmJSInterrupt, &m.syncInterruptLabel()); + GenerateAsmJSExitPrologue(masm, framePushed, AsmJSExit::Interrupt, &m.syncInterruptLabel()); AssertStackAlignment(masm); masm.call(AsmJSImmPtr(AsmJSImm_HandleExecutionInterrupt)); masm.branchIfFalseBool(ReturnReg, throwLabel); Label profilingReturn; - GenerateAsmJSExitEpilogue(masm, framePushed, AsmJSInterrupt, &profilingReturn); + GenerateAsmJSExitEpilogue(masm, framePushed, AsmJSExit::Interrupt, &profilingReturn); return m.finishGeneratingInterrupt(&m.syncInterruptLabel(), &profilingReturn) && !masm.oom(); } @@ -6668,8 +6752,6 @@ GenerateStubs(ModuleCompiler &m) Label throwLabel; - // The order of the iterations here is non-deterministic, since - // m.allExits() is a hash keyed by pointer values! 
for (ModuleCompiler::ExitMap::Range r = m.allExits(); !r.empty(); r.popFront()) { if (!GenerateFFIExits(m, r.front().key(), r.front().value(), &throwLabel)) return false; @@ -6686,6 +6768,11 @@ GenerateStubs(ModuleCompiler &m) if (!GenerateThrowStub(m, &throwLabel)) return false; + for (unsigned i = 0; i < AsmJSExit::Builtin_Limit; i++) { + if (!GenerateBuiltinThunk(m, AsmJSExit::BuiltinKind(i))) + return false; + } + return true; } diff --git a/js/src/jit/AsmJSFrameIterator.cpp b/js/src/jit/AsmJSFrameIterator.cpp index cafbc4db81c7..dc575aceb279 100644 --- a/js/src/jit/AsmJSFrameIterator.cpp +++ b/js/src/jit/AsmJSFrameIterator.cpp @@ -70,6 +70,7 @@ AsmJSFrameIterator::settle() case AsmJSModule::CodeRange::FFI: case AsmJSModule::CodeRange::Interrupt: case AsmJSModule::CodeRange::Inline: + case AsmJSModule::CodeRange::Thunk: MOZ_ASSUME_UNREACHABLE("Should not encounter an exit during iteration"); } } @@ -126,7 +127,7 @@ PushRetAddr(MacroAssembler &masm) // pointer so that AsmJSProfilingFrameIterator can walk the stack at any pc in // generated code. static void -GenerateProfilingPrologue(MacroAssembler &masm, unsigned framePushed, AsmJSExitReason reason, +GenerateProfilingPrologue(MacroAssembler &masm, unsigned framePushed, AsmJSExit::Reason reason, Label *begin) { Register act = ABIArgGenerator::NonArgReturnVolatileReg0; @@ -154,7 +155,7 @@ GenerateProfilingPrologue(MacroAssembler &masm, unsigned framePushed, AsmJSExitR JS_ASSERT(StoredFP == masm.currentOffset() - offsetAtBegin); } - if (reason != AsmJSNoExit) + if (reason != AsmJSExit::None) masm.store32(Imm32(reason), Address(act, AsmJSActivation::offsetOfExitReason())); if (framePushed) @@ -163,7 +164,7 @@ GenerateProfilingPrologue(MacroAssembler &masm, unsigned framePushed, AsmJSExitR // Generate the inverse of GenerateProfilingPrologue. 
static void -GenerateProfilingEpilogue(MacroAssembler &masm, unsigned framePushed, AsmJSExitReason reason, +GenerateProfilingEpilogue(MacroAssembler &masm, unsigned framePushed, AsmJSExit::Reason reason, Label *profilingReturn) { Register act = ABIArgGenerator::NonArgReturnVolatileReg0; @@ -173,8 +174,8 @@ GenerateProfilingEpilogue(MacroAssembler &masm, unsigned framePushed, AsmJSExitR masm.loadAsmJSActivation(act); - if (reason != AsmJSNoExit) - masm.store32(Imm32(AsmJSNoExit), Address(act, AsmJSActivation::offsetOfExitReason())); + if (reason != AsmJSExit::None) + masm.store32(Imm32(AsmJSExit::None), Address(act, AsmJSActivation::offsetOfExitReason())); // AsmJSProfilingFrameIterator assumes that there is only a single 'ret' // instruction (whose offset is recorded by profilingReturn) after the store @@ -216,7 +217,7 @@ js::GenerateAsmJSFunctionPrologue(MacroAssembler &masm, unsigned framePushed, masm.align(CodeAlignment); - GenerateProfilingPrologue(masm, framePushed, AsmJSNoExit, &labels->begin); + GenerateProfilingPrologue(masm, framePushed, AsmJSExit::None, &labels->begin); Label body; masm.jump(&body); @@ -285,7 +286,7 @@ js::GenerateAsmJSFunctionEpilogue(MacroAssembler &masm, unsigned framePushed, // Profiling epilogue: masm.bind(&labels->profilingEpilogue); - GenerateProfilingEpilogue(masm, framePushed, AsmJSNoExit, &labels->profilingReturn); + GenerateProfilingEpilogue(masm, framePushed, AsmJSExit::None, &labels->profilingReturn); if (!labels->overflowThunk.empty() && labels->overflowThunk.ref().used()) { // The general throw stub assumes that only sizeof(AsmJSFrame) bytes @@ -346,7 +347,7 @@ js::GenerateAsmJSEntryEpilogue(MacroAssembler &masm) } void -js::GenerateAsmJSExitPrologue(MacroAssembler &masm, unsigned framePushed, AsmJSExitReason reason, +js::GenerateAsmJSExitPrologue(MacroAssembler &masm, unsigned framePushed, AsmJSExit::Reason reason, Label *begin) { masm.align(CodeAlignment); @@ -355,7 +356,7 @@ js::GenerateAsmJSExitPrologue(MacroAssembler &masm, unsigned framePushed, AsmJSE } void -js::GenerateAsmJSExitEpilogue(MacroAssembler &masm, unsigned framePushed, AsmJSExitReason reason, +js::GenerateAsmJSExitEpilogue(MacroAssembler &masm, unsigned framePushed, AsmJSExit::Reason reason, Label *profilingReturn) { // Inverse of GenerateAsmJSExitPrologue: @@ -371,26 +372,32 @@ AsmJSProfilingFrameIterator::AsmJSProfilingFrameIterator(const AsmJSActivation & : module_(&activation.module()), callerFP_(nullptr), callerPC_(nullptr), - exitReason_(AsmJSNoExit), + exitReason_(AsmJSExit::None), codeRange_(nullptr) { initFromFP(activation); } static inline void -AssertMatchesCallSite(const AsmJSModule &module, void *pc, void *newfp, void *oldfp) +AssertMatchesCallSite(const AsmJSModule &module, const AsmJSModule::CodeRange *calleeCodeRange, + void *callerPC, void *callerFP, void *fp) { #ifdef DEBUG - const AsmJSModule::CodeRange *codeRange = module.lookupCodeRange(pc); - JS_ASSERT(codeRange); - if (codeRange->isEntry()) { - JS_ASSERT(newfp == nullptr); + const AsmJSModule::CodeRange *callerCodeRange = module.lookupCodeRange(callerPC); + JS_ASSERT(callerCodeRange); + if (callerCodeRange->isEntry()) { + JS_ASSERT(callerFP == nullptr); return; } - const CallSite *callsite = module.lookupCallSite(pc); - JS_ASSERT(callsite); - JS_ASSERT(newfp == (uint8_t*)oldfp + callsite->stackDepth()); + const CallSite *callsite = module.lookupCallSite(callerPC); + if (calleeCodeRange->isThunk()) { + JS_ASSERT(!callsite); + JS_ASSERT(callerCodeRange->isFunction()); + } else { + JS_ASSERT(callsite); + 
JS_ASSERT(callerFP == (uint8_t*)fp + callsite->stackDepth()); + } #endif } @@ -430,15 +437,16 @@ AsmJSProfilingFrameIterator::initFromFP(const AsmJSActivation &activation) callerFP_ = nullptr; break; case AsmJSModule::CodeRange::Function: - case AsmJSModule::CodeRange::FFI: - case AsmJSModule::CodeRange::Interrupt: - case AsmJSModule::CodeRange::Inline: - AssertMatchesCallSite(*module_, pc, CallerFPFromFP(fp), fp); fp = CallerFPFromFP(fp); callerPC_ = ReturnAddressFromFP(fp); callerFP_ = CallerFPFromFP(fp); - AssertMatchesCallSite(*module_, callerPC_, callerFP_, fp); + AssertMatchesCallSite(*module_, codeRange, callerPC_, callerFP_, fp); break; + case AsmJSModule::CodeRange::FFI: + case AsmJSModule::CodeRange::Interrupt: + case AsmJSModule::CodeRange::Inline: + case AsmJSModule::CodeRange::Thunk: + MOZ_CRASH("Unexpected CodeRange kind"); } JS_ASSERT(!done()); @@ -451,7 +459,7 @@ AsmJSProfilingFrameIterator::AsmJSProfilingFrameIterator(const AsmJSActivation & : module_(&activation.module()), callerFP_(nullptr), callerPC_(nullptr), - exitReason_(AsmJSNoExit), + exitReason_(AsmJSExit::None), codeRange_(nullptr) { // If profiling hasn't been enabled for this module, then CallerFPFromFP @@ -478,7 +486,8 @@ AsmJSProfilingFrameIterator::AsmJSProfilingFrameIterator(const AsmJSActivation & switch (codeRange->kind()) { case AsmJSModule::CodeRange::Function: case AsmJSModule::CodeRange::FFI: - case AsmJSModule::CodeRange::Interrupt: { + case AsmJSModule::CodeRange::Interrupt: + case AsmJSModule::CodeRange::Thunk: { // While codeRange describes the *current* frame, the fp/pc state stored in // the iterator is the *caller's* frame. The reason for this is that the // activation.fp isn't always the AsmJSFrame for state.pc; during the @@ -498,22 +507,22 @@ AsmJSProfilingFrameIterator::AsmJSProfilingFrameIterator(const AsmJSActivation & if (offsetInCodeRange < PushedRetAddr) { callerPC_ = state.lr; callerFP_ = fp; - AssertMatchesCallSite(*module_, callerPC_, callerFP_, sp - 2); + AssertMatchesCallSite(*module_, codeRange, callerPC_, callerFP_, sp - 2); } else #endif if (offsetInCodeRange < PushedFP || offsetInModule == codeRange->profilingReturn()) { callerPC_ = *sp; callerFP_ = fp; - AssertMatchesCallSite(*module_, callerPC_, callerFP_, sp - 1); + AssertMatchesCallSite(*module_, codeRange, callerPC_, callerFP_, sp - 1); } else if (offsetInCodeRange < StoredFP) { JS_ASSERT(fp == CallerFPFromFP(sp)); callerPC_ = ReturnAddressFromFP(sp); callerFP_ = CallerFPFromFP(sp); - AssertMatchesCallSite(*module_, callerPC_, callerFP_, sp); + AssertMatchesCallSite(*module_, codeRange, callerPC_, callerFP_, sp); } else { callerPC_ = ReturnAddressFromFP(fp); callerFP_ = CallerFPFromFP(fp); - AssertMatchesCallSite(*module_, callerPC_, callerFP_, fp); + AssertMatchesCallSite(*module_, codeRange, callerPC_, callerFP_, fp); } break; } @@ -538,7 +547,7 @@ AsmJSProfilingFrameIterator::AsmJSProfilingFrameIterator(const AsmJSActivation & // as below. 
callerPC_ = ReturnAddressFromFP(fp); callerFP_ = CallerFPFromFP(fp); - AssertMatchesCallSite(*module_, callerPC_, callerFP_, fp); + AssertMatchesCallSite(*module_, codeRange, callerPC_, callerFP_, fp); break; } } @@ -550,9 +559,9 @@ AsmJSProfilingFrameIterator::AsmJSProfilingFrameIterator(const AsmJSActivation & void AsmJSProfilingFrameIterator::operator++() { - if (exitReason_ != AsmJSNoExit) { + if (exitReason_ != AsmJSExit::None) { JS_ASSERT(codeRange_); - exitReason_ = AsmJSNoExit; + exitReason_ = AsmJSExit::None; JS_ASSERT(!done()); return; } @@ -578,8 +587,9 @@ AsmJSProfilingFrameIterator::operator++() case AsmJSModule::CodeRange::FFI: case AsmJSModule::CodeRange::Interrupt: case AsmJSModule::CodeRange::Inline: + case AsmJSModule::CodeRange::Thunk: callerPC_ = ReturnAddressFromFP(callerFP_); - AssertMatchesCallSite(*module_, callerPC_, CallerFPFromFP(callerFP_), callerFP_); + AssertMatchesCallSite(*module_, codeRange, callerPC_, CallerFPFromFP(callerFP_), callerFP_); callerFP_ = CallerFPFromFP(callerFP_); break; } @@ -592,12 +602,14 @@ AsmJSProfilingFrameIterator::kind() const { JS_ASSERT(!done()); - switch (exitReason_) { - case AsmJSNoExit: + switch (AsmJSExit::ExtractReasonKind(exitReason_)) { + case AsmJSExit::Reason_None: break; - case AsmJSInterrupt: - case AsmJSFFI: + case AsmJSExit::Reason_Interrupt: + case AsmJSExit::Reason_FFI: return JS::ProfilingFrameIterator::AsmJSTrampoline; + case AsmJSExit::Reason_Builtin: + return JS::ProfilingFrameIterator::CppFunction; } auto codeRange = reinterpret_cast(codeRange_); @@ -609,6 +621,8 @@ AsmJSProfilingFrameIterator::kind() const case AsmJSModule::CodeRange::Interrupt: case AsmJSModule::CodeRange::Inline: return JS::ProfilingFrameIterator::AsmJSTrampoline; + case AsmJSModule::CodeRange::Thunk: + return JS::ProfilingFrameIterator::CppFunction; } MOZ_ASSUME_UNREACHABLE("Bad kind"); @@ -628,23 +642,55 @@ AsmJSProfilingFrameIterator::functionFilename() const return module_->scriptSource()->filename(); } +static const char * +BuiltinToName(AsmJSExit::BuiltinKind builtin) +{ + switch (builtin) { + case AsmJSExit::Builtin_ToInt32: return "ToInt32"; +#if defined(JS_CODEGEN_ARM) + case AsmJSExit::Builtin_IDivMod: return "software idivmod"; + case AsmJSExit::Builtin_UDivMod: return "software uidivmod"; +#endif + case AsmJSExit::Builtin_ModD: return "fmod"; + case AsmJSExit::Builtin_SinD: return "Math.sin"; + case AsmJSExit::Builtin_CosD: return "Math.cos"; + case AsmJSExit::Builtin_TanD: return "Math.tan"; + case AsmJSExit::Builtin_ASinD: return "Math.asin"; + case AsmJSExit::Builtin_ACosD: return "Math.acos"; + case AsmJSExit::Builtin_ATanD: return "Math.atan"; + case AsmJSExit::Builtin_CeilD: + case AsmJSExit::Builtin_CeilF: return "Math.ceil"; + case AsmJSExit::Builtin_FloorD: + case AsmJSExit::Builtin_FloorF: return "Math.floor"; + case AsmJSExit::Builtin_ExpD: return "Math.exp"; + case AsmJSExit::Builtin_LogD: return "Math.log"; + case AsmJSExit::Builtin_PowD: return "Math.pow"; + case AsmJSExit::Builtin_ATan2D: return "Math.atan2"; + case AsmJSExit::Builtin_Limit: break; + } + MOZ_ASSUME_UNREACHABLE("Bad builtin kind"); +} + const char * AsmJSProfilingFrameIterator::nonFunctionDescription() const { JS_ASSERT(!done()); + JS_ASSERT(kind() != JS::ProfilingFrameIterator::Function); // Use the same string for both time inside and under so that the two // entries will be coalesced by the profiler. 
const char *ffiDescription = "asm.js FFI trampoline"; const char *interruptDescription = "asm.js slow script interrupt"; - switch (exitReason_) { - case AsmJSNoExit: + switch (AsmJSExit::ExtractReasonKind(exitReason_)) { + case AsmJSExit::Reason_None: break; - case AsmJSFFI: + case AsmJSExit::Reason_FFI: return ffiDescription; - case AsmJSInterrupt: + case AsmJSExit::Reason_Interrupt: return interruptDescription; + case AsmJSExit::Reason_Builtin: + return BuiltinToName(AsmJSExit::ExtractBuiltinKind(exitReason_)); } auto codeRange = reinterpret_cast(codeRange_); @@ -654,8 +700,9 @@ AsmJSProfilingFrameIterator::nonFunctionDescription() const case AsmJSModule::CodeRange::FFI: return ffiDescription; case AsmJSModule::CodeRange::Interrupt: return interruptDescription; case AsmJSModule::CodeRange::Inline: return "asm.js inline stub"; + case AsmJSModule::CodeRange::Thunk: return BuiltinToName(codeRange->thunkTarget()); } - MOZ_ASSUME_UNREACHABLE("Bad kind"); + MOZ_ASSUME_UNREACHABLE("Bad exit kind"); } diff --git a/js/src/jit/AsmJSFrameIterator.h b/js/src/jit/AsmJSFrameIterator.h index 6a5a97482949..25e6e09d4575 100644 --- a/js/src/jit/AsmJSFrameIterator.h +++ b/js/src/jit/AsmJSFrameIterator.h @@ -48,16 +48,64 @@ class AsmJSFrameIterator unsigned computeLine(uint32_t *column) const; }; -// List of reasons for execution leaving asm.js-generated code, stored in -// AsmJSActivation. The initial and default state is AsmJSNoExit. If AsmJSNoExit -// is observed when the pc isn't in asm.js code, execution must have been -// interrupted asynchronously (viz., by a exception/signal handler). -enum AsmJSExitReason +namespace AsmJSExit { - AsmJSNoExit, - AsmJSFFI, - AsmJSInterrupt -}; + // List of reasons for execution leaving asm.js-generated code, stored in + // AsmJSActivation. The initial and default state is AsmJSNoExit. If + // AsmJSNoExit is observed when the pc isn't in asm.js code, execution must + // have been interrupted asynchronously (viz., by a exception/signal + // handler). + enum ReasonKind { + Reason_None, + Reason_FFI, + Reason_Interrupt, + Reason_Builtin + }; + + // For Reason_Builtin, the list of builtins, so they can be displayed in the + // profile call stack. + enum BuiltinKind { + Builtin_ToInt32, +#if defined(JS_CODEGEN_ARM) + Builtin_IDivMod, + Builtin_UDivMod, +#endif + Builtin_ModD, + Builtin_SinD, + Builtin_CosD, + Builtin_TanD, + Builtin_ASinD, + Builtin_ACosD, + Builtin_ATanD, + Builtin_CeilD, + Builtin_CeilF, + Builtin_FloorD, + Builtin_FloorF, + Builtin_ExpD, + Builtin_LogD, + Builtin_PowD, + Builtin_ATan2D, + Builtin_Limit + }; + + // A Reason contains both a ReasonKind and (if Reason_Builtin) a + // BuiltinKind. + typedef uint32_t Reason; + + static const uint32_t None = Reason_None; + static const uint32_t FFI = Reason_FFI; + static const uint32_t Interrupt = Reason_Interrupt; + static inline Reason Builtin(BuiltinKind builtin) { + return uint16_t(Reason_Builtin) | (uint16_t(builtin) << 16); + } + static inline ReasonKind ExtractReasonKind(Reason reason) { + return ReasonKind(uint16_t(reason)); + } + static inline BuiltinKind ExtractBuiltinKind(Reason reason) { + JS_ASSERT(ExtractReasonKind(reason) == Reason_Builtin); + return BuiltinKind(uint16_t(reason >> 16)); + } +} // Iterates over the frames of a single AsmJSActivation, given an // asynchrously-interrupted thread's state. 
If the activation's @@ -67,7 +115,7 @@ class AsmJSProfilingFrameIterator const AsmJSModule *module_; uint8_t *callerFP_; void *callerPC_; - AsmJSExitReason exitReason_; + AsmJSExit::Reason exitReason_; // Really, a const AsmJSModule::CodeRange*, but no forward declarations of // nested classes, so use void* to avoid pulling in all of AsmJSModule.h. @@ -112,12 +160,13 @@ void GenerateAsmJSEntryEpilogue(jit::MacroAssembler &masm); void -GenerateAsmJSExitPrologue(jit::MacroAssembler &masm, unsigned framePushed, AsmJSExitReason reason, +GenerateAsmJSExitPrologue(jit::MacroAssembler &masm, unsigned framePushed, AsmJSExit::Reason reason, jit::Label *begin); void -GenerateAsmJSExitEpilogue(jit::MacroAssembler &masm, unsigned framePushed, AsmJSExitReason reason, +GenerateAsmJSExitEpilogue(jit::MacroAssembler &masm, unsigned framePushed, AsmJSExit::Reason reason, jit::Label *profilingReturn); + } // namespace js #endif // jit_AsmJSFrameIterator_h diff --git a/js/src/jit/AsmJSModule.cpp b/js/src/jit/AsmJSModule.cpp index 1787aa5bde78..cbe1e92c72cd 100644 --- a/js/src/jit/AsmJSModule.cpp +++ b/js/src/jit/AsmJSModule.cpp @@ -41,6 +41,7 @@ using mozilla::BinarySearch; using mozilla::PodCopy; using mozilla::PodEqual; using mozilla::Compression::LZ4; +using mozilla::Swap; static uint8_t * AllocateExecutableMemory(ExclusiveContext *cx, size_t totalBytes) @@ -172,6 +173,7 @@ AsmJSModule::addSizeOfMisc(mozilla::MallocSizeOf mallocSizeOf, size_t *asmJSModu callSites_.sizeOfExcludingThis(mallocSizeOf) + codeRanges_.sizeOfExcludingThis(mallocSizeOf) + funcPtrTables_.sizeOfExcludingThis(mallocSizeOf) + + builtinThunkOffsets_.sizeOfExcludingThis(mallocSizeOf) + names_.sizeOfExcludingThis(mallocSizeOf) + heapAccesses_.sizeOfExcludingThis(mallocSizeOf) + functionCounts_.sizeOfExcludingThis(mallocSizeOf) + @@ -341,6 +343,8 @@ AsmJSModule::finish(ExclusiveContext *cx, TokenStream &tokenStream, MacroAssembl codeRanges_[i].updateOffsets(masm); JS_ASSERT_IF(i > 0, codeRanges_[i - 1].end() <= codeRanges_[i].begin()); } + for (size_t i = 0; i < builtinThunkOffsets_.length(); i++) + builtinThunkOffsets_[i] = masm.actualOffset(builtinThunkOffsets_[i]); #endif JS_ASSERT(pod.functionBytes_ % AsmJSPageSize == 0); @@ -1173,46 +1177,61 @@ AsmJSModule::CodeRange::CodeRange(uint32_t nameIndex, const AsmJSFunctionLabels : nameIndex_(nameIndex), begin_(l.begin.offset()), profilingReturn_(l.profilingReturn.offset()), - end_(l.end.offset()), - kind_(Function) + end_(l.end.offset()) { + u.kind_ = Function; + setDeltas(l.entry.offset(), l.profilingJump.offset(), l.profilingEpilogue.offset()); + JS_ASSERT(l.begin.offset() < l.entry.offset()); JS_ASSERT(l.entry.offset() < l.profilingJump.offset()); JS_ASSERT(l.profilingJump.offset() < l.profilingEpilogue.offset()); JS_ASSERT(l.profilingEpilogue.offset() < l.profilingReturn.offset()); JS_ASSERT(l.profilingReturn.offset() < l.end.offset()); - - setDeltas(l.entry.offset(), l.profilingJump.offset(), l.profilingEpilogue.offset()); } void AsmJSModule::CodeRange::setDeltas(uint32_t entry, uint32_t profilingJump, uint32_t profilingEpilogue) { JS_ASSERT(entry - begin_ <= UINT8_MAX); - beginToEntry_ = entry - begin_; + u.func.beginToEntry_ = entry - begin_; JS_ASSERT(profilingReturn_ - profilingJump <= UINT8_MAX); - profilingJumpToProfilingReturn_ = profilingReturn_ - profilingJump; + u.func.profilingJumpToProfilingReturn_ = profilingReturn_ - profilingJump; JS_ASSERT(profilingReturn_ - profilingEpilogue <= UINT8_MAX); - profilingEpilogueToProfilingReturn_ = profilingReturn_ - profilingEpilogue; + 
u.func.profilingEpilogueToProfilingReturn_ = profilingReturn_ - profilingEpilogue; } AsmJSModule::CodeRange::CodeRange(Kind kind, uint32_t begin, uint32_t end) : begin_(begin), - end_(end), - kind_(kind) + end_(end) { + u.kind_ = kind; + JS_ASSERT(begin_ <= end_); - JS_ASSERT(kind_ == Entry || kind_ == Inline); + JS_ASSERT(u.kind_ == Entry || u.kind_ == Inline); } AsmJSModule::CodeRange::CodeRange(Kind kind, uint32_t begin, uint32_t profilingReturn, uint32_t end) : begin_(begin), profilingReturn_(profilingReturn), - end_(end), - kind_(kind) + end_(end) { + u.kind_ = kind; + + JS_ASSERT(begin_ < profilingReturn_); + JS_ASSERT(profilingReturn_ < end_); +} + +AsmJSModule::CodeRange::CodeRange(AsmJSExit::BuiltinKind builtin, uint32_t begin, + uint32_t profilingReturn, uint32_t end) + : begin_(begin), + profilingReturn_(profilingReturn), + end_(end) +{ + u.kind_ = Thunk; + u.thunk.target_ = builtin; + JS_ASSERT(begin_ < profilingReturn_); JS_ASSERT(profilingReturn_ < end_); } @@ -1362,6 +1381,7 @@ AsmJSModule::serializedSize() const SerializedPodVectorSize(callSites_) + SerializedPodVectorSize(codeRanges_) + SerializedPodVectorSize(funcPtrTables_) + + SerializedPodVectorSize(builtinThunkOffsets_) + SerializedVectorSize(names_) + SerializedPodVectorSize(heapAccesses_) + #if defined(MOZ_VTUNE) || defined(JS_ION_PERF) @@ -1384,6 +1404,7 @@ AsmJSModule::serialize(uint8_t *cursor) const cursor = SerializePodVector(cursor, callSites_); cursor = SerializePodVector(cursor, codeRanges_); cursor = SerializePodVector(cursor, funcPtrTables_); + cursor = SerializePodVector(cursor, builtinThunkOffsets_); cursor = SerializeVector(cursor, names_); cursor = SerializePodVector(cursor, heapAccesses_); #if defined(MOZ_VTUNE) || defined(JS_ION_PERF) @@ -1412,6 +1433,7 @@ AsmJSModule::deserialize(ExclusiveContext *cx, const uint8_t *cursor) (cursor = DeserializePodVector(cx, cursor, &callSites_)) && (cursor = DeserializePodVector(cx, cursor, &codeRanges_)) && (cursor = DeserializePodVector(cx, cursor, &funcPtrTables_)) && + (cursor = DeserializePodVector(cx, cursor, &builtinThunkOffsets_)) && (cursor = DeserializeVector(cx, cursor, &names_)) && (cursor = DeserializePodVector(cx, cursor, &heapAccesses_)) && #if defined(MOZ_VTUNE) || defined(JS_ION_PERF) @@ -1485,6 +1507,7 @@ AsmJSModule::clone(JSContext *cx, ScopedJSDeletePtr *moduleOut) con !ClonePodVector(cx, callSites_, &out.callSites_) || !ClonePodVector(cx, codeRanges_, &out.codeRanges_) || !ClonePodVector(cx, funcPtrTables_, &out.funcPtrTables_) || + !ClonePodVector(cx, builtinThunkOffsets_, &out.builtinThunkOffsets_) || !CloneVector(cx, names_, &out.names_) || !ClonePodVector(cx, heapAccesses_, &out.heapAccesses_) || !staticLinkData_.clone(cx, &out.staticLinkData_)) @@ -1613,6 +1636,28 @@ AsmJSModule::setProfilingEnabled(bool enabled) #endif } + // Replace all calls to builtins with calls to profiling thunks that push a + // frame pointer. Since exit unwinding always starts at the caller of fp, + // this avoids losing the innermost asm.js function. 
+ for (unsigned builtin = 0; builtin < AsmJSExit::Builtin_Limit; builtin++) { + AsmJSImmKind imm = BuiltinToImmKind(AsmJSExit::BuiltinKind(builtin)); + const AsmJSModule::OffsetVector &offsets = staticLinkData_.absoluteLinks[imm]; + void *from = AddressOf(AsmJSImmKind(imm), nullptr); + void *to = code_ + builtinThunkOffsets_[builtin]; + if (!enabled) + Swap(from, to); + for (size_t j = 0; j < offsets.length(); j++) { + uint8_t *caller = code_ + offsets[j]; + const AsmJSModule::CodeRange *codeRange = lookupCodeRange(caller); + if (codeRange->isThunk()) + continue; + JS_ASSERT(codeRange->isFunction()); + Assembler::PatchDataWithValueCheck(CodeLocationLabel(caller), + PatchedImmPtr(to), + PatchedImmPtr(from)); + } + } + profilingEnabled_ = enabled; } diff --git a/js/src/jit/AsmJSModule.h b/js/src/jit/AsmJSModule.h index 0a909ad80e30..b9d38e52b002 100644 --- a/js/src/jit/AsmJSModule.h +++ b/js/src/jit/AsmJSModule.h @@ -338,54 +338,69 @@ class AsmJSModule uint32_t begin_; uint32_t profilingReturn_; uint32_t end_; - uint8_t beginToEntry_; - uint8_t profilingJumpToProfilingReturn_; - uint8_t profilingEpilogueToProfilingReturn_; - uint8_t kind_; + union { + struct { + uint8_t kind_; + uint8_t beginToEntry_; + uint8_t profilingJumpToProfilingReturn_; + uint8_t profilingEpilogueToProfilingReturn_; + } func; + struct { + uint8_t kind_; + uint16_t target_; + } thunk; + uint8_t kind_; + } u; void setDeltas(uint32_t entry, uint32_t profilingJump, uint32_t profilingEpilogue); public: - enum Kind { Function, Entry, FFI, Interrupt, Inline }; + enum Kind { Function, Entry, FFI, Interrupt, Thunk, Inline }; CodeRange() {} CodeRange(uint32_t nameIndex, const AsmJSFunctionLabels &l); CodeRange(Kind kind, uint32_t begin, uint32_t end); CodeRange(Kind kind, uint32_t begin, uint32_t profilingReturn, uint32_t end); + CodeRange(AsmJSExit::BuiltinKind builtin, uint32_t begin, uint32_t pret, uint32_t end); void updateOffsets(jit::MacroAssembler &masm); - Kind kind() const { return Kind(kind_); } + Kind kind() const { return Kind(u.kind_); } bool isFunction() const { return kind() == Function; } bool isEntry() const { return kind() == Entry; } bool isFFI() const { return kind() == FFI; } bool isInterrupt() const { return kind() == Interrupt; } + bool isThunk() const { return kind() == Thunk; } uint32_t begin() const { return begin_; } uint32_t entry() const { JS_ASSERT(isFunction()); - return begin_ + beginToEntry_; + return begin_ + u.func.beginToEntry_; } uint32_t end() const { return end_; } uint32_t profilingJump() const { JS_ASSERT(isFunction()); - return profilingReturn_ - profilingJumpToProfilingReturn_; + return profilingReturn_ - u.func.profilingJumpToProfilingReturn_; } uint32_t profilingEpilogue() const { JS_ASSERT(isFunction()); - return profilingReturn_ - profilingEpilogueToProfilingReturn_; + return profilingReturn_ - u.func.profilingEpilogueToProfilingReturn_; } uint32_t profilingReturn() const { - JS_ASSERT(isFunction() || isFFI() || isInterrupt()); + JS_ASSERT(isFunction() || isFFI() || isInterrupt() || isThunk()); return profilingReturn_; } PropertyName *functionName(const AsmJSModule &module) const { - JS_ASSERT(kind() == Function); + JS_ASSERT(isFunction()); return module.names_[nameIndex_].name(); } + AsmJSExit::BuiltinKind thunkTarget() const { + JS_ASSERT(isThunk()); + return AsmJSExit::BuiltinKind(u.thunk.target_); + } }; class FuncPtrTable @@ -586,6 +601,7 @@ class AsmJSModule Vector callSites_; Vector codeRanges_; Vector funcPtrTables_; + Vector builtinThunkOffsets_; Vector names_; Vector 
heapAccesses_; Vector functionCounts_; @@ -802,6 +818,12 @@ class AsmJSModule bool addInterruptCodeRange(uint32_t begin, uint32_t pret, uint32_t end) { return codeRanges_.append(CodeRange(CodeRange::Interrupt, begin, pret, end)); } + bool addBuiltinThunkCodeRange(AsmJSExit::BuiltinKind builtin, uint32_t begin, + uint32_t profilingReturn, uint32_t end) + { + return builtinThunkOffsets_.append(begin) && + codeRanges_.append(CodeRange(builtin, begin, profilingReturn, end)); + } bool addInlineCodeRange(uint32_t begin, uint32_t end) { return codeRanges_.append(CodeRange(CodeRange::Inline, begin, end)); } diff --git a/js/src/jit/shared/Assembler-shared.h b/js/src/jit/shared/Assembler-shared.h index 46453200440d..9055e8aff379 100644 --- a/js/src/jit/shared/Assembler-shared.h +++ b/js/src/jit/shared/Assembler-shared.h @@ -11,6 +11,7 @@ #include +#include "jit/AsmJSFrameIterator.h" #include "jit/IonAllocPolicy.h" #include "jit/Label.h" #include "jit/Registers.h" @@ -734,6 +735,26 @@ struct AsmJSGlobalAccess // patched after deserialization when the address of global things has changed. enum AsmJSImmKind { + AsmJSImm_ToInt32 = AsmJSExit::Builtin_ToInt32, +#if defined(JS_CODEGEN_ARM) + AsmJSImm_aeabi_idivmod = AsmJSExit::Builtin_IDivMod, + AsmJSImm_aeabi_uidivmod = AsmJSExit::Builtin_UDivMod, +#endif + AsmJSImm_ModD = AsmJSExit::Builtin_ModD, + AsmJSImm_SinD = AsmJSExit::Builtin_SinD, + AsmJSImm_CosD = AsmJSExit::Builtin_CosD, + AsmJSImm_TanD = AsmJSExit::Builtin_TanD, + AsmJSImm_ASinD = AsmJSExit::Builtin_ASinD, + AsmJSImm_ACosD = AsmJSExit::Builtin_ACosD, + AsmJSImm_ATanD = AsmJSExit::Builtin_ATanD, + AsmJSImm_CeilD = AsmJSExit::Builtin_CeilD, + AsmJSImm_CeilF = AsmJSExit::Builtin_CeilF, + AsmJSImm_FloorD = AsmJSExit::Builtin_FloorD, + AsmJSImm_FloorF = AsmJSExit::Builtin_FloorF, + AsmJSImm_ExpD = AsmJSExit::Builtin_ExpD, + AsmJSImm_LogD = AsmJSExit::Builtin_LogD, + AsmJSImm_PowD = AsmJSExit::Builtin_PowD, + AsmJSImm_ATan2D = AsmJSExit::Builtin_ATan2D, AsmJSImm_Runtime, AsmJSImm_RuntimeInterrupt, AsmJSImm_StackLimit, @@ -744,29 +765,15 @@ enum AsmJSImmKind AsmJSImm_InvokeFromAsmJS_ToNumber, AsmJSImm_CoerceInPlace_ToInt32, AsmJSImm_CoerceInPlace_ToNumber, - AsmJSImm_ToInt32, -#if defined(JS_CODEGEN_ARM) - AsmJSImm_aeabi_idivmod, - AsmJSImm_aeabi_uidivmod, -#endif - AsmJSImm_ModD, - AsmJSImm_SinD, - AsmJSImm_CosD, - AsmJSImm_TanD, - AsmJSImm_ASinD, - AsmJSImm_ACosD, - AsmJSImm_ATanD, - AsmJSImm_CeilD, - AsmJSImm_CeilF, - AsmJSImm_FloorD, - AsmJSImm_FloorF, - AsmJSImm_ExpD, - AsmJSImm_LogD, - AsmJSImm_PowD, - AsmJSImm_ATan2D, AsmJSImm_Limit }; +static inline AsmJSImmKind +BuiltinToImmKind(AsmJSExit::BuiltinKind builtin) +{ + return AsmJSImmKind(builtin); +} + // Pointer to be embedded as an immediate in asm.js code. 
class AsmJSImmPtr { diff --git a/js/src/shell/js.cpp b/js/src/shell/js.cpp index cd80fc84ccf0..969e476e5af0 100644 --- a/js/src/shell/js.cpp +++ b/js/src/shell/js.cpp @@ -4440,6 +4440,11 @@ SingleStepCallback(void *arg, jit::Simulator *sim, void *pc) stack.append('*'); break; } + case JS::ProfilingFrameIterator::CppFunction: { + const char *desc = i.nonFunctionDescription(); + stack.append(desc, strlen(desc)); + break; + } } } diff --git a/js/src/vm/Stack.cpp b/js/src/vm/Stack.cpp index 19aecac7e62e..af02fabd5075 100644 --- a/js/src/vm/Stack.cpp +++ b/js/src/vm/Stack.cpp @@ -1690,7 +1690,7 @@ AsmJSActivation::AsmJSActivation(JSContext *cx, AsmJSModule &module) profiler_(nullptr), resumePC_(nullptr), fp_(nullptr), - exitReason_(AsmJSNoExit) + exitReason_(AsmJSExit::None) { if (cx->runtime()->spsProfiler.enabled()) { // Use a profiler string that matches jsMatch regex in diff --git a/js/src/vm/Stack.h b/js/src/vm/Stack.h index d72551a594bf..1825c934ed31 100644 --- a/js/src/vm/Stack.h +++ b/js/src/vm/Stack.h @@ -1480,7 +1480,7 @@ class AsmJSActivation : public Activation SPSProfiler *profiler_; void *resumePC_; uint8_t *fp_; - uint32_t exitReason_; + AsmJSExit::Reason exitReason_; public: AsmJSActivation(JSContext *cx, AsmJSModule &module); @@ -1495,7 +1495,7 @@ class AsmJSActivation : public Activation uint8_t *fp() const { return fp_; } // Returns the reason why asm.js code called out of asm.js code. - AsmJSExitReason exitReason() const { return AsmJSExitReason(exitReason_); } + AsmJSExit::Reason exitReason() const { return exitReason_; } // Read by JIT code: static unsigned offsetOfContext() { return offsetof(AsmJSActivation, cx_); }
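
For illustration only (not part of the patch): a minimal standalone C++ sketch of the AsmJSExit::Reason encoding that the patch introduces in AsmJSFrameIterator.h, where the low 16 bits hold the ReasonKind and, for Reason_Builtin, the high 16 bits hold the BuiltinKind. The sketch namespace, the trimmed enums, and the main() driver below are assumptions made for a self-contained example; they are not SpiderMonkey code.

// Standalone sketch of the Reason packing used by AsmJSExit::Builtin(),
// ExtractReasonKind(), and ExtractBuiltinKind() in the patch above.
#include <cassert>
#include <cstdint>

namespace sketch {

// Trimmed versions of the enums in AsmJSFrameIterator.h (illustrative only).
enum ReasonKind { Reason_None, Reason_FFI, Reason_Interrupt, Reason_Builtin };
enum BuiltinKind { Builtin_ToInt32, Builtin_ModD, Builtin_SinD, Builtin_Limit };

typedef uint32_t Reason;

inline Reason Builtin(BuiltinKind builtin) {
    // Same packing as the patch: ReasonKind in the low 16 bits,
    // BuiltinKind in the high 16 bits.
    return uint16_t(Reason_Builtin) | (uint16_t(builtin) << 16);
}

inline ReasonKind ExtractReasonKind(Reason reason) {
    return ReasonKind(uint16_t(reason));
}

inline BuiltinKind ExtractBuiltinKind(Reason reason) {
    assert(ExtractReasonKind(reason) == Reason_Builtin);
    return BuiltinKind(uint16_t(reason >> 16));
}

} // namespace sketch

int main() {
    // Round-trip a builtin exit reason, as the profiling frame iterator does
    // when it maps a Reason_Builtin exit to a CppFunction frame.
    sketch::Reason r = sketch::Builtin(sketch::Builtin_SinD);
    assert(sketch::ExtractReasonKind(r) == sketch::Reason_Builtin);
    assert(sketch::ExtractBuiltinKind(r) == sketch::Builtin_SinD);
    return 0;
}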