Correctly bound code blocks when using repatch buffers (bug 602333 part 1, r=sstangl).

David Anderson 2010-11-19 15:40:16 -08:00
Parent 0197b0d283
Commit 430929e065
9 changed files with 206 additions and 178 deletions
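The change in one picture: before this patch, call sites constructed JSC::RepatchBuffer over a guessed window around the instruction being patched (for example start - 32 with size 64), which only loosely matched the memory that actually needs to be made writable. After it, every patch site builds a Repatcher bound to the code block that owns the instruction, either the whole JITScript body or an individual stub's JITCode, so the bounds are exact. A minimal before/after sketch in C++, using names (ic.funJump, cs, f.jit()) taken from the hunks below:

// Before: guess a window around the patch point and hope it covers enough.
uint8 *start = (uint8 *)ic.funJump.executableAddress();
JSC::RepatchBuffer guessed(start - 32, 64);
guessed.relink(ic.funJump, cs);

// After: bind the writable region to the owning code block exactly.
Repatcher bounded(f.jit());        // f.jit() is the JITScript for this frame
bounded.relink(ic.funJump, cs);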

View file

@ -49,15 +49,18 @@ class RepatchBuffer {
typedef MacroAssemblerCodePtr CodePtr;
public:
RepatchBuffer(void *start, size_t size, bool mprot = true)
: m_start(start), m_size(size), mprot(mprot)
RepatchBuffer(const MacroAssemblerCodeRef &ref)
{
ExecutableAllocator::makeWritable(m_start, m_size);
m_start = ref.m_code.executableAddress();
m_size = ref.m_size;
mprot = true;
if (mprot)
ExecutableAllocator::makeWritable(m_start, m_size);
}
RepatchBuffer(CodeBlock* codeBlock)
RepatchBuffer(const JITCode &code)
{
JITCode& code = codeBlock->getJITCode();
m_start = code.start();
m_size = code.size();
mprot = true;
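The hunk above interleaves removed and added lines; reassembled, the new constructors read roughly as below. The body of the JITCode overload is cut off in the listing, so the trailing makeWritable gate shown for it is an assumption mirroring the first constructor.

RepatchBuffer(const MacroAssemblerCodeRef &ref)
{
    m_start = ref.m_code.executableAddress();
    m_size = ref.m_size;
    mprot = true;
    if (mprot)
        ExecutableAllocator::makeWritable(m_start, m_size);
}

RepatchBuffer(const JITCode &code)
{
    m_start = code.start();
    m_size = code.size();
    mprot = true;
    // remainder not shown in the hunk; presumably the same makeWritable gate
}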

View file

@ -48,10 +48,10 @@ class JITCode {
public:
JITCode(void* start, size_t size)
: m_start(start), m_size(size)
{
}
void* start() { return m_start; }
size_t size() { return m_size; }
{ }
JITCode() { }
void* start() const { return m_start; }
size_t size() const { return m_size; }
private:
void* m_start;
size_t m_size;

View file

@ -103,6 +103,8 @@ enum JSFrameFlags
JSFRAME_HAS_PREVPC = 0x400000 /* frame has prevpc_ set */
};
namespace js { namespace mjit { struct JITScript; } }
/*
* A stack frame is a part of a stack segment (see js::StackSegment) which is
* on the per-thread VM stack (see js::StackSpace).
@ -771,6 +773,12 @@ struct JSStackFrame
JS_STATIC_ASSERT(sizeof(JSStackFrame) % sizeof(js::Value) == 0);
}
#ifdef JS_METHODJIT
js::mjit::JITScript *jit() {
return script()->getJIT(isConstructing());
}
#endif
void methodjitStaticAsserts();
#ifdef DEBUG

View file

@ -44,6 +44,8 @@
#include "jstl.h"
#include "assembler/assembler/MacroAssembler.h"
#include "assembler/assembler/LinkBuffer.h"
#include "assembler/assembler/RepatchBuffer.h"
#include "assembler/jit/ExecutableAllocator.h"
namespace js {
namespace mjit {
@ -69,6 +71,7 @@ struct MacroAssemblerTypedefs {
typedef JSC::CodeLocationCall CodeLocationCall;
typedef JSC::ReturnAddressPtr ReturnAddressPtr;
typedef JSC::MacroAssemblerCodePtr MacroAssemblerCodePtr;
typedef JSC::JITCode JITCode;
};
class BaseCompiler : public MacroAssemblerTypedefs
@ -134,6 +137,20 @@ class LinkerHelper : public JSC::LinkBuffer
return;
link(jump.get(), label);
}
size_t size() const {
return m_size;
}
};
class Repatcher : public JSC::RepatchBuffer
{
public:
Repatcher(JITScript *jit) : JSC::RepatchBuffer(jit->code)
{ }
Repatcher(const JSC::JITCode &code) : JSC::RepatchBuffer(code)
{ }
};
} /* namespace js */
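The Repatcher wrapper added here is what the rest of the patch threads through: it can be bound either to a whole compiled method (via its JITScript, whose code member is a MacroAssemblerCodeRef) or to a single generated stub (via a JITCode). Illustrative fragments only; ic, cs, lastStringStub and firstShapeGuard stand in for names that appear in the later hunks:

Repatcher full(f.jit());                     // bounds = the JITScript's code block
full.relink(ic.funJump, cs);                 // patch a jump in the method body

Repatcher one(ic.lastStringStub);            // bounds = one generated stub (a JITCode)
CodeLocationLabel stub(ic.lastStringStub.start());
one.relink(stub.jumpAtOffset(firstShapeGuard), cs);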

View file

@ -49,7 +49,6 @@
#include "jsbool.h"
#include "assembler/assembler/MacroAssemblerCodeRef.h"
#include "assembler/assembler/CodeLocation.h"
#include "assembler/assembler/RepatchBuffer.h"
#include "jsiter.h"
#include "jstypes.h"
#include "methodjit/StubCalls.h"
@ -57,6 +56,7 @@
#include "jspropertycache.h"
#include "methodjit/MonoIC.h"
#include "jsanalyze.h"
#include "methodjit/BaseCompiler.h"
#include "jsinterpinlines.h"
#include "jspropertycacheinlines.h"
@ -890,16 +890,14 @@ FinishExcessFrames(VMFrame &f, JSStackFrame *entryFrame)
#if JS_MONOIC
static void
UpdateTraceHintSingle(JSC::CodeLocationJump jump, JSC::CodeLocationLabel target)
UpdateTraceHintSingle(Repatcher &repatcher, JSC::CodeLocationJump jump, JSC::CodeLocationLabel target)
{
/*
* Hack: The value that will be patched is before the executable address,
* so to get protection right, just unprotect the general region around
* the jump.
*/
uint8 *addr = (uint8 *)(jump.executableAddress());
JSC::RepatchBuffer repatch(addr - 64, 128);
repatch.relink(jump, target);
repatcher.relink(jump, target);
JaegerSpew(JSpew_PICs, "relinking trace hint %p to %p\n",
jump.executableAddress(), target.executableAddress());
@ -908,10 +906,11 @@ UpdateTraceHintSingle(JSC::CodeLocationJump jump, JSC::CodeLocationLabel target)
static void
DisableTraceHint(VMFrame &f, ic::TraceICInfo &tic)
{
UpdateTraceHintSingle(tic.traceHint, tic.jumpTarget);
Repatcher repatcher(f.jit());
UpdateTraceHintSingle(repatcher, tic.traceHint, tic.jumpTarget);
if (tic.hasSlowTraceHint)
UpdateTraceHintSingle(tic.slowTraceHint, tic.jumpTarget);
UpdateTraceHintSingle(repatcher, tic.slowTraceHint, tic.jumpTarget);
}
static void
@ -924,10 +923,12 @@ EnableTraceHintAt(JSScript *script, js::mjit::JITScript *jit, jsbytecode *pc, ui
JaegerSpew(JSpew_PICs, "Enabling trace IC %u in script %p\n", index, script);
UpdateTraceHintSingle(tic.traceHint, tic.stubEntry);
Repatcher repatcher(jit);
UpdateTraceHintSingle(repatcher, tic.traceHint, tic.stubEntry);
if (tic.hasSlowTraceHint)
UpdateTraceHintSingle(tic.slowTraceHint, tic.stubEntry);
UpdateTraceHintSingle(repatcher, tic.slowTraceHint, tic.stubEntry);
}
#endif
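Reassembled from the hunk above, DisableTraceHint after this patch builds a single Repatcher bound to the frame's JITScript and reuses it for both trace hints, instead of opening a fresh 128-byte window around each jump:

static void
DisableTraceHint(VMFrame &f, ic::TraceICInfo &tic)
{
    Repatcher repatcher(f.jit());
    UpdateTraceHintSingle(repatcher, tic.traceHint, tic.jumpTarget);
    if (tic.hasSlowTraceHint)
        UpdateTraceHintSingle(repatcher, tic.slowTraceHint, tic.jumpTarget);
}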

View file

@ -55,6 +55,8 @@
namespace js {
namespace mjit { struct JITScript; }
struct VMFrame
{
union Arguments {
@ -136,6 +138,7 @@ struct VMFrame
JSRuntime *runtime() { return cx->runtime; }
JSStackFrame *&fp() { return regs.fp; }
mjit::JITScript *jit() { return fp()->jit(); }
};
#ifdef JS_CPU_ARM
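Taken together, the JSStackFrame and VMFrame hunks give patching code a one-step route from the frame to the JITScript whose memory it is about to touch, which is what the Repatcher(f.jit()) constructions elsewhere in this commit rely on. Roughly:

// f.jit() == f.fp()->jit() == f.fp()->script()->getJIT(f.fp()->isConstructing())
Repatcher repatch(f.jit());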

View file

@ -43,7 +43,6 @@
#include "StubCalls.h"
#include "StubCalls-inl.h"
#include "assembler/assembler/LinkBuffer.h"
#include "assembler/assembler/RepatchBuffer.h"
#include "assembler/assembler/MacroAssembler.h"
#include "assembler/assembler/CodeLocation.h"
#include "CodeGenIncludes.h"
@ -72,7 +71,7 @@ typedef JSC::MacroAssembler::Call Call;
static void
PatchGetFallback(VMFrame &f, ic::MICInfo *ic)
{
JSC::RepatchBuffer repatch(ic->stubEntry.executableAddress(), 64);
Repatcher repatch(f.jit());
JSC::FunctionPtr fptr(JS_FUNC_TO_DATA_PTR(void *, stubs::GetGlobalName));
repatch.relink(ic->stubCall, fptr);
}
@ -101,21 +100,20 @@ ic::GetGlobalName(VMFrame &f, ic::MICInfo *ic)
ic->u.name.touched = true;
/* Patch shape guard. */
JSC::RepatchBuffer repatch(ic->entry.executableAddress(), 50);
repatch.repatch(ic->shape, obj->shape());
Repatcher repatcher(f.jit());
repatcher.repatch(ic->shape, obj->shape());
/* Patch loads. */
slot *= sizeof(Value);
JSC::RepatchBuffer loads(ic->load.executableAddress(), 32, false);
#if defined JS_CPU_X86
loads.repatch(ic->load.dataLabel32AtOffset(MICInfo::GET_DATA_OFFSET), slot);
loads.repatch(ic->load.dataLabel32AtOffset(MICInfo::GET_TYPE_OFFSET), slot + 4);
repatcher.repatch(ic->load.dataLabel32AtOffset(MICInfo::GET_DATA_OFFSET), slot);
repatcher.repatch(ic->load.dataLabel32AtOffset(MICInfo::GET_TYPE_OFFSET), slot + 4);
#elif defined JS_CPU_ARM
// ic->load actually points to the LDR instruction which fetches the offset, but 'repatch'
// knows how to dereference it to find the integer value.
loads.repatch(ic->load.dataLabel32AtOffset(0), slot);
repatcher.repatch(ic->load.dataLabel32AtOffset(0), slot);
#elif defined JS_PUNBOX64
loads.repatch(ic->load.dataLabel32AtOffset(ic->patchValueOffset), slot);
repatcher.repatch(ic->load.dataLabel32AtOffset(ic->patchValueOffset), slot);
#endif
/* Do load anyway... this time. */
@ -151,7 +149,7 @@ PatchSetFallback(VMFrame &f, ic::MICInfo *ic)
{
JSScript *script = f.fp()->script();
JSC::RepatchBuffer repatch(ic->stubEntry.executableAddress(), 64);
Repatcher repatch(f.jit());
VoidStubMIC stub = ic->u.name.usePropertyCache
? STRICT_VARIANT(DisabledSetGlobal)
: STRICT_VARIANT(DisabledSetGlobalNoCache);
@ -188,28 +186,27 @@ ic::SetGlobalName(VMFrame &f, ic::MICInfo *ic)
ic->u.name.touched = true;
/* Patch shape guard. */
JSC::RepatchBuffer repatch(ic->entry.executableAddress(), 50);
repatch.repatch(ic->shape, obj->shape());
Repatcher repatcher(f.jit());
repatcher.repatch(ic->shape, obj->shape());
/* Patch loads. */
slot *= sizeof(Value);
JSC::RepatchBuffer stores(ic->load.executableAddress(), 32, false);
#if defined JS_CPU_X86
stores.repatch(ic->load.dataLabel32AtOffset(MICInfo::SET_TYPE_OFFSET), slot + 4);
repatcher.repatch(ic->load.dataLabel32AtOffset(MICInfo::SET_TYPE_OFFSET), slot + 4);
uint32 dataOffset;
if (ic->u.name.typeConst)
dataOffset = MICInfo::SET_DATA_CONST_TYPE_OFFSET;
else
dataOffset = MICInfo::SET_DATA_TYPE_OFFSET;
stores.repatch(ic->load.dataLabel32AtOffset(dataOffset), slot);
repatcher.repatch(ic->load.dataLabel32AtOffset(dataOffset), slot);
#elif defined JS_CPU_ARM
// ic->load actually points to the LDR instruction which fetches the offset, but 'repatch'
// knows how to dereference it to find the integer value.
stores.repatch(ic->load.dataLabel32AtOffset(0), slot);
repatcher.repatch(ic->load.dataLabel32AtOffset(0), slot);
#elif defined JS_PUNBOX64
stores.repatch(ic->load.dataLabel32AtOffset(ic->patchValueOffset), slot);
repatcher.repatch(ic->load.dataLabel32AtOffset(ic->patchValueOffset), slot);
#endif
if (ic->u.name.usePropertyCache)
@ -372,14 +369,14 @@ class EqualityCompiler : public BaseCompiler
CodeLocationLabel cs = buffer.finalizeCodeAddendum();
Repatcher repatcher(f.jit());
/* Jump to the newly generated code instead of to the IC. */
JSC::RepatchBuffer jumpRepatcher(ic.jumpToStub.executableAddress(), INLINE_PATH_LENGTH);
jumpRepatcher.relink(ic.jumpToStub, cs);
repatcher.relink(ic.jumpToStub, cs);
/* Overwrite the call to the IC with a call to the stub. */
JSC::RepatchBuffer stubRepatcher(ic.stubCall.executableAddress(), INLINE_PATH_LENGTH);
JSC::FunctionPtr fptr(JS_FUNC_TO_DATA_PTR(void *, ic.stub));
stubRepatcher.relink(ic.stubCall, fptr);
repatcher.relink(ic.stubCall, fptr);
return true;
}
@ -492,7 +489,7 @@ class CallCompiler : public BaseCompiler
return ep;
}
bool generateFullCallStub(JSScript *script, uint32 flags)
bool generateFullCallStub(JITScript *from, JSScript *script, uint32 flags)
{
/*
* Create a stub that works with arity mismatches. Like the fast-path,
@ -562,20 +559,18 @@ class CallCompiler : public BaseCompiler
masm.size());
JSC::CodeLocationJump oolJump = ic.slowPathStart.jumpAtOffset(ic.oolJumpOffset);
uint8 *start = (uint8 *)oolJump.executableAddress();
JSC::RepatchBuffer repatch(start - 32, 64);
Repatcher repatch(from);
repatch.relink(oolJump, cs);
return true;
}
void patchInlinePath(JSScript *script, JSObject *obj)
void patchInlinePath(JITScript *from, JSScript *script, JSObject *obj)
{
JS_ASSERT(ic.frameSize.isStatic());
/* Very fast path. */
uint8 *start = (uint8 *)ic.funGuard.executableAddress();
JSC::RepatchBuffer repatch(start - 32, 64);
Repatcher repatch(from);
ic.fastGuardedObject = obj;
@ -585,10 +580,11 @@ class CallCompiler : public BaseCompiler
repatch.relink(ic.funGuard.jumpAtOffset(ic.hotJumpOffset),
JSC::CodeLocationLabel(jit->fastEntry));
JaegerSpew(JSpew_PICs, "patched CALL path %p (obj: %p)\n", start, ic.fastGuardedObject);
JaegerSpew(JSpew_PICs, "patched CALL path %p (obj: %p)\n",
ic.funGuard.executableAddress(), ic.fastGuardedObject);
}
bool generateStubForClosures(JSObject *obj)
bool generateStubForClosures(JITScript *from, JSObject *obj)
{
JS_ASSERT(ic.frameSize.isStatic());
@ -622,8 +618,7 @@ class CallCompiler : public BaseCompiler
JaegerSpew(JSpew_PICs, "generated CALL closure stub %p (%d bytes)\n",
cs.executableAddress(), masm.size());
uint8 *start = (uint8 *)ic.funJump.executableAddress();
JSC::RepatchBuffer repatch(start - 32, 64);
Repatcher repatch(from);
repatch.relink(ic.funJump, cs);
ic.hasJsFunCheck = true;
@ -633,6 +628,8 @@ class CallCompiler : public BaseCompiler
bool generateNativeStub()
{
JITScript *jit = f.jit();
/* Snapshot the frameDepth before SplatApplyArgs modifies it. */
uintN initialFrameDepth = f.regs.sp - f.regs.fp->slots();
@ -783,8 +780,7 @@ class CallCompiler : public BaseCompiler
JaegerSpew(JSpew_PICs, "generated native CALL stub %p (%d bytes)\n",
cs.executableAddress(), masm.size());
uint8 *start = (uint8 *)ic.funJump.executableAddress();
JSC::RepatchBuffer repatch(start - 32, 64);
Repatcher repatch(jit);
repatch.relink(ic.funJump, cs);
ic.fastGuardedNative = obj;
@ -794,6 +790,8 @@ class CallCompiler : public BaseCompiler
void *update()
{
JITScript *jit = f.jit();
stubs::UncachedCallResult ucr;
if (callingNew)
stubs::UncachedNewHelper(f, ic.frameSize.staticArgc(), &ucr);
@ -804,8 +802,7 @@ class CallCompiler : public BaseCompiler
// patch this site to go to a slow path always.
if (!ucr.codeAddr) {
JSC::CodeLocationCall oolCall = ic.slowPathStart.callAtOffset(ic.oolCallOffset);
uint8 *start = (uint8 *)oolCall.executableAddress();
JSC::RepatchBuffer repatch(start - 32, 64);
Repatcher repatch(jit);
JSC::FunctionPtr fptr = callingNew
? JSC::FunctionPtr(JS_FUNC_TO_DATA_PTR(void *, SlowNewFromIC))
: JSC::FunctionPtr(JS_FUNC_TO_DATA_PTR(void *, SlowCallFromIC));
@ -828,11 +825,11 @@ class CallCompiler : public BaseCompiler
}
if (!ic.frameSize.isStatic() || ic.frameSize.staticArgc() != fun->nargs) {
if (!generateFullCallStub(script, flags))
if (!generateFullCallStub(jit, script, flags))
THROWV(NULL);
} else {
if (!ic.fastGuardedObject) {
patchInlinePath(script, callee);
patchInlinePath(jit, script, callee);
} else if (!ic.hasJsFunCheck &&
!ic.fastGuardedNative &&
ic.fastGuardedObject->getFunctionPrivate() == fun) {
@ -840,10 +837,10 @@ class CallCompiler : public BaseCompiler
* Note: Multiple "function guard" stubs are not yet
* supported, thus the fastGuardedNative check.
*/
if (!generateStubForClosures(callee))
if (!generateStubForClosures(jit, callee))
THROWV(NULL);
} else {
if (!generateFullCallStub(script, flags))
if (!generateFullCallStub(jit, script, flags))
THROWV(NULL);
}
}
@ -1033,6 +1030,11 @@ ic::SplatApplyArgs(VMFrame &f)
void
JITScript::purgeMICs()
{
if (!nMICs)
return;
Repatcher repatch(this);
for (uint32 i = 0; i < nMICs; i++) {
ic::MICInfo &mic = mics[i];
switch (mic.kind) {
@ -1040,7 +1042,6 @@ JITScript::purgeMICs()
case ic::MICInfo::GET:
{
/* Patch shape guard. */
JSC::RepatchBuffer repatch(mic.entry.executableAddress(), 50);
repatch.repatch(mic.shape, int(JSObjectMap::INVALID_SHAPE));
/*
@ -1071,6 +1072,11 @@ ic::PurgeMICs(JSContext *cx, JSScript *script)
void
JITScript::sweepCallICs()
{
if (!nCallICs)
return;
Repatcher repatcher(this);
for (uint32 i = 0; i < nCallICs; i++) {
ic::CallICInfo &ic = callICs[i];
@ -1085,11 +1091,8 @@ JITScript::sweepCallICs()
if (!fastFunDead && !nativeDead)
continue;
uint8 *start = (uint8 *)ic.funGuard.executableAddress();
JSC::RepatchBuffer repatch(start - 32, 64);
if (fastFunDead) {
repatch.repatch(ic.funGuard, NULL);
repatcher.repatch(ic.funGuard, NULL);
ic.releasePool(CallICInfo::Pool_ClosureStub);
ic.hasJsFunCheck = false;
ic.fastGuardedObject = NULL;
@ -1100,7 +1103,7 @@ JITScript::sweepCallICs()
ic.fastGuardedNative = NULL;
}
repatch.relink(ic.funJump, ic.slowPathStart);
repatcher.relink(ic.funJump, ic.slowPathStart);
ic.hit = false;
}
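The purge and sweep loops follow the same pattern: bail out early when the script has no ICs of that kind, so no Repatcher (and no makeWritable call) is ever created for them, then bind one Repatcher to the whole JITScript for the entire loop. purgeMICs reassembled from the hunks above, with the non-GET cases elided:

void
JITScript::purgeMICs()
{
    if (!nMICs)
        return;
    Repatcher repatch(this);
    for (uint32 i = 0; i < nMICs; i++) {
        ic::MICInfo &mic = mics[i];
        switch (mic.kind) {
          case ic::MICInfo::GET:
            /* Patch shape guard. */
            repatch.repatch(mic.shape, int(JSObjectMap::INVALID_SHAPE));
            break;
          // ... other MIC kinds elided
        }
    }
}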

View file

@ -42,7 +42,6 @@
#include "StubCalls-inl.h"
#include "BaseCompiler.h"
#include "assembler/assembler/LinkBuffer.h"
#include "assembler/assembler/RepatchBuffer.h"
#include "jsscope.h"
#include "jsnum.h"
#include "jsatominlines.h"
@ -59,7 +58,6 @@ using namespace js;
using namespace js::mjit;
using namespace js::mjit::ic;
typedef JSC::RepatchBuffer RepatchBuffer;
typedef JSC::FunctionPtr FunctionPtr;
/* Rough over-estimate of how much memory we need to unprotect. */
@ -137,22 +135,6 @@ class PICStubCompiler : public BaseCompiler
}
};
class PICRepatchBuffer : public JSC::RepatchBuffer
{
ic::BaseIC &ic;
JSC::CodeLocationLabel label;
public:
PICRepatchBuffer(ic::BaseIC &ic, JSC::CodeLocationLabel path)
: JSC::RepatchBuffer(path.executableAddress(), INLINE_PATH_LENGTH),
ic(ic), label(path)
{ }
void relink(int32 offset, JSC::CodeLocationLabel target) {
JSC::RepatchBuffer::relink(label.jumpAtOffset(offset), target);
}
};
class SetPropCompiler : public PICStubCompiler
{
JSObject *obj;
@ -226,9 +208,8 @@ class SetPropCompiler : public PICStubCompiler
obj(obj), atom(atom), lastStubSecondShapeGuard(pic.secondShapeGuard)
{ }
static void reset(ic::PICInfo &pic)
static void reset(Repatcher &repatcher, ic::PICInfo &pic)
{
RepatchBuffer repatcher(pic.fastPathStart.executableAddress(), INLINE_PATH_LENGTH);
repatcher.repatchLEAToLoadPtr(pic.fastPathRejoin.instructionAtOffset(dslotsLoadOffset(pic)));
repatcher.repatch(pic.fastPathStart.dataLabel32AtOffset(
pic.shapeGuard + inlineShapeOffset(pic)),
@ -237,7 +218,6 @@ class SetPropCompiler : public PICStubCompiler
pic.shapeGuard + inlineShapeJump(pic)),
pic.slowPathStart);
RepatchBuffer repatcher2(pic.slowPathStart.executableAddress(), INLINE_PATH_LENGTH);
FunctionPtr target(JS_FUNC_TO_DATA_PTR(void *, ic::SetProp));
repatcher.relink(pic.slowPathCall, target);
}
@ -247,7 +227,7 @@ class SetPropCompiler : public PICStubCompiler
JS_ASSERT(!pic.inlinePathPatched);
JaegerSpew(JSpew_PICs, "patch setprop inline at %p\n", pic.fastPathStart.executableAddress());
PICRepatchBuffer repatcher(pic, pic.fastPathStart);
Repatcher repatcher(f.jit());
int32 offset;
if (inlineSlot) {
@ -284,8 +264,11 @@ class SetPropCompiler : public PICStubCompiler
return Lookup_Cacheable;
}
void patchPreviousToHere(PICRepatchBuffer &repatcher, CodeLocationLabel cs)
void patchPreviousToHere(CodeLocationLabel cs)
{
Repatcher repatcher(pic.lastCodeBlock(f.jit()));
CodeLocationLabel label = pic.lastPathStart();
// Patch either the inline fast path or a generated stub. The stub
// omits the prefix of the inline fast path that loads the shape, so
// the offsets are different.
@ -298,9 +281,9 @@ class SetPropCompiler : public PICStubCompiler
#endif
else
shapeGuardJumpOffset = pic.shapeGuard + inlineShapeJump();
repatcher.relink(shapeGuardJumpOffset, cs);
repatcher.relink(label.jumpAtOffset(shapeGuardJumpOffset), cs);
if (lastStubSecondShapeGuard)
repatcher.relink(lastStubSecondShapeGuard, cs);
repatcher.relink(label.jumpAtOffset(lastStubSecondShapeGuard), cs);
}
LookupStatus generateStub(uint32 initialShape, const Shape *shape, bool adding, bool inlineSlot)
@ -488,15 +471,13 @@ class SetPropCompiler : public PICStubCompiler
pic.stubsGenerated,
cs.executableAddress());
PICRepatchBuffer repatcher(pic, pic.lastPathStart());
// This function can patch either the inline fast path for a generated
// stub. The stub omits the prefix of the inline fast path that loads
// the shape, so the offsets are different.
patchPreviousToHere(repatcher, cs);
patchPreviousToHere(cs);
pic.stubsGenerated++;
pic.lastStubStart = buffer.locationOf(start);
pic.updateLastPath(buffer, start);
#if defined JS_PUNBOX64
pic.labels.setprop.stubShapeJump = masm.differenceBetween(start, stubShapeJumpLabel);
@ -793,9 +774,8 @@ class GetPropCompiler : public PICStubCompiler
lastStubSecondShapeGuard(pic.secondShapeGuard)
{ }
static void reset(ic::PICInfo &pic)
static void reset(Repatcher &repatcher, ic::PICInfo &pic)
{
RepatchBuffer repatcher(pic.fastPathStart.executableAddress(), INLINE_PATH_LENGTH);
repatcher.repatchLEAToLoadPtr(pic.fastPathRejoin.instructionAtOffset(dslotsLoad(pic)));
repatcher.repatch(pic.fastPathStart.dataLabel32AtOffset(
pic.shapeGuard + inlineShapeOffset(pic)),
@ -808,8 +788,6 @@ class GetPropCompiler : public PICStubCompiler
pic.slowPathStart.labelAtOffset(pic.u.get.typeCheckOffset));
}
RepatchBuffer repatcher2(pic.slowPathStart.executableAddress(), INLINE_PATH_LENGTH);
VoidStubPIC stub;
switch (pic.kind) {
case ic::PICInfo::GET:
@ -855,8 +833,7 @@ class GetPropCompiler : public PICStubCompiler
JaegerSpew(JSpew_PICs, "generate args length stub at %p\n",
start.executableAddress());
PICRepatchBuffer repatcher(pic, pic.lastPathStart());
patchPreviousToHere(repatcher, start);
patchPreviousToHere(start);
disable("args length done");
@ -889,8 +866,7 @@ class GetPropCompiler : public PICStubCompiler
JaegerSpew(JSpew_PICs, "generate array length stub at %p\n",
start.executableAddress());
PICRepatchBuffer repatcher(pic, pic.lastPathStart());
patchPreviousToHere(repatcher, start);
patchPreviousToHere(start);
disable("array length done");
@ -958,7 +934,7 @@ class GetPropCompiler : public PICStubCompiler
/* Patch the type check to jump here. */
if (pic.hasTypeCheck()) {
RepatchBuffer repatcher(pic.fastPathStart.executableAddress(), INLINE_PATH_LENGTH);
Repatcher repatcher(f.jit());
repatcher.relink(pic.fastPathStart.jumpAtOffset(GETPROP_INLINE_TYPE_GUARD), cs);
}
@ -993,7 +969,7 @@ class GetPropCompiler : public PICStubCompiler
start.executableAddress());
if (pic.hasTypeCheck()) {
RepatchBuffer repatcher(pic.fastPathStart.executableAddress(), INLINE_PATH_LENGTH);
Repatcher repatcher(f.jit());
repatcher.relink(pic.fastPathStart.jumpAtOffset(GETPROP_INLINE_TYPE_GUARD), start);
}
@ -1005,7 +981,7 @@ class GetPropCompiler : public PICStubCompiler
LookupStatus patchInline(JSObject *holder, const Shape *shape)
{
spew("patch", "inline");
PICRepatchBuffer repatcher(pic, pic.fastPathStart);
Repatcher repatcher(f.jit());
int32 offset;
if (!holder->hasSlotsArray()) {
@ -1114,11 +1090,10 @@ class GetPropCompiler : public PICStubCompiler
CodeLocationLabel cs = buffer.finalizeCodeAddendum();
JaegerSpew(JSpew_PICs, "generated %s stub at %p\n", type, cs.executableAddress());
PICRepatchBuffer repatcher(pic, pic.lastPathStart());
patchPreviousToHere(repatcher, cs);
patchPreviousToHere(cs);
pic.stubsGenerated++;
pic.lastStubStart = buffer.locationOf(start);
pic.updateLastPath(buffer, start);
#if defined JS_PUNBOX64
pic.labels.getprop.stubShapeJump = masm.differenceBetween(start, stubShapeJumpLabel);
@ -1133,8 +1108,11 @@ class GetPropCompiler : public PICStubCompiler
return Lookup_Cacheable;
}
void patchPreviousToHere(PICRepatchBuffer &repatcher, CodeLocationLabel cs)
void patchPreviousToHere(CodeLocationLabel cs)
{
Repatcher repatcher(pic.lastCodeBlock(f.jit()));
CodeLocationLabel label = pic.lastPathStart();
// Patch either the inline fast path or a generated stub. The stub
// omits the prefix of the inline fast path that loads the shape, so
// the offsets are different.
@ -1147,9 +1125,9 @@ class GetPropCompiler : public PICStubCompiler
#endif
else
shapeGuardJumpOffset = pic.shapeGuard + inlineShapeJump();
repatcher.relink(shapeGuardJumpOffset, cs);
repatcher.relink(label.jumpAtOffset(shapeGuardJumpOffset), cs);
if (lastStubSecondShapeGuard)
repatcher.relink(lastStubSecondShapeGuard, cs);
repatcher.relink(label.jumpAtOffset(lastStubSecondShapeGuard), cs);
}
LookupStatus update()
@ -1184,13 +1162,11 @@ class ScopeNameCompiler : public PICStubCompiler
getprop(f.cx, NULL, atom, *thisFromCtor())
{ }
static void reset(ic::PICInfo &pic)
static void reset(Repatcher &repatcher, ic::PICInfo &pic)
{
RepatchBuffer repatcher(pic.fastPathStart.executableAddress(), INLINE_PATH_LENGTH);
repatcher.relink(pic.fastPathStart.jumpAtOffset(SCOPENAME_JUMP_OFFSET),
pic.slowPathStart);
RepatchBuffer repatcher2(pic.slowPathStart.executableAddress(), INLINE_PATH_LENGTH);
VoidStubPIC stub = (pic.kind == ic::PICInfo::NAME) ? ic::Name : ic::XName;
FunctionPtr target(JS_FUNC_TO_DATA_PTR(void *, stub));
repatcher.relink(pic.slowPathCall, target);
@ -1286,11 +1262,12 @@ class ScopeNameCompiler : public PICStubCompiler
JaegerSpew(JSpew_PICs, "generated %s global stub at %p\n", type, cs.executableAddress());
spew("NAME stub", "global");
PICRepatchBuffer repatcher(pic, pic.lastPathStart());
repatcher.relink(SCOPENAME_JUMP_OFFSET, cs);
Repatcher repatcher(pic.lastCodeBlock(f.jit()));
CodeLocationLabel label = pic.lastPathStart();
repatcher.relink(label.jumpAtOffset(SCOPENAME_JUMP_OFFSET), cs);
pic.stubsGenerated++;
pic.lastStubStart = buffer.locationOf(failLabel);
pic.updateLastPath(buffer, failLabel);
if (pic.stubsGenerated == MAX_PIC_STUBS)
disable("max stubs reached");
@ -1388,11 +1365,12 @@ class ScopeNameCompiler : public PICStubCompiler
CodeLocationLabel cs = buffer.finalizeCodeAddendum();
JaegerSpew(JSpew_PICs, "generated %s call stub at %p\n", type, cs.executableAddress());
PICRepatchBuffer repatcher(pic, pic.lastPathStart());
repatcher.relink(SCOPENAME_JUMP_OFFSET, cs);
Repatcher repatcher(pic.lastCodeBlock(f.jit()));
CodeLocationLabel label = pic.lastPathStart();
repatcher.relink(label.jumpAtOffset(SCOPENAME_JUMP_OFFSET), cs);
pic.stubsGenerated++;
pic.lastStubStart = buffer.locationOf(failLabel);
pic.updateLastPath(buffer, failLabel);
if (pic.stubsGenerated == MAX_PIC_STUBS)
disable("max stubs reached");
@ -1499,14 +1477,14 @@ class BindNameCompiler : public PICStubCompiler
scopeChain(scopeChain), atom(atom)
{ }
static void reset(ic::PICInfo &pic)
static void reset(Repatcher &repatcher, ic::PICInfo &pic)
{
PICRepatchBuffer repatcher(pic, pic.fastPathStart);
repatcher.relink(pic.shapeGuard + inlineJumpOffset(pic), pic.slowPathStart);
int jumpOffset = pic.shapeGuard + inlineJumpOffset(pic);
JSC::CodeLocationJump jump = pic.fastPathStart.jumpAtOffset(jumpOffset);
repatcher.relink(jump, pic.slowPathStart);
RepatchBuffer repatcher2(pic.slowPathStart.executableAddress(), INLINE_PATH_LENGTH);
FunctionPtr target(JS_FUNC_TO_DATA_PTR(void *, ic::BindName));
repatcher2.relink(pic.slowPathCall, target);
repatcher.relink(pic.slowPathCall, target);
}
LookupStatus generateStub(JSObject *obj)
@ -1559,14 +1537,15 @@ class BindNameCompiler : public PICStubCompiler
CodeLocationLabel cs = buffer.finalizeCodeAddendum();
JaegerSpew(JSpew_PICs, "generated %s stub at %p\n", type, cs.executableAddress());
PICRepatchBuffer repatcher(pic, pic.lastPathStart());
Repatcher repatcher(pic.lastCodeBlock(f.jit()));
CodeLocationLabel label = pic.lastPathStart();
if (!pic.stubsGenerated)
repatcher.relink(pic.shapeGuard + inlineJumpOffset(), cs);
repatcher.relink(label.jumpAtOffset(pic.shapeGuard + inlineJumpOffset()), cs);
else
repatcher.relink(BINDNAME_STUB_JUMP_OFFSET, cs);
repatcher.relink(label.jumpAtOffset(BINDNAME_STUB_JUMP_OFFSET), cs);
pic.stubsGenerated++;
pic.lastStubStart = buffer.locationOf(failLabel);
pic.updateLastPath(buffer, failLabel);
if (pic.stubsGenerated == MAX_PIC_STUBS)
disable("max stubs reached");
@ -1935,7 +1914,7 @@ LookupStatus
BaseIC::disable(JSContext *cx, const char *reason, void *stub)
{
spew(cx, "disabled", reason);
RepatchBuffer repatcher(slowPathStart.executableAddress(), INLINE_PATH_LENGTH);
Repatcher repatcher(cx->fp()->jit());
repatcher.relink(slowPathCall, FunctionPtr(stub));
return Lookup_Uncacheable;
}
@ -1995,20 +1974,15 @@ GetElementIC::error(JSContext *cx)
}
void
GetElementIC::purge()
GetElementIC::purge(Repatcher &repatcher)
{
if (inlineTypeGuardPatched || inlineClaspGuardPatched) {
RepatchBuffer repatcher(fastPathStart.executableAddress(), INLINE_PATH_LENGTH);
// Repatch the inline jumps.
if (inlineTypeGuardPatched)
repatcher.relink(fastPathStart.jumpAtOffset(inlineTypeGuard), slowPathStart);
if (inlineClaspGuardPatched)
repatcher.relink(fastPathStart.jumpAtOffset(inlineClaspGuard), slowPathStart);
}
// Repatch the inline jumps.
if (inlineTypeGuardPatched)
repatcher.relink(fastPathStart.jumpAtOffset(inlineTypeGuard), slowPathStart);
if (inlineClaspGuardPatched)
repatcher.relink(fastPathStart.jumpAtOffset(inlineClaspGuard), slowPathStart);
if (slowCallPatched) {
RepatchBuffer repatcher(slowPathStart.executableAddress(), INLINE_PATH_LENGTH);
if (op == JSOP_GETELEM)
repatcher.relink(slowPathCall, FunctionPtr(JS_FUNC_TO_DATA_PTR(void *, ic::GetElement)));
else if (op == JSOP_CALLELEM)
@ -2114,7 +2088,7 @@ GetElementIC::attachGetProp(JSContext *cx, JSObject *obj, const Value &v, jsid i
// Update the inline guards, if needed.
if (shouldPatchInlineTypeGuard() || shouldPatchUnconditionalClaspGuard()) {
PICRepatchBuffer repatcher(*this, fastPathStart);
Repatcher repatcher(cx->fp()->jit());
if (shouldPatchInlineTypeGuard()) {
// A type guard is present in the inline path, and this is the
@ -2122,7 +2096,7 @@ GetElementIC::attachGetProp(JSContext *cx, JSObject *obj, const Value &v, jsid i
JS_ASSERT(!inlineTypeGuardPatched);
JS_ASSERT(atomTypeGuard.isSet());
repatcher.relink(inlineTypeGuard, cs);
repatcher.relink(fastPathStart.jumpAtOffset(inlineTypeGuard), cs);
inlineTypeGuardPatched = true;
}
@ -2133,24 +2107,25 @@ GetElementIC::attachGetProp(JSContext *cx, JSObject *obj, const Value &v, jsid i
// because it follows an integer-id guard.
JS_ASSERT(!hasInlineTypeGuard());
repatcher.relink(inlineClaspGuard, cs);
repatcher.relink(fastPathStart.jumpAtOffset(inlineClaspGuard), cs);
inlineClaspGuardPatched = true;
}
}
// If there were previous stub guards, patch them now.
if (hasLastStringStub) {
PICRepatchBuffer repatcher(*this, lastStringStub);
Repatcher repatcher(lastStringStub);
CodeLocationLabel stub(lastStringStub.start());
if (atomGuard)
repatcher.relink(atomGuard, cs);
repatcher.relink(firstShapeGuard, cs);
repatcher.relink(stub.jumpAtOffset(atomGuard), cs);
repatcher.relink(stub.jumpAtOffset(firstShapeGuard), cs);
if (secondShapeGuard)
repatcher.relink(secondShapeGuard, cs);
repatcher.relink(stub.jumpAtOffset(secondShapeGuard), cs);
}
// Update state.
hasLastStringStub = true;
lastStringStub = cs;
lastStringStub = JITCode(cs.executableAddress(), buffer.size());
if (atomIdGuard.isSet()) {
atomGuard = buffer.locationOf(atomIdGuard.get()) - cs;
JS_ASSERT(atomGuard == buffer.locationOf(atomIdGuard.get()) - cs);
@ -2313,20 +2288,15 @@ SetElementIC::error(JSContext *cx)
}
void
SetElementIC::purge()
SetElementIC::purge(Repatcher &repatcher)
{
if (inlineClaspGuardPatched || inlineHoleGuardPatched) {
RepatchBuffer repatcher(fastPathStart.executableAddress(), INLINE_PATH_LENGTH);
// Repatch the inline jumps.
if (inlineClaspGuardPatched)
repatcher.relink(fastPathStart.jumpAtOffset(inlineClaspGuard), slowPathStart);
if (inlineHoleGuardPatched)
repatcher.relink(fastPathStart.jumpAtOffset(inlineHoleGuard), slowPathStart);
}
// Repatch the inline jumps.
if (inlineClaspGuardPatched)
repatcher.relink(fastPathStart.jumpAtOffset(inlineClaspGuard), slowPathStart);
if (inlineHoleGuardPatched)
repatcher.relink(fastPathStart.jumpAtOffset(inlineHoleGuard), slowPathStart);
if (slowCallPatched) {
RepatchBuffer repatcher(slowPathStart.executableAddress(), INLINE_PATH_LENGTH);
void *stub = JS_FUNC_TO_DATA_PTR(void *, APPLY_STRICTNESS(ic::SetElement, strictMode));
repatcher.relink(slowPathCall, FunctionPtr(stub));
}
@ -2423,8 +2393,8 @@ SetElementIC::attachHoleStub(JSContext *cx, JSObject *obj, int32 keyval)
CodeLocationLabel cs = buffer.finalizeCodeAddendum();
JaegerSpew(JSpew_PICs, "generated dense array hole stub at %p\n", cs.executableAddress());
PICRepatchBuffer repatcher(*this, fastPathStart);
repatcher.relink(inlineHoleGuard, cs);
Repatcher repatcher(cx->fp()->jit());
repatcher.relink(fastPathStart.jumpAtOffset(inlineHoleGuard), cs);
inlineHoleGuardPatched = true;
disable(cx, "generated dense array hole stub");
@ -2470,23 +2440,28 @@ template void JS_FASTCALL ic::SetElement<false>(VMFrame &f, SetElementIC *ic);
void
JITScript::purgePICs()
{
if (!nPICs && !nGetElems && !nSetElems)
return;
Repatcher repatcher(this);
for (uint32 i = 0; i < nPICs; i++) {
ic::PICInfo &pic = pics[i];
switch (pic.kind) {
case ic::PICInfo::SET:
case ic::PICInfo::SETMETHOD:
SetPropCompiler::reset(pic);
SetPropCompiler::reset(repatcher, pic);
break;
case ic::PICInfo::NAME:
case ic::PICInfo::XNAME:
ScopeNameCompiler::reset(pic);
ScopeNameCompiler::reset(repatcher, pic);
break;
case ic::PICInfo::BIND:
BindNameCompiler::reset(pic);
BindNameCompiler::reset(repatcher, pic);
break;
case ic::PICInfo::CALL: /* fall-through */
case ic::PICInfo::GET:
GetPropCompiler::reset(pic);
GetPropCompiler::reset(repatcher, pic);
break;
default:
JS_NOT_REACHED("Unhandled PIC kind");
@ -2496,9 +2471,9 @@ JITScript::purgePICs()
}
for (uint32 i = 0; i < nGetElems; i++)
getElems[i].purge();
getElems[i].purge(repatcher);
for (uint32 i = 0; i < nSetElems; i++)
setElems[i].purge();
setElems[i].purge(repatcher);
}
void
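The recurring PolyIC change is the removal of PICRepatchBuffer: stub patchers now bind a Repatcher to pic.lastCodeBlock(f.jit()), meaning either the whole method body or the last generated stub, and compute jump locations explicitly with jumpAtOffset on pic.lastPathStart(). SetPropCompiler::patchPreviousToHere reassembled from the hunks above, with the platform-specific stub-offset branch elided:

void patchPreviousToHere(CodeLocationLabel cs)
{
    Repatcher repatcher(pic.lastCodeBlock(f.jit()));
    CodeLocationLabel label = pic.lastPathStart();

    // Patch either the inline fast path or a generated stub. The stub omits
    // the prefix of the inline fast path that loads the shape, so the
    // offsets differ; the stub case uses a per-platform offset not shown here.
    int shapeGuardJumpOffset = pic.shapeGuard + inlineShapeJump();

    repatcher.relink(label.jumpAtOffset(shapeGuardJumpOffset), cs);
    if (lastStubSecondShapeGuard)
        repatcher.relink(label.jumpAtOffset(lastStubSecondShapeGuard), cs);
}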

View file

@ -49,6 +49,7 @@
#include "BaseAssembler.h"
#include "RematInfo.h"
#include "BaseCompiler.h"
#include "assembler/moco/MocoStubs.h"
namespace js {
namespace mjit {
@ -191,15 +192,6 @@ struct BaseIC : public MacroAssemblerTypedefs {
// Slow path stub call.
CodeLocationCall slowPathCall;
// Address of the start of the last generated stub, if any.
CodeLocationLabel lastStubStart;
// Return the start address of the last path in this PIC, which is the
// inline path if no stubs have been generated yet.
CodeLocationLabel lastPathStart() {
return stubsGenerated > 0 ? lastStubStart : fastPathStart;
}
// Whether or not the callsite has been hit at least once.
bool hit : 1;
bool slowCallPatched : 1;
@ -306,7 +298,7 @@ struct GetElementIC : public BasePolyIC {
int secondShapeGuard : 8; // optional, non-zero if present
bool hasLastStringStub : 1;
CodeLocationLabel lastStringStub;
JITCode lastStringStub;
// A limited ValueRemat instance. It may contains either:
// 1) A constant, or
@ -334,7 +326,7 @@ struct GetElementIC : public BasePolyIC {
typeRegHasBaseShape = false;
hasLastStringStub = false;
}
void purge();
void purge(Repatcher &repatcher);
LookupStatus update(JSContext *cx, JSObject *obj, const Value &v, jsid id, Value *vp);
LookupStatus attachGetProp(JSContext *cx, JSObject *obj, const Value &v, jsid id,
Value *vp);
@ -396,7 +388,7 @@ struct SetElementIC : public BaseIC {
inlineClaspGuardPatched = false;
inlineHoleGuardPatched = false;
}
void purge();
void purge(Repatcher &repatcher);
LookupStatus attachHoleStub(JSContext *cx, JSObject *obj, int32 key);
LookupStatus update(JSContext *cx, const Value &objval, const Value &idval);
LookupStatus disable(JSContext *cx, const char *reason);
@ -432,6 +424,32 @@ struct PICInfo : public BasePolyIC {
ValueRemat vr;
} u;
// Address of the start of the last generated stub, if any. Note that this
// does not correctly overlay with the allocated memory; it does however
// overlay the portion that may need to be patched, which is good enough.
JITCode lastStubStart;
// Return the start address of the last path in this PIC, which is the
// inline path if no stubs have been generated yet.
CodeLocationLabel lastPathStart() {
if (!stubsGenerated)
return fastPathStart;
return CodeLocationLabel(lastStubStart.start());
}
// Return a JITCode block corresponding to the code memory to attach a
// new stub to.
JITCode lastCodeBlock(JITScript *jit) {
if (!stubsGenerated)
return JITCode(jit->code.m_code.executableAddress(), jit->code.m_size);
return lastStubStart;
}
void updateLastPath(LinkerHelper &linker, Label label) {
CodeLocationLabel loc = linker.locationOf(label);
lastStubStart = JITCode(loc.executableAddress(), linker.size());
}
Kind kind : 3;
// True if register R holds the base object shape along exits from the