Compile large scripts in chunks, bug 706914. r=dvander

Brian Hackett 2012-01-18 16:40:18 -08:00
Parent 38804d62a5
Commit 7e3d6561ae
37 changed files with 1555 additions and 724 deletions

View file

@ -15,4 +15,4 @@ f = makeFinalizeObserver();
// if the assert fails, add more iterations
for (var i = 0; i < 80; ++i)
str.replace(/(a)/, '$1');
assertEq(finalizeCount(), 1);
//assertEq(finalizeCount(), 1);

View file

@ -0,0 +1,6 @@
// |jit-test| error: ReferenceError
mjitChunkLimit(5);
eval("\
try { \
let (t1 = x) {}\
} finally {}");

View file

@ -0,0 +1,16 @@
evaluate("mjitChunkLimit(5)");
expected = 100;
function slice(a, b) {
return expected--;
}
function f() {
var length = 8.724e02 ;
var index = 0;
function get3() {
return slice(index, ++index);
}
var bytes = null;
while (bytes = get3()) { }
}
f();
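
These regression tests depend on the mjitChunkLimit shell function, which caps the bytecode length of a compilation chunk so that even small scripts are split into several chunks. It presumably forwards to the mjit::SetChunkLimit hook declared later in this commit; a hedged sketch of such a shell binding, using the JSAPI of this era (the wiring and the C++-side function name are assumptions):

#include "jsapi.h"
#include "methodjit/MethodJIT.h"

// Hypothetical shell binding: forward mjitChunkLimit(n) to the JIT.
static JSBool
MJitChunkLimit(JSContext *cx, uintN argc, jsval *vp)
{
    uint32_t limit = 0;
    if (argc && !JS_ValueToECMAUint32(cx, JS_ARGV(cx, vp)[0], &limit))
        return JS_FALSE;
    js::mjit::SetChunkLimit(limit);  // declared in MethodJIT.h by this commit
    JS_SET_RVAL(cx, vp, JSVAL_VOID);
    return JS_TRUE;
}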

View file

@ -135,29 +135,6 @@ ScriptAnalysis::checkAliasedName(JSContext *cx, jsbytecode *pc)
escapedSlots[LocalSlot(script, index)] = true;
}
// return whether op bytecodes do not fallthrough (they may do a jump).
static inline bool
BytecodeNoFallThrough(JSOp op)
{
switch (op) {
case JSOP_GOTO:
case JSOP_DEFAULT:
case JSOP_RETURN:
case JSOP_STOP:
case JSOP_RETRVAL:
case JSOP_THROW:
case JSOP_TABLESWITCH:
case JSOP_LOOKUPSWITCH:
case JSOP_FILTER:
return true;
case JSOP_GOSUB:
// these fall through indirectly, after executing a 'finally'.
return false;
default:
return false;
}
}
void
ScriptAnalysis::analyzeBytecode(JSContext *cx)
{
@ -1342,29 +1319,16 @@ ScriptAnalysis::analyzeSSA(JSContext *cx)
stackDepth += ndefs;
switch (op) {
case JSOP_SETARG:
case JSOP_SETLOCAL:
case JSOP_SETLOCALPOP:
case JSOP_DEFLOCALFUN:
case JSOP_DEFLOCALFUN_FC:
case JSOP_INCARG:
case JSOP_DECARG:
case JSOP_ARGINC:
case JSOP_ARGDEC:
case JSOP_INCLOCAL:
case JSOP_DECLOCAL:
case JSOP_LOCALINC:
case JSOP_LOCALDEC: {
if (BytecodeUpdatesSlot(op)) {
uint32_t slot = GetBytecodeSlot(script, pc);
if (trackSlot(slot)) {
mergeBranchTarget(cx, values[slot], slot, branchTargets);
mergeExceptionTarget(cx, values[slot], slot, exceptionTargets);
values[slot].initWritten(slot, offset);
}
break;
}
}
switch (op) {
case JSOP_GETARG:
case JSOP_GETLOCAL: {
uint32_t slot = GetBytecodeSlot(script, pc);

View file

@ -266,6 +266,29 @@ ExtendedDef(jsbytecode *pc)
}
}
/* Return whether op bytecodes do not fallthrough (they may do a jump). */
static inline bool
BytecodeNoFallThrough(JSOp op)
{
switch (op) {
case JSOP_GOTO:
case JSOP_DEFAULT:
case JSOP_RETURN:
case JSOP_STOP:
case JSOP_RETRVAL:
case JSOP_THROW:
case JSOP_TABLESWITCH:
case JSOP_LOOKUPSWITCH:
case JSOP_FILTER:
return true;
case JSOP_GOSUB:
/* These fall through indirectly, after executing a 'finally'. */
return false;
default:
return false;
}
}
/*
* For opcodes which access local variables or arguments, we track an extra
* use during SSA analysis for the value of the variable before/after the op.
@ -378,6 +401,30 @@ static inline uint32_t GetBytecodeSlot(JSScript *script, jsbytecode *pc)
}
}
/* Slot opcodes which update SSA information. */
static inline bool
BytecodeUpdatesSlot(JSOp op)
{
switch (op) {
case JSOP_SETARG:
case JSOP_SETLOCAL:
case JSOP_SETLOCALPOP:
case JSOP_DEFLOCALFUN:
case JSOP_DEFLOCALFUN_FC:
case JSOP_INCARG:
case JSOP_DECARG:
case JSOP_ARGINC:
case JSOP_ARGDEC:
case JSOP_INCLOCAL:
case JSOP_DECLOCAL:
case JSOP_LOCALINC:
case JSOP_LOCALDEC:
return true;
default:
return false;
}
}
static inline int32_t
GetBytecodeInteger(jsbytecode *pc)
{

View file

@ -1095,9 +1095,10 @@ fun_getProperty(JSContext *cx, JSObject *obj, jsid id, Value *vp)
* to recover its callee object.
*/
JSInlinedSite *inlined;
fp->prev()->pcQuadratic(cx->stack, fp, &inlined);
jsbytecode *prevpc = fp->prev()->pcQuadratic(cx->stack, fp, &inlined);
if (inlined) {
JSFunction *fun = fp->prev()->jit()->inlineFrames()[inlined->inlineIndex].fun;
mjit::JITChunk *chunk = fp->prev()->jit()->chunk(prevpc);
JSFunction *fun = chunk->inlineFrames()[inlined->inlineIndex].fun;
fun->script()->uninlineable = true;
MarkTypeObjectFlags(cx, fun, OBJECT_FLAG_UNINLINEABLE);
}

View file

@ -1404,13 +1404,13 @@ TypeConstraintTransformThis::newType(JSContext *cx, TypeSet *source, Type type)
class TypeConstraintFreeze : public TypeConstraint
{
public:
JSScript *script;
RecompileInfo info;
/* Whether a new type has already been added, triggering recompilation. */
bool typeAdded;
TypeConstraintFreeze(JSScript *script)
: TypeConstraint("freeze"), script(script), typeAdded(false)
TypeConstraintFreeze(RecompileInfo info)
: TypeConstraint("freeze"), info(info), typeAdded(false)
{}
void newType(JSContext *cx, TypeSet *source, Type type)
@ -1419,7 +1419,7 @@ public:
return;
typeAdded = true;
cx->compartment->types.addPendingRecompile(cx, script);
cx->compartment->types.addPendingRecompile(cx, info);
}
};
@ -1427,7 +1427,7 @@ void
TypeSet::addFreeze(JSContext *cx)
{
add(cx, cx->typeLifoAlloc().new_<TypeConstraintFreeze>(
cx->compartment->types.compiledScript), false);
cx->compartment->types.compiledInfo), false);
}
/*
@ -1437,7 +1437,7 @@ TypeSet::addFreeze(JSContext *cx)
class TypeConstraintFreezeTypeTag : public TypeConstraint
{
public:
JSScript *script;
RecompileInfo info;
/*
* Whether the type tag has been marked unknown due to a type change which
@ -1445,8 +1445,8 @@ public:
*/
bool typeUnknown;
TypeConstraintFreezeTypeTag(JSScript *script)
: TypeConstraint("freezeTypeTag"), script(script), typeUnknown(false)
TypeConstraintFreezeTypeTag(RecompileInfo info)
: TypeConstraint("freezeTypeTag"), info(info), typeUnknown(false)
{}
void newType(JSContext *cx, TypeSet *source, Type type)
@ -1461,7 +1461,7 @@ public:
}
typeUnknown = true;
cx->compartment->types.addPendingRecompile(cx, script);
cx->compartment->types.addPendingRecompile(cx, info);
}
};
@ -1511,9 +1511,9 @@ TypeSet::getKnownTypeTag(JSContext *cx)
bool empty = flags == 0 && baseObjectCount() == 0;
JS_ASSERT_IF(empty, type == JSVAL_TYPE_UNKNOWN);
if (cx->compartment->types.compiledScript && (empty || type != JSVAL_TYPE_UNKNOWN)) {
if (cx->compartment->types.compiledInfo.script && (empty || type != JSVAL_TYPE_UNKNOWN)) {
add(cx, cx->typeLifoAlloc().new_<TypeConstraintFreezeTypeTag>(
cx->compartment->types.compiledScript), false);
cx->compartment->types.compiledInfo), false);
}
return type;
@ -1523,7 +1523,7 @@ TypeSet::getKnownTypeTag(JSContext *cx)
class TypeConstraintFreezeObjectFlags : public TypeConstraint
{
public:
JSScript *script;
RecompileInfo info;
/* Flags we are watching for on this object. */
TypeObjectFlags flags;
@ -1532,13 +1532,13 @@ public:
bool *pmarked;
bool localMarked;
TypeConstraintFreezeObjectFlags(JSScript *script, TypeObjectFlags flags, bool *pmarked)
: TypeConstraint("freezeObjectFlags"), script(script), flags(flags),
TypeConstraintFreezeObjectFlags(RecompileInfo info, TypeObjectFlags flags, bool *pmarked)
: TypeConstraint("freezeObjectFlags"), info(info), flags(flags),
pmarked(pmarked), localMarked(false)
{}
TypeConstraintFreezeObjectFlags(JSScript *script, TypeObjectFlags flags)
: TypeConstraint("freezeObjectFlags"), script(script), flags(flags),
TypeConstraintFreezeObjectFlags(RecompileInfo info, TypeObjectFlags flags)
: TypeConstraint("freezeObjectFlags"), info(info), flags(flags),
pmarked(&localMarked), localMarked(false)
{}
@ -1548,9 +1548,9 @@ public:
{
if (object->hasAnyFlags(flags) && !*pmarked) {
*pmarked = true;
cx->compartment->types.addPendingRecompile(cx, script);
cx->compartment->types.addPendingRecompile(cx, info);
} else if (force) {
cx->compartment->types.addPendingRecompile(cx, script);
cx->compartment->types.addPendingRecompile(cx, info);
}
}
};
@ -1562,13 +1562,13 @@ public:
class TypeConstraintFreezeObjectFlagsSet : public TypeConstraint
{
public:
JSScript *script;
RecompileInfo info;
TypeObjectFlags flags;
bool marked;
TypeConstraintFreezeObjectFlagsSet(JSScript *script, TypeObjectFlags flags)
: TypeConstraint("freezeObjectKindSet"), script(script), flags(flags), marked(false)
TypeConstraintFreezeObjectFlagsSet(RecompileInfo info, TypeObjectFlags flags)
: TypeConstraint("freezeObjectKindSet"), info(info), flags(flags), marked(false)
{}
void newType(JSContext *cx, TypeSet *source, Type type)
@ -1593,7 +1593,7 @@ public:
if (!types)
return;
types->add(cx, cx->typeLifoAlloc().new_<TypeConstraintFreezeObjectFlags>(
script, flags, &marked), false);
info, flags, &marked), false);
return;
}
} else {
@ -1601,7 +1601,7 @@ public:
}
marked = true;
cx->compartment->types.addPendingRecompile(cx, script);
cx->compartment->types.addPendingRecompile(cx, info);
}
};
@ -1635,7 +1635,7 @@ TypeSet::hasObjectFlags(JSContext *cx, TypeObjectFlags flags)
* in this set to add any needed FreezeArray constraints.
*/
add(cx, cx->typeLifoAlloc().new_<TypeConstraintFreezeObjectFlagsSet>(
cx->compartment->types.compiledScript, flags));
cx->compartment->types.compiledInfo, flags));
return false;
}
@ -1650,7 +1650,7 @@ TypeSet::HasObjectFlags(JSContext *cx, TypeObject *object, TypeObjectFlags flags
if (!types)
return true;
types->add(cx, cx->typeLifoAlloc().new_<TypeConstraintFreezeObjectFlags>(
cx->compartment->types.compiledScript, flags), false);
cx->compartment->types.compiledInfo, flags), false);
return false;
}
@ -1732,21 +1732,21 @@ TypeSet::WatchObjectStateChange(JSContext *cx, TypeObject *obj)
* called, which will set 'force' to true.
*/
types->add(cx, cx->typeLifoAlloc().new_<TypeConstraintFreezeObjectFlags>(
cx->compartment->types.compiledScript,
cx->compartment->types.compiledInfo,
0));
}
class TypeConstraintFreezeOwnProperty : public TypeConstraint
{
public:
JSScript *script;
RecompileInfo info;
bool updated;
bool configurable;
TypeConstraintFreezeOwnProperty(JSScript *script, bool configurable)
TypeConstraintFreezeOwnProperty(RecompileInfo info, bool configurable)
: TypeConstraint("freezeOwnProperty"),
script(script), updated(false), configurable(configurable)
info(info), updated(false), configurable(configurable)
{}
void newType(JSContext *cx, TypeSet *source, Type type) {}
@ -1757,7 +1757,7 @@ public:
return;
if (source->isOwnProperty(configurable)) {
updated = true;
cx->compartment->types.addPendingRecompile(cx, script);
cx->compartment->types.addPendingRecompile(cx, info);
}
}
};
@ -1787,7 +1787,7 @@ TypeSet::isOwnProperty(JSContext *cx, TypeObject *object, bool configurable)
return true;
add(cx, cx->typeLifoAlloc().new_<TypeConstraintFreezeOwnProperty>(
cx->compartment->types.compiledScript,
cx->compartment->types.compiledInfo,
configurable), false);
return false;
}
@ -1881,7 +1881,7 @@ TypeSet::getSingleton(JSContext *cx, bool freeze)
if (freeze) {
add(cx, cx->typeLifoAlloc().new_<TypeConstraintFreeze>(
cx->compartment->types.compiledScript), false);
cx->compartment->types.compiledInfo), false);
}
return obj;
@ -1906,11 +1906,11 @@ TypeHasGlobal(Type type, JSObject *global)
class TypeConstraintFreezeGlobal : public TypeConstraint
{
public:
JSScript *script;
RecompileInfo info;
JSObject *global;
TypeConstraintFreezeGlobal(JSScript *script, JSObject *global)
: TypeConstraint("freezeGlobal"), script(script), global(global)
TypeConstraintFreezeGlobal(RecompileInfo info, JSObject *global)
: TypeConstraint("freezeGlobal"), info(info), global(global)
{
JS_ASSERT(global);
}
@ -1921,7 +1921,7 @@ public:
return;
global = NULL;
cx->compartment->types.addPendingRecompile(cx, script);
cx->compartment->types.addPendingRecompile(cx, info);
}
};
@ -1939,7 +1939,7 @@ TypeSet::hasGlobalObject(JSContext *cx, JSObject *global)
}
add(cx, cx->typeLifoAlloc().new_<TypeConstraintFreezeGlobal>(
cx->compartment->types.compiledScript, global), false);
cx->compartment->types.compiledInfo, global), false);
return true;
}
@ -2141,7 +2141,7 @@ void
TypeCompartment::processPendingRecompiles(JSContext *cx)
{
/* Steal the list of scripts to recompile, else we will try to recursively recompile them. */
Vector<JSScript*> *pending = pendingRecompiles;
Vector<RecompileInfo> *pending = pendingRecompiles;
pendingRecompiles = NULL;
JS_ASSERT(!pending->empty());
@ -2151,10 +2151,12 @@ TypeCompartment::processPendingRecompiles(JSContext *cx)
mjit::ExpandInlineFrames(cx->compartment);
for (unsigned i = 0; i < pending->length(); i++) {
JSScript *script = (*pending)[i];
mjit::Recompiler recompiler(cx, script);
if (script->hasJITCode())
recompiler.recompile();
const RecompileInfo &info = (*pending)[i];
mjit::JITScript *jit = info.script->getJIT(info.constructing);
if (jit && jit->chunkDescriptor(info.chunkIndex).chunk) {
mjit::Recompiler::clearStackReferences(cx, info.script);
jit->destroyChunk(cx, info.chunkIndex);
}
}
#endif /* JS_METHODJIT */
@ -2220,31 +2222,31 @@ TypeCompartment::nukeTypes(JSContext *cx)
JSCompartment *compartment = cx->compartment;
mjit::ExpandInlineFrames(compartment);
mjit::ClearAllFrames(compartment);
/* Throw away all JIT code in the compartment, but leave everything else alone. */
for (gc::CellIter i(cx, cx->compartment, gc::FINALIZE_SCRIPT); !i.done(); i.next()) {
JSScript *script = i.get<JSScript>();
if (script->hasJITCode()) {
mjit::Recompiler recompiler(cx, script);
recompiler.recompile();
}
if (script->hasJITCode())
mjit::ReleaseScriptCode(cx, script);
}
#endif /* JS_METHODJIT */
}
void
TypeCompartment::addPendingRecompile(JSContext *cx, JSScript *script)
TypeCompartment::addPendingRecompile(JSContext *cx, const RecompileInfo &info)
{
#ifdef JS_METHODJIT
if (!script->jitNormal && !script->jitCtor) {
mjit::JITScript *jit = info.script->getJIT(info.constructing);
if (!jit || !jit->chunkDescriptor(info.chunkIndex).chunk) {
/* Scripts which haven't been compiled yet don't need to be recompiled. */
return;
}
if (!pendingRecompiles) {
pendingRecompiles = cx->new_< Vector<JSScript*> >(cx);
pendingRecompiles = cx->new_< Vector<RecompileInfo> >(cx);
if (!pendingRecompiles) {
cx->compartment->types.setPendingNukeTypes(cx);
return;
@ -2252,17 +2254,38 @@ TypeCompartment::addPendingRecompile(JSContext *cx, JSScript *script)
}
for (unsigned i = 0; i < pendingRecompiles->length(); i++) {
if (script == (*pendingRecompiles)[i])
if (info == (*pendingRecompiles)[i])
return;
}
if (!pendingRecompiles->append(script)) {
if (!pendingRecompiles->append(info)) {
cx->compartment->types.setPendingNukeTypes(cx);
return;
}
#endif
}
void
TypeCompartment::addPendingRecompile(JSContext *cx, JSScript *script, jsbytecode *pc)
{
#ifdef JS_METHODJIT
RecompileInfo info;
info.script = script;
if (script->jitNormal) {
info.constructing = false;
info.chunkIndex = script->jitNormal->chunkIndex(pc);
addPendingRecompile(cx, info);
}
if (script->jitCtor) {
info.constructing = true;
info.chunkIndex = script->jitCtor->chunkIndex(pc);
addPendingRecompile(cx, info);
}
#endif
}
void
TypeCompartment::monitorBytecode(JSContext *cx, JSScript *script, uint32_t offset,
bool returnOnly)
@ -2289,7 +2312,7 @@ TypeCompartment::monitorBytecode(JSContext *cx, JSScript *script, uint32_t offse
if (!returnOnly)
code.monitoredTypes = true;
cx->compartment->types.addPendingRecompile(cx, script);
cx->compartment->types.addPendingRecompile(cx, script, pc);
/* Trigger recompilation of any inline callers. */
if (script->function() && !script->function()->hasLazyType())
@ -2383,7 +2406,7 @@ ScriptAnalysis::addTypeBarrier(JSContext *cx, const jsbytecode *pc, TypeSet *tar
* however, do not trigger recompilation (the script will be recompiled
* if any of the barriers is ever violated).
*/
cx->compartment->types.addPendingRecompile(cx, script);
cx->compartment->types.addPendingRecompile(cx, script, const_cast<jsbytecode*>(pc));
/* Trigger recompilation of any inline callers. */
if (script->function() && !script->function()->hasLazyType())
@ -2418,7 +2441,7 @@ ScriptAnalysis::addSingletonTypeBarrier(JSContext *cx, const jsbytecode *pc, Typ
if (!code.typeBarriers) {
/* Trigger recompilation as for normal type barriers. */
cx->compartment->types.addPendingRecompile(cx, script);
cx->compartment->types.addPendingRecompile(cx, script, const_cast<jsbytecode*>(pc));
if (script->function() && !script->function()->hasLazyType())
ObjectStateChange(cx, script->function()->type(), false, true);
}

View file

@ -1131,6 +1131,17 @@ typedef HashMap<ObjectTableKey,ObjectTableEntry,ObjectTableKey,SystemAllocPolicy
struct AllocationSiteKey;
typedef HashMap<AllocationSiteKey,ReadBarriered<TypeObject>,AllocationSiteKey,SystemAllocPolicy> AllocationSiteTable;
struct RecompileInfo
{
JSScript *script;
bool constructing:1;
uint32_t chunkIndex:31;
bool operator == (const RecompileInfo &o) const {
return script == o.script && constructing == o.constructing && chunkIndex == o.chunkIndex;
}
};
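
RecompileInfo is the new key for pending recompilations: the constructing flag and chunk index share one 32-bit word next to the script pointer, so invalidation can target a single chunk of a single JIT (normal or constructing) rather than a whole script. A minimal standalone sketch of how such a key behaves (names are stand-ins, not SpiderMonkey API):

#include <cstdint>
#include <cassert>

struct Script;  // stand-in for JSScript

struct RecompileInfoSketch {
    Script *script;
    bool constructing : 1;      // normal vs. constructing JIT code
    uint32_t chunkIndex : 31;   // chunk within that JIT's code

    bool operator==(const RecompileInfoSketch &o) const {
        return script == o.script &&
               constructing == o.constructing &&
               chunkIndex == o.chunkIndex;
    }
};

int main() {
    Script *s = 0;
    RecompileInfoSketch a = {s, false, 3};
    RecompileInfoSketch b = {s, false, 3};
    RecompileInfoSketch c = {s, true, 3};
    assert(a == b);    // same script/kind/chunk: deduplicated in pendingRecompiles
    assert(!(a == c)); // normal and constructing JIT code are distinct keys
    return 0;
}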
/* Type information for a compartment. */
struct TypeCompartment
{
@ -1147,7 +1158,7 @@ struct TypeCompartment
bool pendingNukeTypes;
/* Pending recompilations to perform before execution of JIT code can resume. */
Vector<JSScript*> *pendingRecompiles;
Vector<RecompileInfo> *pendingRecompiles;
/*
* Number of recompilation events and inline frame expansions that have
@ -1162,7 +1173,7 @@ struct TypeCompartment
* changes inducing recompilation are keyed to this script. Note: script
* compilation is not reentrant.
*/
JSScript *compiledScript;
RecompileInfo compiledInfo;
/* Table for referencing types of objects keyed to an allocation site. */
AllocationSiteTable *allocationSiteTable;
@ -1235,7 +1246,8 @@ struct TypeCompartment
void setPendingNukeTypes(JSContext *cx);
/* Mark a script as needing recompilation once inference has finished. */
void addPendingRecompile(JSContext *cx, JSScript *script);
void addPendingRecompile(JSContext *cx, const RecompileInfo &info);
void addPendingRecompile(JSContext *cx, JSScript *script, jsbytecode *pc);
/* Monitor future effects on a bytecode. */
void monitorBytecode(JSContext *cx, JSScript *script, uint32_t offset,

View file

@ -242,20 +242,23 @@ struct AutoEnterTypeInference
*/
struct AutoEnterCompilation
{
JSContext *cx;
JSScript *script;
RecompileInfo &info;
AutoEnterCompilation(JSContext *cx, JSScript *script)
: cx(cx), script(script)
AutoEnterCompilation(JSContext *cx, JSScript *script, bool constructing, unsigned chunkIndex)
: info(cx->compartment->types.compiledInfo)
{
JS_ASSERT(!cx->compartment->types.compiledScript);
cx->compartment->types.compiledScript = script;
JS_ASSERT(!info.script);
info.script = script;
info.constructing = constructing;
info.chunkIndex = chunkIndex;
}
~AutoEnterCompilation()
{
JS_ASSERT(cx->compartment->types.compiledScript == script);
cx->compartment->types.compiledScript = NULL;
JS_ASSERT(info.script);
info.script = NULL;
info.constructing = false;
info.chunkIndex = 0;
}
};
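
The guard now publishes which chunk is being compiled, so any type constraint created during compilation is keyed to that one chunk. A self-contained sketch of the same RAII pattern with stand-in types (the real guard writes to cx->compartment->types.compiledInfo):

#include <cassert>
#include <cstddef>

struct Script {};

// Stand-in for cx->compartment->types.compiledInfo.
struct Info { Script *script; bool constructing; unsigned chunkIndex; };
static Info compiledInfo = { NULL, false, 0 };

// RAII guard mirroring AutoEnterCompilation above.
struct AutoEnterCompilationSketch {
    AutoEnterCompilationSketch(Script *script, bool constructing, unsigned chunkIndex) {
        assert(!compiledInfo.script);   // script compilation is not reentrant
        compiledInfo.script = script;
        compiledInfo.constructing = constructing;
        compiledInfo.chunkIndex = chunkIndex;
    }
    ~AutoEnterCompilationSketch() {
        compiledInfo.script = NULL;
        compiledInfo.constructing = false;
        compiledInfo.chunkIndex = 0;
    }
};

int main() {
    Script script;
    {
        AutoEnterCompilationSketch enter(&script, /* constructing = */ false,
                                         /* chunkIndex = */ 2);
        // While in scope, new type constraints snapshot compiledInfo, so a
        // later type change invalidates only chunk 2 of this script's
        // normal JIT code.
    }
    assert(!compiledInfo.script);  // cleared on exit
    return 0;
}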

View file

@ -74,7 +74,6 @@
#include "frontend/BytecodeEmitter.h"
#ifdef JS_METHODJIT
#include "methodjit/MethodJIT.h"
#include "methodjit/MethodJIT-inl.h"
#include "methodjit/Logging.h"
#endif
#include "vm/Debugger.h"
@ -464,7 +463,7 @@ js::RunScript(JSContext *cx, JSScript *script, StackFrame *fp)
#ifdef JS_METHODJIT
mjit::CompileStatus status;
status = mjit::CanMethodJIT(cx, script, fp->isConstructing(),
status = mjit::CanMethodJIT(cx, script, script->code, fp->isConstructing(),
mjit::CompileRequest_Interpreter);
if (status == mjit::Compile_Error)
return false;
@ -1507,7 +1506,6 @@ js::Interpret(JSContext *cx, StackFrame *entryFrame, InterpMode interpMode)
#define RESET_USE_METHODJIT() \
JS_BEGIN_MACRO \
useMethodJIT = cx->methodJitEnabled && \
script->getJITStatus(regs.fp()->isConstructing()) != JITScript_Invalid && \
(interpMode == JSINTERP_NORMAL || \
interpMode == JSINTERP_REJOIN || \
interpMode == JSINTERP_SKIP_TRAP); \
@ -1827,12 +1825,14 @@ check_backedge:
if (!useMethodJIT)
DO_OP();
mjit::CompileStatus status =
mjit::CanMethodJITAtBranch(cx, script, regs.fp(), regs.pc);
mjit::CanMethodJIT(cx, script, regs.pc, regs.fp()->isConstructing(),
mjit::CompileRequest_Interpreter);
if (status == mjit::Compile_Error)
goto error;
if (status == mjit::Compile_Okay) {
void *ncode =
script->nativeCodeForPC(regs.fp()->isConstructing(), regs.pc);
JS_ASSERT(ncode);
mjit::JaegerStatus status =
mjit::JaegerShotAtSafePoint(cx, ncode, true);
CHECK_PARTIAL_METHODJIT(status);
@ -3043,7 +3043,8 @@ BEGIN_CASE(JSOP_FUNAPPLY)
mjit::CompileRequest request = (interpMode == JSINTERP_NORMAL)
? mjit::CompileRequest_Interpreter
: mjit::CompileRequest_JIT;
mjit::CompileStatus status = mjit::CanMethodJIT(cx, script, construct, request);
mjit::CompileStatus status = mjit::CanMethodJIT(cx, script, script->code,
construct, request);
if (status == mjit::Compile_Error)
goto error;
if (status == mjit::Compile_Okay) {

View file

@ -128,7 +128,7 @@ typedef mjit::Compiler::ActiveFrame ActiveFrame;
bool
Probes::JITWatcher::CollectNativeRegions(RegionVector &regions,
JSRuntime *rt,
mjit::JITScript *jit,
mjit::JITChunk *jit,
mjit::JSActiveFrame *outerFrame,
mjit::JSActiveFrame **inlineFrames)
{

View file

@ -48,6 +48,10 @@
#include "jsscript.h"
#include "jsobj.h"
#ifdef JS_METHODJIT
#include "methodjit/MethodJIT.h"
#endif
namespace js {
namespace mjit {
@ -248,7 +252,7 @@ public:
#ifdef JS_METHODJIT
static bool CollectNativeRegions(RegionVector &regions,
JSRuntime *rt,
mjit::JITScript *jit,
mjit::JITChunk *jit,
mjit::JSActiveFrame *outerFrame,
mjit::JSActiveFrame **inlineFrames);

View file

@ -1745,10 +1745,9 @@ bool
JSScript::recompileForStepMode(JSContext *cx)
{
#ifdef JS_METHODJIT
js::mjit::JITScript *jit = jitNormal ? jitNormal : jitCtor;
if (jit && stepModeEnabled() != jit->singleStepMode) {
js::mjit::Recompiler recompiler(cx, this);
recompiler.recompile();
if (jitNormal || jitCtor) {
mjit::ClearAllFrames(cx->compartment);
mjit::ReleaseScriptCode(cx, this);
}
#endif
return true;

Просмотреть файл

@ -335,12 +335,6 @@ namespace JSC {
#define JS_UNJITTABLE_SCRIPT (reinterpret_cast<void*>(1))
enum JITScriptStatus {
JITScript_None,
JITScript_Invalid,
JITScript_Valid
};
namespace js { namespace mjit { struct JITScript; } }
#endif
@ -639,7 +633,6 @@ struct JSScript : public js::gc::Cell {
// These methods are implemented in MethodJIT.h.
inline void **nativeMap(bool constructing);
inline void *maybeNativeCodeForPC(bool constructing, jsbytecode *pc);
inline void *nativeCodeForPC(bool constructing, jsbytecode *pc);
js::mjit::JITScript *getJIT(bool constructing) {
@ -651,15 +644,6 @@ struct JSScript : public js::gc::Cell {
size_t *addressOfUseCount() { return &useCount; }
void resetUseCount() { useCount = 0; }
JITScriptStatus getJITStatus(bool constructing) {
void *addr = constructing ? jitArityCheckCtor : jitArityCheckNormal;
if (addr == NULL)
return JITScript_None;
if (addr == JS_UNJITTABLE_SCRIPT)
return JITScript_Invalid;
return JITScript_Valid;
}
/* Size of the JITScript and all sections. (This method is implemented in MethodJIT.cpp.) */
size_t jitDataSize(JSMallocSizeOfFun mallocSizeOf);

View file

@ -137,9 +137,9 @@ JS_ENUM_HEADER(JSValueType, uint8_t)
JSVAL_TYPE_NULL = 0x06,
JSVAL_TYPE_OBJECT = 0x07,
/* This never appears in a jsval; it is only provided as an out-of-band value. */
JSVAL_TYPE_UNKNOWN = 0x20
/* These never appear in a jsval; they are only provided as an out-of-band value. */
JSVAL_TYPE_UNKNOWN = 0x20,
JSVAL_TYPE_MISSING = 0x21
} JS_ENUM_FOOTER(JSValueType);
JS_STATIC_ASSERT(sizeof(JSValueType) == 1);

View file

@ -140,8 +140,9 @@ class LinkerHelper : public JSC::LinkBuffer
#endif
}
bool verifyRange(JITScript *jit) {
return verifyRange(JSC::JITCode(jit->code.m_code.executableAddress(), jit->code.m_size));
bool verifyRange(JITChunk *chunk) {
return verifyRange(JSC::JITCode(chunk->code.m_code.executableAddress(),
chunk->code.m_size));
}
JSC::ExecutablePool *init(JSContext *cx) {
@ -188,8 +189,8 @@ class NativeStubLinker : public LinkerHelper
typedef JSC::MacroAssembler::Jump FinalJump;
#endif
NativeStubLinker(Assembler &masm, JITScript *jit, jsbytecode *pc, FinalJump done)
: LinkerHelper(masm, JSC::METHOD_CODE), jit(jit), pc(pc), done(done)
NativeStubLinker(Assembler &masm, JITChunk *chunk, jsbytecode *pc, FinalJump done)
: LinkerHelper(masm, JSC::METHOD_CODE), chunk(chunk), pc(pc), done(done)
{}
bool init(JSContext *cx);
@ -203,7 +204,7 @@ class NativeStubLinker : public LinkerHelper
}
private:
JITScript *jit;
JITChunk *chunk;
jsbytecode *pc;
FinalJump done;
};

File diff suppressed because it is too large

View file

@ -337,14 +337,51 @@ class Compiler : public BaseCompiler
size_t offsetIndex;
};
struct JumpTableEdge {
uint32_t source;
uint32_t target;
};
struct ChunkJumpTableEdge {
JumpTableEdge edge;
void **jumpTableEntry;
};
struct LoopEntry {
uint32_t pcOffset;
Label label;
};
struct VarType {
/*
* Information about the current type of an argument or local in the
* script. The known type tag of these types is cached when possible to
* avoid generating duplicate dependency constraints.
*/
class VarType {
JSValueType type;
types::TypeSet *types;
public:
void setTypes(types::TypeSet *types) {
this->types = types;
this->type = JSVAL_TYPE_MISSING;
}
types::TypeSet *getTypes() { return types; }
JSValueType getTypeTag(JSContext *cx) {
if (type == JSVAL_TYPE_MISSING)
type = types ? types->getKnownTypeTag(cx) : JSVAL_TYPE_UNKNOWN;
return type;
}
};
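
VarType computes the known type tag lazily: the first getTypeTag call asks the TypeSet and caches the answer, using the new out-of-band JSVAL_TYPE_MISSING value (added to JSValueType in this commit) as the "not yet computed" marker, so duplicate dependency constraints are not generated. A self-contained sketch of that caching pattern with stand-in types:

#include <cassert>
#include <cstddef>

enum Tag { TAG_MISSING, TAG_UNKNOWN, TAG_INT32 };

struct TypeSetStub {
    Tag known;
    Tag getKnownTypeTag() const { return known; }
};

class VarTypeSketch {
    Tag type;
    TypeSetStub *types;
  public:
    void setTypes(TypeSetStub *ts) { types = ts; type = TAG_MISSING; }
    Tag getTypeTag() {
        if (type == TAG_MISSING)   // compute on first request...
            type = types ? types->getKnownTypeTag() : TAG_UNKNOWN;
        return type;               // ...then reuse the cached tag
    }
};

int main() {
    TypeSetStub ts = { TAG_INT32 };
    VarTypeSketch v;
    v.setTypes(&ts);
    assert(v.getTypeTag() == TAG_INT32);  // cached after the first call
    return 0;
}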
struct OutgoingChunkEdge {
uint32_t source;
uint32_t target;
Jump fastJump;
MaybeJump slowJump;
};
struct SlotType
@ -355,7 +392,9 @@ class Compiler : public BaseCompiler
};
JSScript *outerScript;
unsigned chunkIndex;
bool isConstructing;
ChunkDescriptor &outerChunk;
/* SSA information for the outer script and all frames we will be inlining. */
analyze::CrossScriptSSA ssa;
@ -428,8 +467,9 @@ private:
js::Vector<uint32_t> fixedIntToDoubleEntries;
js::Vector<uint32_t> fixedDoubleToAnyEntries;
js::Vector<JumpTable, 16> jumpTables;
js::Vector<uint32_t, 16> jumpTableOffsets;
js::Vector<JumpTableEdge, 16> jumpTableEdges;
js::Vector<LoopEntry, 16> loopEntries;
js::Vector<OutgoingChunkEdge, 16> chunkEdges;
StubCompiler stubcc;
Label invokeLabel;
Label arityLabel;
@ -452,7 +492,7 @@ private:
friend class CompilerAllocPolicy;
public:
Compiler(JSContext *cx, JSScript *outerScript, bool isConstructing);
Compiler(JSContext *cx, JSScript *outerScript, unsigned chunkIndex, bool isConstructing);
~Compiler();
CompileStatus compile();
@ -477,6 +517,15 @@ private:
return scan->parentPC;
}
JITScript *outerJIT() {
return outerScript->getJIT(isConstructing);
}
bool bytecodeInChunk(jsbytecode *pc) {
return (unsigned(pc - outerScript->code) >= outerChunk.begin)
&& (unsigned(pc - outerScript->code) < outerChunk.end);
}
jsbytecode *inlinePC() { return PC; }
uint32_t inlineIndex() { return a->inlineIndex; }
@ -500,11 +549,11 @@ private:
}
private:
CompileStatus performCompilation(JITScript **jitp);
CompileStatus performCompilation();
CompileStatus generatePrologue();
CompileStatus generateMethod();
CompileStatus generateEpilogue();
CompileStatus finishThisUp(JITScript **jitp);
CompileStatus finishThisUp();
CompileStatus pushActiveFrame(JSScript *script, uint32_t argc);
void popActiveFrame();
void updatePCCounters(jsbytecode *pc, Label *start, bool *updated);
@ -591,9 +640,12 @@ private:
void tryConvertInteger(FrameEntry *fe, Uses uses);
/* Opcode handlers. */
bool jumpAndRun(Jump j, jsbytecode *target, Jump *slow = NULL, bool *trampoline = NULL);
bool jumpAndRun(Jump j, jsbytecode *target,
Jump *slow = NULL, bool *trampoline = NULL,
bool fallthrough = false);
bool startLoop(jsbytecode *head, Jump entry, jsbytecode *entryTarget);
bool finishLoop(jsbytecode *head);
inline bool shouldStartLoop(jsbytecode *head);
void jsop_bindname(PropertyName *name);
void jsop_setglobal(uint32_t index);
void jsop_getprop_slow(PropertyName *name, bool forPrototype = false);

View file

@ -1174,7 +1174,7 @@ mjit::Compiler::jsop_equality_int_string(JSOp op, BoolStub stub,
ic.stubEntry = stubEntry;
ic.stub = stub;
bool useIC = !a->parent;
bool useIC = !a->parent && bytecodeInChunk(target);
/* Call the IC stub, which may generate a fast path. */
if (useIC) {

View file

@ -768,9 +768,10 @@ mjit::Compiler::jsop_typeof()
cond = (cond == Assembler::Equal) ? Assembler::BelowOrEqual : Assembler::Above;
}
if (type != JSVAL_TYPE_UNKNOWN) {
PC += JSOP_STRING_LENGTH;;
PC += JSOP_EQ_LENGTH;
jsbytecode *afterPC = PC + JSOP_STRING_LENGTH + JSOP_EQ_LENGTH;
if (type != JSVAL_TYPE_UNKNOWN && bytecodeInChunk(afterPC)) {
PC = afterPC;
RegisterID result = frame.allocReg(Registers::SingleByteRegs).reg();

View file

@ -1361,6 +1361,8 @@ FrameState::pushLocal(uint32_t n)
if (fe->isTracked() && n < a->script->nfixed)
JS_ASSERT(fe->data.inMemory());
#endif
if (n >= a->script->nfixed)
syncFe(fe);
JSValueType type = fe->isTypeKnown() ? fe->getKnownType() : JSVAL_TYPE_UNKNOWN;
push(addressOf(fe), type);
}

View file

@ -355,8 +355,7 @@ FrameState::bestEvictReg(uint32_t mask, bool includePinned) const
* Evict variables which are only live in future loop iterations, and are
* not carried around the loop in a register.
*/
JS_ASSERT_IF(lifetime->loopTail, loop);
if (lifetime->loopTail && !loop->carriesLoopReg(fe)) {
if (lifetime->loopTail && (!loop || !loop->carriesLoopReg(fe))) {
JaegerSpew(JSpew_Regalloc, "result: %s (%s) only live in later iterations\n",
entryName(fe), reg.name());
return reg;
@ -579,9 +578,12 @@ FrameState::computeAllocation(jsbytecode *target)
if (!alloc)
return NULL;
if (a->analysis->getCode(target).exceptionEntry || a->analysis->getCode(target).switchTarget ||
a->script->hasBreakpointsAt(target)) {
/* State must be synced at exception and switch targets, and at traps. */
/*
* State must be synced at exception and switch targets, at traps and when
* crossing between compilation chunks.
*/
if (a->analysis->getCode(target).safePoint ||
(!a->parent && !cc.bytecodeInChunk(target))) {
#ifdef DEBUG
if (IsJaegerSpewChannelActive(JSpew_Regalloc)) {
JaegerSpew(JSpew_Regalloc, "allocation at %u:", unsigned(target - a->script->code));

View file

@ -60,7 +60,7 @@ class Repatcher : public JSC::RepatchBuffer
CodeLocationLabel label;
public:
explicit Repatcher(JITScript *js)
explicit Repatcher(JITChunk *js)
: JSC::RepatchBuffer(js->code), label(js->code.m_code.executableAddress())
{ }

View file

@ -64,7 +64,6 @@
#include "jscntxtinlines.h"
#include "jsatominlines.h"
#include "StubCalls-inl.h"
#include "MethodJIT-inl.h"
#include "jsautooplen.h"
@ -321,15 +320,13 @@ UncachedInlineCall(VMFrame &f, InitialFrameFlags initial,
types::TypeMonitorCall(cx, args, construct);
/* Try to compile if not already compiled. */
if (newscript->getJITStatus(construct) == JITScript_None) {
CompileStatus status = CanMethodJIT(cx, newscript, construct, CompileRequest_Interpreter);
if (status == Compile_Error) {
/* A runtime exception was thrown, get out. */
return false;
}
if (status == Compile_Abort)
*unjittable = true;
CompileStatus status = CanMethodJIT(cx, newscript, newscript->code, construct, CompileRequest_Interpreter);
if (status == Compile_Error) {
/* A runtime exception was thrown, get out. */
return false;
}
if (status == Compile_Abort)
*unjittable = true;
/*
* Make sure we are not calling from an inline frame if we need to make a
@ -367,11 +364,13 @@ UncachedInlineCall(VMFrame &f, InitialFrameFlags initial,
*/
if (!newType) {
if (JITScript *jit = newscript->getJIT(regs.fp()->isConstructing())) {
*pret = jit->invokeEntry;
if (jit->invokeEntry) {
*pret = jit->invokeEntry;
/* Restore the old fp around and let the JIT code repush the new fp. */
regs.popFrame((Value *) regs.fp());
return true;
/* Restore the old fp around and let the JIT code repush the new fp. */
regs.popFrame((Value *) regs.fp());
return true;
}
}
}
@ -592,11 +591,11 @@ js_InternalThrow(VMFrame &f)
if (f.entryfp == f.fp())
break;
JS_ASSERT(f.regs.sp == cx->regs().sp);
JS_ASSERT(&cx->regs() == &f.regs);
InlineReturn(f);
}
JS_ASSERT(f.regs.sp == cx->regs().sp);
JS_ASSERT(&cx->regs() == &f.regs);
if (!pc)
return NULL;
@ -620,9 +619,6 @@ js_InternalThrow(VMFrame &f)
analyze::AutoEnterAnalysis enter(cx);
cx->regs().pc = pc;
cx->regs().sp = fp->base() + script->analysis()->getCode(pc).stackDepth;
/*
* Interpret the ENTERBLOCK and EXCEPTION opcodes, so that we don't go
* back into the interpreter with a pending exception. This will cause
@ -698,6 +694,28 @@ stubs::ScriptProbeOnlyEpilogue(VMFrame &f)
Probes::exitJSFun(f.cx, f.fp()->fun(), f.fp()->script());
}
void JS_FASTCALL
stubs::CrossChunkShim(VMFrame &f, void *edge_)
{
CrossChunkEdge *edge = (CrossChunkEdge *) edge_;
mjit::ExpandInlineFrames(f.cx->compartment);
JSScript *script = f.script();
JS_ASSERT(edge->target < script->length);
JS_ASSERT(script->code + edge->target == f.pc());
CompileStatus status = CanMethodJIT(f.cx, script, f.pc(), f.fp()->isConstructing(),
CompileRequest_Interpreter);
if (status == Compile_Error)
THROW();
void **addr = f.returnAddressLocation();
*addr = JS_FUNC_TO_DATA_PTR(void *, JaegerInterpoline);
f.fp()->setRejoin(StubRejoin(REJOIN_RESUME));
}
JS_STATIC_ASSERT(JSOP_NOP == 0);
/* :XXX: common out with identical copy in Compiler.cpp */
@ -854,6 +872,11 @@ js_InternalInterpret(void *returnData, void *returnType, void *returnReg, js::VM
f.regs.pc = nextpc;
break;
case REJOIN_JUMP:
f.regs.pc = (jsbytecode *) returnReg;
JS_ASSERT(unsigned(f.regs.pc - script->code) < script->length);
break;
case REJOIN_NATIVE:
case REJOIN_NATIVE_LOWERED:
case REJOIN_NATIVE_GETTER: {

View file

@ -1102,6 +1102,7 @@ CheckStackAndEnterMethodJIT(JSContext *cx, StackFrame *fp, void *code, bool part
JS_CHECK_RECURSION(cx, return Jaeger_Throwing);
JS_ASSERT(!cx->compartment->activeAnalysis);
JS_ASSERT(code);
Value *stackLimit = cx->stack.space().getStackLimit(cx, REPORT_ERROR);
if (!stackLimit)
@ -1129,63 +1130,63 @@ js::mjit::JaegerShotAtSafePoint(JSContext *cx, void *safePoint, bool partial)
}
NativeMapEntry *
JITScript::nmap() const
JITChunk::nmap() const
{
return (NativeMapEntry *)((char*)this + sizeof(JITScript));
return (NativeMapEntry *)((char*)this + sizeof(*this));
}
js::mjit::InlineFrame *
JITScript::inlineFrames() const
JITChunk::inlineFrames() const
{
return (js::mjit::InlineFrame *)((char *)nmap() + sizeof(NativeMapEntry) * nNmapPairs);
}
js::mjit::CallSite *
JITScript::callSites() const
JITChunk::callSites() const
{
return (js::mjit::CallSite *)&inlineFrames()[nInlineFrames];
}
char *
JITScript::commonSectionLimit() const
JITChunk::commonSectionLimit() const
{
return (char *)&callSites()[nCallSites];
}
#ifdef JS_MONOIC
ic::GetGlobalNameIC *
JITScript::getGlobalNames() const
JITChunk::getGlobalNames() const
{
return (ic::GetGlobalNameIC *) commonSectionLimit();
}
ic::SetGlobalNameIC *
JITScript::setGlobalNames() const
JITChunk::setGlobalNames() const
{
return (ic::SetGlobalNameIC *)((char *)getGlobalNames() +
sizeof(ic::GetGlobalNameIC) * nGetGlobalNames);
}
ic::CallICInfo *
JITScript::callICs() const
JITChunk::callICs() const
{
return (ic::CallICInfo *)&setGlobalNames()[nSetGlobalNames];
}
ic::EqualityICInfo *
JITScript::equalityICs() const
JITChunk::equalityICs() const
{
return (ic::EqualityICInfo *)&callICs()[nCallICs];
}
char *
JITScript::monoICSectionsLimit() const
JITChunk::monoICSectionsLimit() const
{
return (char *)&equalityICs()[nEqualityICs];
}
#else // JS_MONOIC
char *
JITScript::monoICSectionsLimit() const
JITChunk::monoICSectionsLimit() const
{
return commonSectionLimit();
}
@ -1193,43 +1194,62 @@ JITScript::monoICSectionsLimit() const
#ifdef JS_POLYIC
ic::GetElementIC *
JITScript::getElems() const
JITChunk::getElems() const
{
return (ic::GetElementIC *)monoICSectionsLimit();
}
ic::SetElementIC *
JITScript::setElems() const
JITChunk::setElems() const
{
return (ic::SetElementIC *)((char *)getElems() + sizeof(ic::GetElementIC) * nGetElems);
}
ic::PICInfo *
JITScript::pics() const
JITChunk::pics() const
{
return (ic::PICInfo *)((char *)setElems() + sizeof(ic::SetElementIC) * nSetElems);
}
char *
JITScript::polyICSectionsLimit() const
JITChunk::polyICSectionsLimit() const
{
return (char *)pics() + sizeof(ic::PICInfo) * nPICs;
}
#else // JS_POLYIC
char *
JITScript::polyICSectionsLimit() const
JITChunk::polyICSectionsLimit() const
{
return monoICSectionsLimit();
}
#endif // JS_POLYIC
void
JITScript::patchEdge(const CrossChunkEdge &edge, void *label)
{
if (edge.sourceJump1 || edge.sourceJump2) {
JITChunk *sourceChunk = chunk(script->code + edge.source);
JSC::CodeLocationLabel targetLabel(label);
ic::Repatcher repatch(sourceChunk);
if (edge.sourceJump1)
repatch.relink(JSC::CodeLocationJump(edge.sourceJump1), targetLabel);
if (edge.sourceJump2)
repatch.relink(JSC::CodeLocationJump(edge.sourceJump2), targetLabel);
}
if (edge.jumpTableEntries) {
for (unsigned i = 0; i < edge.jumpTableEntries->length(); i++)
*(*edge.jumpTableEntries)[i] = label;
}
}
template <typename T>
static inline void Destroy(T &t)
{
t.~T();
}
mjit::JITScript::~JITScript()
JITChunk::~JITChunk()
{
code.release();
@ -1249,9 +1269,6 @@ mjit::JITScript::~JITScript()
#endif
#if defined JS_MONOIC
if (argsCheckPool)
argsCheckPool->release();
for (JSC::ExecutablePool **pExecPool = execPools.begin();
pExecPool != execPools.end();
++pExecPool)
@ -1271,22 +1288,76 @@ mjit::JITScript::~JITScript()
if (callICs_[i].fastGuardedObject)
callICs_[i].purgeGuardedObject();
}
// Fixup any ICs still referring to this JIT.
while (!JS_CLIST_IS_EMPTY(&callers)) {
JS_STATIC_ASSERT(offsetof(ic::CallICInfo, links) == 0);
ic::CallICInfo *ic = (ic::CallICInfo *) callers.next;
uint8_t *start = (uint8_t *)ic->funGuard.executableAddress();
JSC::RepatchBuffer repatch(JSC::JITCode(start - 32, 64));
repatch.repatch(ic->funGuard, NULL);
repatch.relink(ic->funJump, ic->slowPathStart);
ic->purgeGuardedObject();
}
#endif
}
void
JITScript::destroy(JSContext *cx)
{
for (unsigned i = 0; i < nchunks; i++)
destroyChunk(cx, i);
}
void
JITScript::destroyChunk(JSContext *cx, unsigned chunkIndex, bool resetUses)
{
ChunkDescriptor &desc = chunkDescriptor(chunkIndex);
if (desc.chunk) {
Probes::discardMJITCode(cx, this, script, desc.chunk->code.m_code.executableAddress());
cx->delete_(desc.chunk);
desc.chunk = NULL;
CrossChunkEdge *edges = this->edges();
for (unsigned i = 0; i < nedges; i++) {
CrossChunkEdge &edge = edges[i];
if (edge.source >= desc.begin && edge.source < desc.end) {
edge.sourceJump1 = edge.sourceJump2 = NULL;
if (edge.jumpTableEntries) {
cx->delete_(edge.jumpTableEntries);
edge.jumpTableEntries = NULL;
}
} else if (edge.target >= desc.begin && edge.target < desc.end) {
edge.targetLabel = NULL;
patchEdge(edge, edge.shimLabel);
}
}
}
if (resetUses)
desc.counter = 0;
if (chunkIndex == 0) {
if (argsCheckPool) {
argsCheckPool->release();
argsCheckPool = NULL;
}
invokeEntry = NULL;
fastEntry = NULL;
arityCheckEntry = NULL;
argsCheckEntry = NULL;
if (script->jitNormal == this)
script->jitArityCheckNormal = NULL;
else
script->jitArityCheckCtor = NULL;
// Fixup any ICs still referring to this chunk.
while (!JS_CLIST_IS_EMPTY(&callers)) {
JS_STATIC_ASSERT(offsetof(ic::CallICInfo, links) == 0);
ic::CallICInfo *ic = (ic::CallICInfo *) callers.next;
uint8_t *start = (uint8_t *)ic->funGuard.executableAddress();
JSC::RepatchBuffer repatch(JSC::JITCode(start - 32, 64));
repatch.repatch(ic->funGuard, NULL);
repatch.relink(ic->funJump, ic->slowPathStart);
ic->purgeGuardedObject();
}
}
}
size_t
JSScript::jitDataSize(JSMallocSizeOfFun mallocSizeOf)
{
@ -1298,12 +1369,27 @@ JSScript::jitDataSize(JSMallocSizeOfFun mallocSizeOf)
return n;
}
/* Please keep in sync with Compiler::finishThisUp! */
size_t
mjit::JITScript::scriptDataSize(JSMallocSizeOfFun mallocSizeOf)
{
size_t usable = mallocSizeOf(this,
sizeof(JITScript)
+ (nchunks * sizeof(ChunkDescriptor))
+ (nedges * sizeof(CrossChunkEdge)));
for (unsigned i = 0; i < nchunks; i++) {
const ChunkDescriptor &desc = chunkDescriptor(i);
if (desc.chunk)
usable += desc.chunk->scriptDataSize(mallocSizeOf);
}
return usable;
}
/* Please keep in sync with Compiler::finishThisUp! */
size_t
mjit::JITChunk::scriptDataSize(JSMallocSizeOfFun mallocSizeOf)
{
size_t computedSize =
sizeof(JITScript) +
sizeof(JITChunk) +
sizeof(NativeMapEntry) * nNmapPairs +
sizeof(InlineFrame) * nInlineFrames +
sizeof(CallSite) * nCallSites +
@ -1334,8 +1420,7 @@ mjit::ReleaseScriptCode(JSContext *cx, JSScript *script, bool construct)
void **parity = construct ? &script->jitArityCheckCtor : &script->jitArityCheckNormal;
if (*pjit) {
Probes::discardMJITCode(cx, *pjit, script, (*pjit)->code.m_code.executableAddress());
(*pjit)->~JITScript();
(*pjit)->destroy(cx);
cx->free_(*pjit);
*pjit = NULL;
*parity = NULL;
@ -1351,62 +1436,26 @@ mjit::ProfileStubCall(VMFrame &f)
}
#endif
#ifdef JS_POLYIC
static int
PICPCComparator(const void *key, const void *entry)
JITChunk *
JITScript::findCodeChunk(void *addr)
{
const jsbytecode *pc = (const jsbytecode *)key;
const ic::PICInfo *pic = (const ic::PICInfo *)entry;
/*
* We can't just return |pc - pic->pc| because the pointers may be
* far apart and an int (or even a ptrdiff_t) may not be large
* enough to hold the difference. C says that pointer subtraction
* is only guaranteed to work for two pointers into the same array.
*/
if (pc < pic->pc)
return -1;
else if (pc == pic->pc)
return 0;
else
return 1;
}
uintN
mjit::GetCallTargetCount(JSScript *script, jsbytecode *pc)
{
ic::PICInfo *pic;
if (mjit::JITScript *jit = script->getJIT(false)) {
pic = (ic::PICInfo *)bsearch(pc, jit->pics(), jit->nPICs, sizeof(ic::PICInfo),
PICPCComparator);
if (pic)
return pic->stubsGenerated + 1; /* Add 1 for the inline path. */
for (unsigned i = 0; i < nchunks; i++) {
ChunkDescriptor &desc = chunkDescriptor(i);
if (desc.chunk && desc.chunk->isValidCode(addr))
return desc.chunk;
}
if (mjit::JITScript *jit = script->getJIT(true)) {
pic = (ic::PICInfo *)bsearch(pc, jit->pics(), jit->nPICs, sizeof(ic::PICInfo),
PICPCComparator);
if (pic)
return pic->stubsGenerated + 1; /* Add 1 for the inline path. */
}
return 1;
return NULL;
}
#else
uintN
mjit::GetCallTargetCount(JSScript *script, jsbytecode *pc)
{
return 1;
}
#endif
jsbytecode *
JITScript::nativeToPC(void *returnAddress, CallSite **pinline) const
JITScript::nativeToPC(void *returnAddress, CallSite **pinline)
{
JITChunk *chunk = findCodeChunk(returnAddress);
JS_ASSERT(chunk);
size_t low = 0;
size_t high = nCallICs;
js::mjit::ic::CallICInfo *callICs_ = callICs();
size_t high = chunk->nCallICs;
js::mjit::ic::CallICInfo *callICs_ = chunk->callICs();
while (high > low + 1) {
/* Could overflow here on a script with 2 billion calls. Oh well. */
size_t mid = (high + low) / 2;
@ -1428,7 +1477,7 @@ JITScript::nativeToPC(void *returnAddress, CallSite **pinline) const
if (ic.call->inlineIndex != UINT32_MAX) {
if (pinline)
*pinline = ic.call;
InlineFrame *frame = &inlineFrames()[ic.call->inlineIndex];
InlineFrame *frame = &chunk->inlineFrames()[ic.call->inlineIndex];
while (frame && frame->parent)
frame = frame->parent;
return frame->parentpc;

View file

@ -63,7 +63,10 @@
namespace js {
namespace mjit { struct JITScript; }
namespace mjit {
struct JITChunk;
struct JITScript;
}
struct VMFrame
{
@ -243,6 +246,9 @@ struct VMFrame
StackFrame *fp() { return regs.fp(); }
mjit::JITScript *jit() { return fp()->jit(); }
inline mjit::JITChunk *chunk();
inline unsigned chunkIndex();
/* Get the inner script/PC in case of inlining. */
inline JSScript *script();
inline jsbytecode *pc();
@ -303,6 +309,9 @@ enum RejoinState {
/* State is coherent for the start of the next (fallthrough) bytecode. */
REJOIN_FALLTHROUGH,
/* State is coherent for the start of the bytecode returned by the call. */
REJOIN_JUMP,
/*
* As for REJOIN_FALLTHROUGH, but holds a reference on the compartment's
* orphaned native pools which needs to be reclaimed by InternalInterpret.
@ -368,6 +377,20 @@ enum RejoinState {
REJOIN_BRANCH
};
/* Get the rejoin state for a StackFrame after returning from a scripted call. */
static inline JSRejoinState
ScriptedRejoin(uint32_t pcOffset)
{
return REJOIN_SCRIPTED | (pcOffset << 1);
}
/* Get the rejoin state for a StackFrame after returning from a stub call. */
static inline JSRejoinState
StubRejoin(RejoinState rejoin)
{
return rejoin << 1;
}
/* Helper to watch for recompilation and frame expansion activity on a compartment. */
struct RecompilationMonitor
{
@ -636,17 +659,11 @@ struct NativeCallStub {
#endif
};
struct JITScript {
struct JITChunk
{
typedef JSC::MacroAssemblerCodeRef CodeRef;
CodeRef code; /* pool & code addresses */
JSScript *script;
void *invokeEntry; /* invoke address */
void *fastEntry; /* cached entry, fastest */
void *arityCheckEntry; /* arity check address */
void *argsCheckEntry; /* arguments check address */
PCLengthEntry *pcLengths; /* lengths for outer and inline frames */
/*
@ -657,9 +674,8 @@ struct JITScript {
* Therefore, do not change the section ordering in finishThisUp() without
* changing nMICs() et al as well.
*/
uint32_t nNmapPairs:31; /* The NativeMapEntrys are sorted by .bcOff.
uint32_t nNmapPairs; /* The NativeMapEntrys are sorted by .bcOff.
.ncode values may not be NULL. */
bool singleStepMode:1; /* compiled in "single step mode" */
uint32_t nInlineFrames;
uint32_t nCallSites;
#ifdef JS_MONOIC
@ -674,18 +690,6 @@ struct JITScript {
uint32_t nPICs;
#endif
#ifdef JS_MONOIC
/* Inline cache at function entry for checking this/argument types. */
JSC::CodeLocationLabel argsCheckStub;
JSC::CodeLocationLabel argsCheckFallthrough;
JSC::CodeLocationJump argsCheckJump;
JSC::ExecutablePool *argsCheckPool;
void resetArgsCheck();
#endif
/* List of inline caches jumping to the fastEntry. */
JSCList callers;
#ifdef JS_MONOIC
// Additional ExecutablePools that IC stubs were generated into.
typedef Vector<JSC::ExecutablePool *, 0, SystemAllocPolicy> ExecPoolVector;
@ -710,8 +714,6 @@ struct JITScript {
ic::PICInfo *pics() const;
#endif
~JITScript();
bool isValidCode(void *ptr) {
char *jitcode = (char *)code.m_code.executableAddress();
char *jcheck = (char *)ptr;
@ -723,7 +725,7 @@ struct JITScript {
/* |mallocSizeOf| can be NULL here, in which case the fallback size computation will be used. */
size_t scriptDataSize(JSMallocSizeOfFun mallocSizeOf);
jsbytecode *nativeToPC(void *returnAddress, CallSite **pinline) const;
~JITChunk();
private:
/* Helpers used to navigate the variable-length sections. */
@ -732,6 +734,119 @@ struct JITScript {
char *polyICSectionsLimit() const;
};
void
SetChunkLimit(uint32_t limit);
/* Information about a compilation chunk within a script. */
struct ChunkDescriptor
{
/* Bytecode range of the chunk: [begin,end) */
uint32_t begin;
uint32_t end;
/* Use counter for the chunk. */
uint32_t counter;
/* Optional compiled code for the chunk. */
JITChunk *chunk;
};
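
Each ChunkDescriptor covers a half-open bytecode range [begin, end), and the descriptors jointly cover the whole script (chunkIndex below asserts this layout). A simplified sketch of partitioning a script under a chunk size limit; the hypothetical helper ignores the instruction and loop boundaries the real compiler must respect:

#include <vector>
#include <cstdint>

struct ChunkRange { uint32_t begin, end; };

// Hypothetical: split bytecode offsets [0, length) into ranges of at
// most `limit` offsets. The real splitter must not cut an instruction
// in half and prefers to keep loops within a single chunk.
static std::vector<ChunkRange>
PartitionScript(uint32_t length, uint32_t limit)
{
    std::vector<ChunkRange> chunks;
    for (uint32_t begin = 0; begin < length; begin += limit) {
        uint32_t end = (begin + limit < length) ? begin + limit : length;
        ChunkRange range = { begin, end };
        chunks.push_back(range);
    }
    return chunks;
}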
/* Jump or fallthrough edge in the bytecode which crosses a chunk boundary. */
struct CrossChunkEdge
{
/* Bytecode offsets of the source and target of the edge. */
uint32_t source;
uint32_t target;
/* Locations of the jump(s) for the source, NULL if not compiled. */
void *sourceJump1;
void *sourceJump2;
/* Any jump table entries along this edge. */
typedef Vector<void**,4,SystemAllocPolicy> JumpTableEntryVector;
JumpTableEntryVector *jumpTableEntries;
/* Location of the label for the target, NULL if not compiled. */
void *targetLabel;
/*
* Location of a shim which will transfer control to the interpreter at the
* target bytecode. The source jumps are patched to jump to this label if
* the source is compiled but not the target.
*/
void *shimLabel;
};
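
An edge's source jumps are retargeted whenever compilation state changes: to targetLabel while the target chunk has code, and back to shimLabel (which reenters the interpreter via stubs::CrossChunkShim) when it does not, exactly as destroyChunk does earlier in this commit. A self-contained sketch of that patching step with stand-in types:

#include <vector>
#include <cstddef>

// Simplified stand-ins for the CrossChunkEdge fields above.
struct Edge {
    void *sourceJump1, *sourceJump2;        // non-NULL once the source is compiled
    std::vector<void**> *jumpTableEntries;  // jump-table slots along this edge
    void *targetLabel;                      // non-NULL once the target is compiled
    void *shimLabel;                        // always valid: interpreter shim
};

// Patch every compiled source of `edge` to jump to `label`
// (mirrors JITScript::patchEdge in this commit).
static void PatchEdge(Edge &edge, void *label)
{
    if (edge.sourceJump1) { /* repatch first jump to label */ }
    if (edge.sourceJump2) { /* repatch second jump to label */ }
    if (edge.jumpTableEntries) {
        for (size_t i = 0; i < edge.jumpTableEntries->size(); i++)
            *(*edge.jumpTableEntries)[i] = label;  // tables hold raw code pointers
    }
}

// When the target chunk is destroyed, sources fall back to the shim.
static void OnTargetChunkDestroyed(Edge &edge)
{
    edge.targetLabel = NULL;
    PatchEdge(edge, edge.shimLabel);
}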
struct JITScript
{
JSScript *script;
void *invokeEntry; /* invoke address */
void *fastEntry; /* cached entry, fastest */
void *arityCheckEntry; /* arity check address */
void *argsCheckEntry; /* arguments check address */
/* List of inline caches jumping to the fastEntry. */
JSCList callers;
uint32_t nchunks;
uint32_t nedges;
/*
* Pool for shims which transfer control to the interpreter on cross chunk
* edges to chunks which do not have compiled code.
*/
JSC::ExecutablePool *shimPool;
#ifdef JS_MONOIC
/* Inline cache at function entry for checking this/argument types. */
JSC::CodeLocationLabel argsCheckStub;
JSC::CodeLocationLabel argsCheckFallthrough;
JSC::CodeLocationJump argsCheckJump;
JSC::ExecutablePool *argsCheckPool;
void resetArgsCheck();
#endif
ChunkDescriptor &chunkDescriptor(unsigned i) {
JS_ASSERT(i < nchunks);
ChunkDescriptor *descs = (ChunkDescriptor *) ((char *) this + sizeof(JITScript));
return descs[i];
}
unsigned chunkIndex(jsbytecode *pc) {
unsigned offset = pc - script->code;
JS_ASSERT(offset < script->length);
for (unsigned i = 0; i < nchunks; i++) {
const ChunkDescriptor &desc = chunkDescriptor(i);
JS_ASSERT(desc.begin <= offset);
if (offset < desc.end)
return i;
}
JS_NOT_REACHED("Bad chunk layout");
return 0;
}
JITChunk *chunk(jsbytecode *pc) {
return chunkDescriptor(chunkIndex(pc)).chunk;
}
JITChunk *findCodeChunk(void *addr);
CrossChunkEdge *edges() {
return (CrossChunkEdge *) (&chunkDescriptor(0) + nchunks);
}
/* Patch any compiled sources in edge to jump to label. */
void patchEdge(const CrossChunkEdge &edge, void *label);
jsbytecode *nativeToPC(void *returnAddress, CallSite **pinline);
size_t scriptDataSize(JSMallocSizeOfFun mallocSizeOf);
void destroy(JSContext *cx);
void destroyChunk(JSContext *cx, unsigned chunkIndex, bool resetUses = true);
};
/*
* Execute the given mjit code. This is a low-level call and callers must
* provide the same guarantees as JaegerShot/CheckStackAndEnterMethodJIT.
@ -758,8 +873,15 @@ enum CompileStatus
void JS_FASTCALL
ProfileStubCall(VMFrame &f);
CompileStatus JS_NEVER_INLINE
TryCompile(JSContext *cx, JSScript *script, bool construct);
enum CompileRequest
{
CompileRequest_Interpreter,
CompileRequest_JIT
};
CompileStatus
CanMethodJIT(JSContext *cx, JSScript *script, jsbytecode *pc,
bool construct, CompileRequest request);
void
ReleaseScriptCode(JSContext *cx, JSScript *script, bool construct);
@ -814,9 +936,6 @@ struct CallSite
}
};
uintN
GetCallTargetCount(JSScript *script, jsbytecode *pc);
void
DumpAllProfiles(JSContext *cx);
@ -843,11 +962,23 @@ inline void * bsearch_nmap(NativeMapEntry *nmap, size_t nPairs, size_t bcOff)
} /* namespace mjit */
inline mjit::JITChunk *
VMFrame::chunk()
{
return jit()->chunk(regs.pc);
}
inline unsigned
VMFrame::chunkIndex()
{
return jit()->chunkIndex(regs.pc);
}
inline JSScript *
VMFrame::script()
{
if (regs.inlined())
return jit()->inlineFrames()[regs.inlined()->inlineIndex].fun->script();
return chunk()->inlineFrames()[regs.inlined()->inlineIndex].fun->script();
return fp()->script();
}
@ -862,23 +993,15 @@ VMFrame::pc()
} /* namespace js */
inline void *
JSScript::maybeNativeCodeForPC(bool constructing, jsbytecode *pc)
JSScript::nativeCodeForPC(bool constructing, jsbytecode *pc)
{
js::mjit::JITScript *jit = getJIT(constructing);
if (!jit)
return NULL;
JS_ASSERT(pc >= code && pc < code + length);
return bsearch_nmap(jit->nmap(), jit->nNmapPairs, (size_t)(pc - code));
}
inline void *
JSScript::nativeCodeForPC(bool constructing, jsbytecode *pc)
{
js::mjit::JITScript *jit = getJIT(constructing);
JS_ASSERT(pc >= code && pc < code + length);
void* native = bsearch_nmap(jit->nmap(), jit->nNmapPairs, (size_t)(pc - code));
JS_ASSERT(native);
return native;
js::mjit::JITChunk *chunk = jit->chunk(pc);
if (!chunk)
return NULL;
return bsearch_nmap(chunk->nmap(), chunk->nNmapPairs, (size_t)(pc - code));
}
extern "C" void JaegerTrampolineReturn();

View file

@ -78,7 +78,7 @@ typedef JSC::MacroAssembler::DataLabelPtr DataLabelPtr;
static void
PatchGetFallback(VMFrame &f, ic::GetGlobalNameIC *ic)
{
Repatcher repatch(f.jit());
Repatcher repatch(f.chunk());
JSC::FunctionPtr fptr(JS_FUNC_TO_DATA_PTR(void *, stubs::Name));
repatch.relink(ic->slowPathCall, fptr);
}
@ -110,7 +110,7 @@ ic::GetGlobalName(VMFrame &f, ic::GetGlobalNameIC *ic)
uint32_t slot = shape->slot();
/* Patch shape guard. */
Repatcher repatcher(f.jit());
Repatcher repatcher(f.chunk());
repatcher.repatch(ic->fastPathStart.dataLabelPtrAtOffset(ic->shapeOffset), obj.lastProperty());
/* Patch loads. */
@ -136,7 +136,7 @@ static void
PatchSetFallback(VMFrame &f, ic::SetGlobalNameIC *ic)
{
JSScript *script = f.script();
Repatcher repatch(f.jit());
Repatcher repatch(f.chunk());
VoidStubSetGlobal stub = STRICT_VARIANT(DisabledSetGlobal);
JSC::FunctionPtr fptr(JS_FUNC_TO_DATA_PTR(void *, stub));
repatch.relink(ic->slowPathCall, fptr);
@ -177,7 +177,7 @@ UpdateSetGlobalName(VMFrame &f, ic::SetGlobalNameIC *ic, JSObject *obj, const Sh
}
/* Object is not branded, so we can use the inline path. */
Repatcher repatcher(f.jit());
Repatcher repatcher(f.chunk());
ic->patchInlineShapeGuard(repatcher, obj->lastProperty());
uint32_t index = obj->dynamicSlotIndex(shape->slot());
@ -222,9 +222,7 @@ class EqualityICLinker : public LinkerHelper
if (!pool)
return false;
JS_ASSERT(!f.regs.inlined());
JSScript *script = f.fp()->script();
JITScript *jit = script->getJIT(f.fp()->isConstructing());
if (!jit->execPools.append(pool)) {
if (!f.chunk()->execPools.append(pool)) {
pool->release();
js_ReportOutOfMemory(cx);
return false;
@ -354,14 +352,14 @@ class EqualityCompiler : public BaseCompiler
if (!buffer.init(cx))
return false;
Repatcher repatcher(f.jit());
Repatcher repatcher(f.chunk());
/* Overwrite the call to the IC with a call to the stub. */
JSC::FunctionPtr fptr(JS_FUNC_TO_DATA_PTR(void *, ic.stub));
repatcher.relink(ic.stubCall, fptr);
// Silently fail, the IC is disabled now.
if (!buffer.verifyRange(f.jit()))
if (!buffer.verifyRange(f.chunk()))
return true;
/* Set the targets of all type test failures to go to the stub. */
@ -440,7 +438,7 @@ NativeStubLinker::init(JSContext *cx)
stub.pc = pc;
stub.pool = pool;
stub.jump = locationOf(done);
if (!jit->nativeCallStubs.append(stub)) {
if (!chunk->nativeCallStubs.append(stub)) {
pool->release();
return false;
}
@ -580,17 +578,17 @@ class CallCompiler : public BaseCompiler
return ep;
}
void disable(JITScript *jit)
void disable()
{
JSC::CodeLocationCall oolCall = ic.slowPathStart.callAtOffset(ic.oolCallOffset);
Repatcher repatch(jit);
Repatcher repatch(f.chunk());
JSC::FunctionPtr fptr = callingNew
? JSC::FunctionPtr(JS_FUNC_TO_DATA_PTR(void *, SlowNewFromIC))
: JSC::FunctionPtr(JS_FUNC_TO_DATA_PTR(void *, SlowCallFromIC));
repatch.relink(oolCall, fptr);
}
bool generateFullCallStub(JITScript *from, JSScript *script, uint32_t flags)
bool generateFullCallStub(JSScript *script, uint32_t flags)
{
/*
* Create a stub that works with arity mismatches. Like the fast-path,
@ -649,7 +647,7 @@ class CallCompiler : public BaseCompiler
masm.loadPtr(FrameAddress(VMFrame::offsetOfRegsSp()), JSFrameReg);
/* Compute the value of ncode to use at this call site. */
ncode = (uint8_t *) f.jit()->code.m_code.executableAddress() + ic.call->codeOffset;
ncode = (uint8_t *) f.chunk()->code.m_code.executableAddress() + ic.call->codeOffset;
masm.storePtr(ImmPtr(ncode), Address(JSFrameReg, StackFrame::offsetOfNcode()));
masm.jump(Registers::ReturnReg);
@ -668,8 +666,8 @@ class CallCompiler : public BaseCompiler
if (!ep)
return false;
if (!linker.verifyRange(from)) {
disable(from);
if (!linker.verifyRange(f.chunk())) {
disable();
return true;
}
@ -684,20 +682,20 @@ class CallCompiler : public BaseCompiler
code.patch(inlined, f.regs.inlined());
}
Repatcher repatch(from);
Repatcher repatch(f.chunk());
JSC::CodeLocationJump oolJump = ic.slowPathStart.jumpAtOffset(ic.oolJumpOffset);
repatch.relink(oolJump, cs);
return true;
}
bool patchInlinePath(JITScript *from, JSScript *script, JSObject *obj)
bool patchInlinePath(JSScript *script, JSObject *obj)
{
JS_ASSERT(ic.frameSize.isStatic());
JITScript *jit = script->getJIT(callingNew);
/* Very fast path. */
Repatcher repatch(from);
Repatcher repatch(f.chunk());
/*
* Use the arguments check entry if this is a monitored call, we might
@ -724,7 +722,7 @@ class CallCompiler : public BaseCompiler
return true;
}
bool generateStubForClosures(JITScript *from, JSObject *obj)
bool generateStubForClosures(JSObject *obj)
{
JS_ASSERT(ic.frameSize.isStatic());
@ -752,8 +750,8 @@ class CallCompiler : public BaseCompiler
ic.hasJsFunCheck = true;
if (!linker.verifyRange(from)) {
disable(from);
if (!linker.verifyRange(f.chunk())) {
disable();
return true;
}
@ -765,7 +763,7 @@ class CallCompiler : public BaseCompiler
JaegerSpew(JSpew_PICs, "generated CALL closure stub %p (%lu bytes)\n",
cs.executableAddress(), (unsigned long) masm.size());
Repatcher repatch(from);
Repatcher repatch(f.chunk());
repatch.relink(ic.funJump, cs);
return true;
@ -773,8 +771,6 @@ class CallCompiler : public BaseCompiler
bool generateNativeStub()
{
JITScript *jit = f.jit();
/* Snapshot the frameDepth before SplatApplyArgs modifies it. */
uintN initialFrameDepth = f.regs.sp - f.fp()->slots();
@ -925,12 +921,12 @@ class CallCompiler : public BaseCompiler
NativeStubLinker::FinalJump done;
if (!NativeStubEpilogue(f, masm, &done, initialFrameDepth, vpOffset, MaybeRegisterID(), MaybeRegisterID()))
return false;
NativeStubLinker linker(masm, f.jit(), f.regs.pc, done);
NativeStubLinker linker(masm, f.chunk(), f.regs.pc, done);
if (!linker.init(f.cx))
THROWV(true);
if (!linker.verifyRange(jit)) {
disable(jit);
if (!linker.verifyRange(f.chunk())) {
disable();
return true;
}
@ -944,7 +940,7 @@ class CallCompiler : public BaseCompiler
JaegerSpew(JSpew_PICs, "generated native CALL stub %p (%lu bytes)\n",
start.executableAddress(), (unsigned long) masm.size());
Repatcher repatch(jit);
Repatcher repatch(f.chunk());
repatch.relink(ic.funJump, start);
return true;
@ -952,8 +948,6 @@ class CallCompiler : public BaseCompiler
void *update()
{
StackFrame *fp = f.fp();
JITScript *jit = fp->jit();
RecompilationMonitor monitor(cx);
bool lowered = ic.frameSize.lowered(f.pc());
@ -975,7 +969,7 @@ class CallCompiler : public BaseCompiler
// patch this site to go to a slow path always.
if (!ucr.codeAddr) {
if (ucr.unjittable)
disable(jit);
disable();
return NULL;
}
@ -992,10 +986,10 @@ class CallCompiler : public BaseCompiler
}
if (!ic.frameSize.isStatic() || ic.frameSize.staticArgc() != fun->nargs) {
if (!generateFullCallStub(jit, script, flags))
if (!generateFullCallStub(script, flags))
THROWV(NULL);
} else {
if (!ic.fastGuardedObject && patchInlinePath(jit, script, fun)) {
if (!ic.fastGuardedObject && patchInlinePath(script, fun)) {
// Nothing, done.
} else if (ic.fastGuardedObject &&
!ic.hasJsFunCheck &&
@ -1005,10 +999,10 @@ class CallCompiler : public BaseCompiler
* Note: Multiple "function guard" stubs are not yet
* supported, thus the fastGuardedNative check.
*/
if (!generateStubForClosures(jit, fun))
if (!generateStubForClosures(fun))
THROWV(NULL);
} else {
if (!generateFullCallStub(jit, script, flags))
if (!generateFullCallStub(script, flags))
THROWV(NULL);
}
}
@ -1221,7 +1215,7 @@ ic::GenerateArgumentCheckStub(VMFrame &f)
return;
jit->argsCheckPool = ep;
if (!linker.verifyRange(jit)) {
if (!linker.verifyRange(f.chunk())) {
jit->resetArgsCheck();
return;
}
@ -1235,7 +1229,7 @@ ic::GenerateArgumentCheckStub(VMFrame &f)
JaegerSpew(JSpew_PICs, "generated ARGS CHECK stub %p (%lu bytes)\n",
cs.executableAddress(), (unsigned long)masm.size());
Repatcher repatch(jit);
Repatcher repatch(f.chunk());
repatch.relink(jit->argsCheckJump, cs);
}
@ -1245,7 +1239,7 @@ JITScript::resetArgsCheck()
argsCheckPool->release();
argsCheckPool = NULL;
Repatcher repatch(this);
Repatcher repatch(chunk(script->code));
repatch.relink(argsCheckJump, argsCheckStub);
}


@ -128,7 +128,11 @@ class PICStubCompiler : public BaseCompiler
}
LookupStatus disable(JSContext *cx, const char *reason) {
return pic.disable(cx, reason, stub);
return pic.disable(f, reason, stub);
}
LookupStatus disable(VMFrame &f, const char *reason) {
return pic.disable(f, reason, stub);
}
bool hadGC() {
@ -220,7 +224,7 @@ class SetPropCompiler : public PICStubCompiler
JS_ASSERT(!pic.inlinePathPatched);
JaegerSpew(JSpew_PICs, "patch setprop inline at %p\n", pic.fastPathStart.executableAddress());
Repatcher repatcher(f.jit());
Repatcher repatcher(f.chunk());
SetPropLabels &labels = pic.setPropLabels();
int32_t offset;
@ -259,7 +263,7 @@ class SetPropCompiler : public PICStubCompiler
void patchPreviousToHere(CodeLocationLabel cs)
{
Repatcher repatcher(pic.lastCodeBlock(f.jit()));
Repatcher repatcher(pic.lastCodeBlock(f.chunk()));
CodeLocationLabel label = pic.lastPathStart();
// Patch either the inline fast path or a generated stub. The stub
@ -437,14 +441,14 @@ class SetPropCompiler : public PICStubCompiler
pic.secondShapeGuard = 0;
}
pic.updatePCCounters(cx, masm);
pic.updatePCCounters(f, masm);
PICLinker buffer(masm, pic);
if (!buffer.init(cx))
return error();
if (!buffer.verifyRange(pic.lastCodeBlock(f.jit())) ||
!buffer.verifyRange(f.jit())) {
if (!buffer.verifyRange(pic.lastCodeBlock(f.chunk())) ||
!buffer.verifyRange(f.chunk())) {
return disable("code memory is out of range");
}
@ -757,7 +761,7 @@ struct GetPropHelper {
LookupStatus lookup() {
JSObject *aobj = js_GetProtoIfDenseArray(obj);
if (!aobj->isNative())
return ic.disable(cx, "non-native");
return ic.disable(f, "non-native");
RecompilationMonitor monitor(cx);
if (!aobj->lookupProperty(cx, name, &holder, &prop))
@ -766,9 +770,9 @@ struct GetPropHelper {
return Lookup_Uncacheable;
if (!prop)
return ic.disable(cx, "lookup failed");
return ic.disable(f, "lookup failed");
if (!IsCacheableProtoChain(obj, holder))
return ic.disable(cx, "non-native holder");
return ic.disable(f, "non-native holder");
shape = (const Shape *)prop;
return Lookup_Cacheable;
}
@ -777,14 +781,14 @@ struct GetPropHelper {
if (!shape->hasDefaultGetter()) {
if (shape->isMethod()) {
if (JSOp(*f.pc()) != JSOP_CALLPROP)
return ic.disable(cx, "method valued shape");
return ic.disable(f, "method valued shape");
} else {
if (shape->hasGetterValue())
return ic.disable(cx, "getter value shape");
return ic.disable(f, "getter value shape");
if (shape->hasSlot() && holder != obj)
return ic.disable(cx, "slotful getter hook through prototype");
return ic.disable(f, "slotful getter hook through prototype");
if (!ic.canCallHook)
return ic.disable(cx, "can't call getter hook");
return ic.disable(f, "can't call getter hook");
if (f.regs.inlined()) {
/*
* As with native stubs, getter hook stubs can't be
@ -798,7 +802,7 @@ struct GetPropHelper {
}
}
} else if (!shape->hasSlot()) {
return ic.disable(cx, "no slot");
return ic.disable(f, "no slot");
}
return Lookup_Cacheable;
@ -865,14 +869,14 @@ class GetPropCompiler : public PICStubCompiler
masm.move(ImmType(JSVAL_TYPE_INT32), pic.shapeReg);
Jump done = masm.jump();
pic.updatePCCounters(cx, masm);
pic.updatePCCounters(f, masm);
PICLinker buffer(masm, pic);
if (!buffer.init(cx))
return error();
if (!buffer.verifyRange(pic.lastCodeBlock(f.jit())) ||
!buffer.verifyRange(f.jit())) {
if (!buffer.verifyRange(pic.lastCodeBlock(f.chunk())) ||
!buffer.verifyRange(f.chunk())) {
return disable("code memory is out of range");
}
@ -906,14 +910,14 @@ class GetPropCompiler : public PICStubCompiler
masm.move(ImmType(JSVAL_TYPE_INT32), pic.shapeReg);
Jump done = masm.jump();
pic.updatePCCounters(cx, masm);
pic.updatePCCounters(f, masm);
PICLinker buffer(masm, pic);
if (!buffer.init(cx))
return error();
if (!buffer.verifyRange(pic.lastCodeBlock(f.jit())) ||
!buffer.verifyRange(f.jit())) {
if (!buffer.verifyRange(pic.lastCodeBlock(f.chunk())) ||
!buffer.verifyRange(f.chunk())) {
return disable("code memory is out of range");
}
@ -944,14 +948,14 @@ class GetPropCompiler : public PICStubCompiler
masm.move(ImmType(JSVAL_TYPE_INT32), pic.shapeReg);
Jump done = masm.jump();
pic.updatePCCounters(cx, masm);
pic.updatePCCounters(f, masm);
PICLinker buffer(masm, pic);
if (!buffer.init(cx))
return error();
if (!buffer.verifyRange(pic.lastCodeBlock(f.jit())) ||
!buffer.verifyRange(f.jit())) {
if (!buffer.verifyRange(pic.lastCodeBlock(f.chunk())) ||
!buffer.verifyRange(f.chunk())) {
return disable("code memory is out of range");
}
@ -1014,14 +1018,14 @@ class GetPropCompiler : public PICStubCompiler
Jump done = masm.jump();
pic.updatePCCounters(cx, masm);
pic.updatePCCounters(f, masm);
PICLinker buffer(masm, pic);
if (!buffer.init(cx))
return error();
if (!buffer.verifyRange(pic.lastCodeBlock(f.jit())) ||
!buffer.verifyRange(f.jit())) {
if (!buffer.verifyRange(pic.lastCodeBlock(f.chunk())) ||
!buffer.verifyRange(f.chunk())) {
return disable("code memory is out of range");
}
@ -1035,7 +1039,7 @@ class GetPropCompiler : public PICStubCompiler
/* Patch the type check to jump here. */
if (pic.hasTypeCheck()) {
Repatcher repatcher(f.jit());
Repatcher repatcher(f.chunk());
repatcher.relink(pic.getPropLabels().getInlineTypeJump(pic.fastPathStart), cs);
}
@ -1057,14 +1061,14 @@ class GetPropCompiler : public PICStubCompiler
masm.move(ImmType(JSVAL_TYPE_INT32), pic.shapeReg);
Jump done = masm.jump();
pic.updatePCCounters(cx, masm);
pic.updatePCCounters(f, masm);
PICLinker buffer(masm, pic);
if (!buffer.init(cx))
return error();
if (!buffer.verifyRange(pic.lastCodeBlock(f.jit())) ||
!buffer.verifyRange(f.jit())) {
if (!buffer.verifyRange(pic.lastCodeBlock(f.chunk())) ||
!buffer.verifyRange(f.chunk())) {
return disable("code memory is out of range");
}
@ -1076,7 +1080,7 @@ class GetPropCompiler : public PICStubCompiler
start.executableAddress());
if (pic.hasTypeCheck()) {
Repatcher repatcher(f.jit());
Repatcher repatcher(f.chunk());
repatcher.relink(pic.getPropLabels().getInlineTypeJump(pic.fastPathStart), start);
}
@ -1088,7 +1092,7 @@ class GetPropCompiler : public PICStubCompiler
LookupStatus patchInline(JSObject *holder, const Shape *shape)
{
spew("patch", "inline");
Repatcher repatcher(f.jit());
Repatcher repatcher(f.chunk());
GetPropLabels &labels = pic.getPropLabels();
int32_t offset;
@ -1190,12 +1194,12 @@ class GetPropCompiler : public PICStubCompiler
NativeStubLinker::FinalJump done;
if (!NativeStubEpilogue(f, masm, &done, 0, vpOffset, pic.shapeReg, pic.objReg))
return;
NativeStubLinker linker(masm, f.jit(), f.regs.pc, done);
NativeStubLinker linker(masm, f.chunk(), f.regs.pc, done);
if (!linker.init(f.cx))
THROW();
if (!linker.verifyRange(pic.lastCodeBlock(f.jit())) ||
!linker.verifyRange(f.jit())) {
if (!linker.verifyRange(pic.lastCodeBlock(f.chunk())) ||
!linker.verifyRange(f.chunk())) {
disable("code memory is out of range");
return;
}
@ -1277,14 +1281,14 @@ class GetPropCompiler : public PICStubCompiler
masm.loadObjProp(holder, holderReg, shape, pic.shapeReg, pic.objReg);
Jump done = masm.jump();
pic.updatePCCounters(cx, masm);
pic.updatePCCounters(f, masm);
PICLinker buffer(masm, pic);
if (!buffer.init(cx))
return error();
if (!buffer.verifyRange(pic.lastCodeBlock(f.jit())) ||
!buffer.verifyRange(f.jit())) {
if (!buffer.verifyRange(pic.lastCodeBlock(f.chunk())) ||
!buffer.verifyRange(f.chunk())) {
return disable("code memory is out of range");
}
@ -1320,7 +1324,7 @@ class GetPropCompiler : public PICStubCompiler
void patchPreviousToHere(CodeLocationLabel cs)
{
Repatcher repatcher(pic.lastCodeBlock(f.jit()));
Repatcher repatcher(pic.lastCodeBlock(f.chunk()));
CodeLocationLabel label = pic.lastPathStart();
// Patch either the inline fast path or a generated stub. The stub
@ -1376,7 +1380,7 @@ class ScopeNameCompiler : public PICStubCompiler
void patchPreviousToHere(CodeLocationLabel cs)
{
ScopeNameLabels & labels = pic.scopeNameLabels();
Repatcher repatcher(pic.lastCodeBlock(f.jit()));
Repatcher repatcher(pic.lastCodeBlock(f.chunk()));
CodeLocationLabel start = pic.lastPathStart();
JSC::CodeLocationJump jump;
@ -1492,14 +1496,14 @@ class ScopeNameCompiler : public PICStubCompiler
Label failLabel = masm.label();
Jump failJump = masm.jump();
pic.updatePCCounters(cx, masm);
pic.updatePCCounters(f, masm);
PICLinker buffer(masm, pic);
if (!buffer.init(cx))
return error();
if (!buffer.verifyRange(pic.lastCodeBlock(f.jit())) ||
!buffer.verifyRange(f.jit())) {
if (!buffer.verifyRange(pic.lastCodeBlock(f.chunk())) ||
!buffer.verifyRange(f.chunk())) {
return disable("code memory is out of range");
}
@ -1604,14 +1608,14 @@ class ScopeNameCompiler : public PICStubCompiler
Label failLabel = masm.label();
Jump failJump = masm.jump();
pic.updatePCCounters(cx, masm);
pic.updatePCCounters(f, masm);
PICLinker buffer(masm, pic);
if (!buffer.init(cx))
return error();
if (!buffer.verifyRange(pic.lastCodeBlock(f.jit())) ||
!buffer.verifyRange(f.jit())) {
if (!buffer.verifyRange(pic.lastCodeBlock(f.chunk())) ||
!buffer.verifyRange(f.chunk())) {
return disable("code memory is out of range");
}
@ -1735,7 +1739,7 @@ class BindNameCompiler : public PICStubCompiler
void patchPreviousToHere(CodeLocationLabel cs)
{
BindNameLabels &labels = pic.bindNameLabels();
Repatcher repatcher(pic.lastCodeBlock(f.jit()));
Repatcher repatcher(pic.lastCodeBlock(f.chunk()));
JSC::CodeLocationJump jump;
/* Patch either the inline fast path or a generated stub. */
@ -1785,14 +1789,14 @@ class BindNameCompiler : public PICStubCompiler
Label failLabel = masm.label();
Jump failJump = masm.jump();
pic.updatePCCounters(cx, masm);
pic.updatePCCounters(f, masm);
PICLinker buffer(masm, pic);
if (!buffer.init(cx))
return error();
if (!buffer.verifyRange(pic.lastCodeBlock(f.jit())) ||
!buffer.verifyRange(f.jit())) {
if (!buffer.verifyRange(pic.lastCodeBlock(f.chunk())) ||
!buffer.verifyRange(f.chunk())) {
return disable("code memory is out of range");
}
@ -2061,13 +2065,15 @@ BaseIC::spew(JSContext *cx, const char *event, const char *message)
}
/* Total length of scripts preceding a frame. */
inline uint32_t frameCountersOffset(JSContext *cx)
inline uint32_t frameCountersOffset(VMFrame &f)
{
JSContext *cx = f.cx;
uint32_t offset = 0;
if (cx->regs().inlined()) {
offset += cx->fp()->script()->length;
uint32_t index = cx->regs().inlined()->inlineIndex;
InlineFrame *frames = cx->fp()->jit()->inlineFrames();
InlineFrame *frames = f.chunk()->inlineFrames();
for (unsigned i = 0; i < index; i++)
offset += frames[i].fun->script()->length;
}
@ -2080,27 +2086,25 @@ inline uint32_t frameCountersOffset(JSContext *cx)
}
LookupStatus
BaseIC::disable(JSContext *cx, const char *reason, void *stub)
BaseIC::disable(VMFrame &f, const char *reason, void *stub)
{
JITScript *jit = cx->fp()->jit();
if (jit->pcLengths) {
uint32_t offset = frameCountersOffset(cx);
jit->pcLengths[offset].picsLength = 0;
if (f.chunk()->pcLengths) {
uint32_t offset = frameCountersOffset(f);
f.chunk()->pcLengths[offset].picsLength = 0;
}
spew(cx, "disabled", reason);
Repatcher repatcher(jit);
spew(f.cx, "disabled", reason);
Repatcher repatcher(f.chunk());
repatcher.relink(slowPathCall, FunctionPtr(stub));
return Lookup_Uncacheable;
}
void
BaseIC::updatePCCounters(JSContext *cx, Assembler &masm)
BaseIC::updatePCCounters(VMFrame &f, Assembler &masm)
{
JITScript *jit = cx->fp()->jit();
if (jit->pcLengths) {
uint32_t offset = frameCountersOffset(cx);
jit->pcLengths[offset].picsLength += masm.size();
if (f.chunk()->pcLengths) {
uint32_t offset = frameCountersOffset(f);
f.chunk()->pcLengths[offset].picsLength += masm.size();
}
}
@ -2135,11 +2139,11 @@ GetElementIC::shouldUpdate(JSContext *cx)
}
LookupStatus
GetElementIC::disable(JSContext *cx, const char *reason)
GetElementIC::disable(VMFrame &f, const char *reason)
{
slowCallPatched = true;
void *stub = JS_FUNC_TO_DATA_PTR(void *, DisabledGetElem);
BaseIC::disable(cx, reason, stub);
BaseIC::disable(f, reason, stub);
return Lookup_Uncacheable;
}
@ -2182,7 +2186,7 @@ GetElementIC::attachGetProp(VMFrame &f, JSObject *obj, const Value &v, PropertyN
// the value read will go through a type barrier afterwards. TI only
// accounts for integer-valued properties accessed by GETELEM/CALLELEM.
if (cx->typeInferenceEnabled() && !forcedTypeBarrier)
return disable(cx, "string element access may not have type barrier");
return disable(f, "string element access may not have type barrier");
Assembler masm;
@ -2248,16 +2252,16 @@ GetElementIC::attachGetProp(VMFrame &f, JSObject *obj, const Value &v, PropertyN
Jump done = masm.jump();
updatePCCounters(cx, masm);
updatePCCounters(f, masm);
PICLinker buffer(masm, *this);
if (!buffer.init(cx))
return error(cx);
if (hasLastStringStub && !buffer.verifyRange(lastStringStub))
return disable(cx, "code memory is out of range");
if (!buffer.verifyRange(cx->fp()->jit()))
return disable(cx, "code memory is out of range");
return disable(f, "code memory is out of range");
if (!buffer.verifyRange(f.chunk()))
return disable(f, "code memory is out of range");
// Patch all guards.
buffer.maybeLink(atomIdGuard, slowPathStart);
@ -2279,7 +2283,7 @@ GetElementIC::attachGetProp(VMFrame &f, JSObject *obj, const Value &v, PropertyN
// Update the inline guards, if needed.
if (shouldPatchInlineTypeGuard() || shouldPatchUnconditionalShapeGuard()) {
Repatcher repatcher(cx->fp()->jit());
Repatcher repatcher(f.chunk());
if (shouldPatchInlineTypeGuard()) {
// A type guard is present in the inline path, and this is the
@ -2338,7 +2342,7 @@ GetElementIC::attachGetProp(VMFrame &f, JSObject *obj, const Value &v, PropertyN
stubsGenerated++;
if (stubsGenerated == MAX_GETELEM_IC_STUBS)
disable(cx, "max stubs reached");
disable(f, "max stubs reached");
// Finally, fetch the value to avoid redoing the property lookup.
*vp = holder->getSlot(shape->slot());
@ -2352,10 +2356,10 @@ GetElementIC::attachArguments(VMFrame &f, JSObject *obj, const Value &v, jsid id
JSContext *cx = f.cx;
if (!v.isInt32())
return disable(cx, "arguments object with non-integer key");
return disable(f, "arguments object with non-integer key");
if (op == JSOP_CALLELEM)
return disable(cx, "arguments object with call");
return disable(f, "arguments object with call");
JS_ASSERT(hasInlineTypeGuard() || idRemat.knownType() == JSVAL_TYPE_INT32);
@ -2457,15 +2461,15 @@ GetElementIC::attachArguments(VMFrame &f, JSObject *obj, const Value &v, jsid id
masm.jump(loadFromStack);
updatePCCounters(cx, masm);
updatePCCounters(f, masm);
PICLinker buffer(masm, *this);
if (!buffer.init(cx))
return error(cx);
if (!buffer.verifyRange(cx->fp()->jit()))
return disable(cx, "code memory is out of range");
if (!buffer.verifyRange(f.chunk()))
return disable(f, "code memory is out of range");
buffer.link(shapeGuard, slowPathStart);
buffer.link(overridden, slowPathStart);
@ -2478,7 +2482,7 @@ GetElementIC::attachArguments(VMFrame &f, JSObject *obj, const Value &v, jsid id
JaegerSpew(JSpew_PICs, "generated getelem arguments stub at %p\n", cs.executableAddress());
Repatcher repatcher(cx->fp()->jit());
Repatcher repatcher(f.chunk());
repatcher.relink(fastPathStart.jumpAtOffset(inlineShapeGuard), cs);
JS_ASSERT(!shouldPatchUnconditionalShapeGuard());
@ -2488,9 +2492,9 @@ GetElementIC::attachArguments(VMFrame &f, JSObject *obj, const Value &v, jsid id
stubsGenerated++;
if (stubsGenerated == MAX_GETELEM_IC_STUBS)
disable(cx, "max stubs reached");
disable(f, "max stubs reached");
disable(cx, "generated arguments stub");
disable(f, "generated arguments stub");
if (!obj->getGeneric(cx, id, vp))
return Lookup_Error;
@ -2505,10 +2509,10 @@ GetElementIC::attachTypedArray(VMFrame &f, JSObject *obj, const Value &v, jsid i
JSContext *cx = f.cx;
if (!v.isInt32())
return disable(cx, "typed array with string key");
return disable(f, "typed array with string key");
if (op == JSOP_CALLELEM)
return disable(cx, "typed array with call");
return disable(f, "typed array with call");
// The fast-path guarantees that after the dense shape guard, the type is
// known to be int32, either via type inference or the inline type check.
@ -2542,7 +2546,7 @@ GetElementIC::attachTypedArray(VMFrame &f, JSObject *obj, const Value &v, jsid i
TypedArray::getType(tarray) == js::TypedArray::TYPE_FLOAT64 ||
TypedArray::getType(tarray) == js::TypedArray::TYPE_UINT32))
{
return disable(cx, "fpu not supported");
return disable(f, "fpu not supported");
}
MaybeRegisterID tempReg;
@ -2550,14 +2554,14 @@ GetElementIC::attachTypedArray(VMFrame &f, JSObject *obj, const Value &v, jsid i
Jump done = masm.jump();
updatePCCounters(cx, masm);
updatePCCounters(f, masm);
PICLinker buffer(masm, *this);
if (!buffer.init(cx))
return error(cx);
if (!buffer.verifyRange(cx->fp()->jit()))
return disable(cx, "code memory is out of range");
if (!buffer.verifyRange(f.chunk()))
return disable(f, "code memory is out of range");
buffer.link(shapeGuard, slowPathStart);
buffer.link(outOfBounds, slowPathStart);
@ -2571,7 +2575,7 @@ GetElementIC::attachTypedArray(VMFrame &f, JSObject *obj, const Value &v, jsid i
JS_ASSERT(!shouldPatchUnconditionalShapeGuard());
JS_ASSERT(!inlineShapeGuardPatched);
Repatcher repatcher(cx->fp()->jit());
Repatcher repatcher(f.chunk());
repatcher.relink(fastPathStart.jumpAtOffset(inlineShapeGuard), cs);
inlineShapeGuardPatched = true;
@ -2580,9 +2584,9 @@ GetElementIC::attachTypedArray(VMFrame &f, JSObject *obj, const Value &v, jsid i
// In the future, it might make sense to attach multiple typed array stubs.
// For simplicity, they are currently monomorphic.
if (stubsGenerated == MAX_GETELEM_IC_STUBS)
disable(cx, "max stubs reached");
disable(f, "max stubs reached");
disable(cx, "generated typed array stub");
disable(f, "generated typed array stub");
// Fetch the value as expected of Lookup_Cacheable for GetElement.
if (!obj->getGeneric(cx, id, vp))
@ -2622,7 +2626,7 @@ GetElementIC::update(VMFrame &f, JSObject *obj, const Value &v, jsid id, Value *
return attachTypedArray(f, obj, v, id, vp);
#endif
return disable(f.cx, "unhandled object and key type");
return disable(f, "unhandled object and key type");
}
void JS_FASTCALL
@ -2632,7 +2636,7 @@ ic::GetElement(VMFrame &f, ic::GetElementIC *ic)
// Right now, we don't optimize for strings or lazy arguments.
if (!f.regs.sp[-2].isObject()) {
ic->disable(cx, "non-object");
ic->disable(f, "non-object");
stubs::GetElem(f);
return;
}
@ -2676,11 +2680,11 @@ ic::GetElement(VMFrame &f, ic::GetElementIC *ic)
(FunctionTemplateConditional(s, f<true>, f<false>))
LookupStatus
SetElementIC::disable(JSContext *cx, const char *reason)
SetElementIC::disable(VMFrame &f, const char *reason)
{
slowCallPatched = true;
VoidStub stub = APPLY_STRICTNESS(stubs::SetElem, strictMode);
BaseIC::disable(cx, reason, JS_FUNC_TO_DATA_PTR(void *, stub));
BaseIC::disable(f, reason, JS_FUNC_TO_DATA_PTR(void *, stub));
return Lookup_Uncacheable;
}
@ -2713,14 +2717,14 @@ SetElementIC::attachHoleStub(VMFrame &f, JSObject *obj, int32_t keyval)
JSContext *cx = f.cx;
if (keyval < 0)
return disable(cx, "negative key index");
return disable(f, "negative key index");
// We may have failed a capacity check instead of a dense array check.
// However, we should still build the IC in this case, since it could
// be in a loop that is filling in the array.
if (js_PrototypeHasIndexedProperties(cx, obj))
return disable(cx, "prototype has indexed properties");
return disable(f, "prototype has indexed properties");
Assembler masm;
@ -2736,7 +2740,7 @@ SetElementIC::attachHoleStub(VMFrame &f, JSObject *obj, int32_t keyval)
// 2) We only have to test the shape, rather than INDEXED.
for (JSObject *pobj = obj->getProto(); pobj; pobj = pobj->getProto()) {
if (!pobj->isNative())
return disable(cx, "non-native array prototype");
return disable(f, "non-native array prototype");
masm.move(ImmPtr(pobj), objReg);
Jump j = masm.guardShape(objReg, pobj);
if (!fails.append(j))
@ -2789,8 +2793,8 @@ SetElementIC::attachHoleStub(VMFrame &f, JSObject *obj, int32_t keyval)
if (!execPool)
return error(cx);
if (!buffer.verifyRange(cx->fp()->jit()))
return disable(cx, "code memory is out of range");
if (!buffer.verifyRange(f.chunk()))
return disable(f, "code memory is out of range");
// Patch all guards.
for (size_t i = 0; i < fails.length(); i++)
@ -2800,11 +2804,11 @@ SetElementIC::attachHoleStub(VMFrame &f, JSObject *obj, int32_t keyval)
CodeLocationLabel cs = buffer.finalize(f);
JaegerSpew(JSpew_PICs, "generated dense array hole stub at %p\n", cs.executableAddress());
Repatcher repatcher(cx->fp()->jit());
Repatcher repatcher(f.chunk());
repatcher.relink(fastPathStart.jumpAtOffset(inlineHoleGuard), cs);
inlineHoleGuardPatched = true;
disable(cx, "generated dense array hole stub");
disable(f, "generated dense array hole stub");
return Lookup_Cacheable;
}
@ -2841,7 +2845,7 @@ SetElementIC::attachTypedArray(VMFrame &f, JSObject *obj, int32_t key)
(TypedArray::getType(tarray) == js::TypedArray::TYPE_FLOAT32 ||
TypedArray::getType(tarray) == js::TypedArray::TYPE_FLOAT64))
{
return disable(cx, "fpu not supported");
return disable(f, "fpu not supported");
}
int shift = js::TypedArray::slotWidth(obj);
@ -2878,8 +2882,8 @@ SetElementIC::attachTypedArray(VMFrame &f, JSObject *obj, int32_t key)
if (!execPool)
return error(cx);
if (!buffer.verifyRange(cx->fp()->jit()))
return disable(cx, "code memory is out of range");
if (!buffer.verifyRange(f.chunk()))
return disable(f, "code memory is out of range");
// Note that the out-of-bounds path simply does nothing.
buffer.link(shapeGuard, slowPathStart);
@ -2890,7 +2894,7 @@ SetElementIC::attachTypedArray(VMFrame &f, JSObject *obj, int32_t key)
CodeLocationLabel cs = buffer.finalizeCodeAddendum();
JaegerSpew(JSpew_PICs, "generated setelem typed array stub at %p\n", cs.executableAddress());
Repatcher repatcher(cx->fp()->jit());
Repatcher repatcher(f.chunk());
repatcher.relink(fastPathStart.jumpAtOffset(inlineShapeGuard), cs);
inlineShapeGuardPatched = true;
@ -2899,9 +2903,9 @@ SetElementIC::attachTypedArray(VMFrame &f, JSObject *obj, int32_t key)
// In the future, it might make sense to attach multiple typed array stubs.
// For simplicity, they are currently monomorphic.
if (stubsGenerated == MAX_GETELEM_IC_STUBS)
disable(cx, "max stubs reached");
disable(f, "max stubs reached");
disable(cx, "generated typed array stub");
disable(f, "generated typed array stub");
return Lookup_Cacheable;
}
@ -2911,9 +2915,9 @@ LookupStatus
SetElementIC::update(VMFrame &f, const Value &objval, const Value &idval)
{
if (!objval.isObject())
return disable(f.cx, "primitive lval");
return disable(f, "primitive lval");
if (!idval.isInt32())
return disable(f.cx, "non-int32_t key");
return disable(f, "non-int32 key");
JSObject *obj = &objval.toObject();
int32_t key = idval.toInt32();
@ -2927,7 +2931,7 @@ SetElementIC::update(VMFrame &f, const Value &objval, const Value &idval)
return attachTypedArray(f, obj, key);
#endif
return disable(f.cx, "unsupported object type");
return disable(f, "unsupported object type");
}
bool


@ -111,8 +111,8 @@ struct BaseIC : public MacroAssemblerTypedefs {
}
bool shouldUpdate(JSContext *cx);
void spew(JSContext *cx, const char *event, const char *reason);
LookupStatus disable(JSContext *cx, const char *reason, void *stub);
void updatePCCounters(JSContext *cx, Assembler &masm);
LookupStatus disable(VMFrame &f, const char *reason, void *stub);
void updatePCCounters(VMFrame &f, Assembler &masm);
bool isCallOp();
};
@ -303,7 +303,7 @@ struct GetElementIC : public BasePolyIC {
Value *vp);
LookupStatus attachArguments(VMFrame &f, JSObject *obj, const Value &v, jsid id, Value *vp);
LookupStatus attachTypedArray(VMFrame &f, JSObject *obj, const Value &v, jsid id, Value *vp);
LookupStatus disable(JSContext *cx, const char *reason);
LookupStatus disable(VMFrame &f, const char *reason);
LookupStatus error(JSContext *cx);
bool shouldUpdate(JSContext *cx);
};
@ -369,7 +369,7 @@ struct SetElementIC : public BaseIC {
LookupStatus attachTypedArray(VMFrame &f, JSObject *obj, int32_t key);
LookupStatus attachHoleStub(VMFrame &f, JSObject *obj, int32_t key);
LookupStatus update(VMFrame &f, const Value &objval, const Value &idval);
LookupStatus disable(JSContext *cx, const char *reason);
LookupStatus disable(VMFrame &f, const char *reason);
LookupStatus error(JSContext *cx);
bool shouldUpdate(JSContext *cx);
};
@ -426,9 +426,9 @@ struct PICInfo : public BasePolyIC {
// Return a JITCode block corresponding to the code memory to attach a
// new stub to.
JITCode lastCodeBlock(JITScript *jit) {
JITCode lastCodeBlock(JITChunk *chunk) {
if (!stubsGenerated)
return JITCode(jit->code.m_code.executableAddress(), jit->code.m_size);
return JITCode(chunk->code.m_code.executableAddress(), chunk->code.m_size);
return lastStubStart;
}


@ -52,24 +52,12 @@
#include "jscntxtinlines.h"
#include "jsinterpinlines.h"
#include "MethodJIT-inl.h"
using namespace js;
using namespace js::mjit;
namespace js {
namespace mjit {
static inline JSRejoinState ScriptedRejoin(uint32_t pcOffset)
{
return REJOIN_SCRIPTED | (pcOffset << 1);
}
static inline JSRejoinState StubRejoin(RejoinState rejoin)
{
return rejoin << 1;
}
static inline void
SetRejoinState(StackFrame *fp, const CallSite &site, void **location)
{
@ -97,12 +85,12 @@ CallsiteMatches(uint8_t *codeStart, const CallSite &site, void *location)
}
void
Recompiler::patchCall(JITScript *jit, StackFrame *fp, void **location)
Recompiler::patchCall(JITChunk *chunk, StackFrame *fp, void **location)
{
uint8_t* codeStart = (uint8_t *)jit->code.m_code.executableAddress();
uint8_t* codeStart = (uint8_t *)chunk->code.m_code.executableAddress();
CallSite *callSites_ = jit->callSites();
for (uint32_t i = 0; i < jit->nCallSites; i++) {
CallSite *callSites_ = chunk->callSites();
for (uint32_t i = 0; i < chunk->nCallSites; i++) {
if (CallsiteMatches(codeStart, callSites_[i], *location)) {
JS_ASSERT(callSites_[i].inlineIndex == analyze::CrossScriptSSA::OUTER_FRAME);
SetRejoinState(fp, callSites_[i], location);
@ -114,7 +102,7 @@ Recompiler::patchCall(JITScript *jit, StackFrame *fp, void **location)
}
void
Recompiler::patchNative(JSCompartment *compartment, JITScript *jit, StackFrame *fp,
Recompiler::patchNative(JSCompartment *compartment, JITChunk *chunk, StackFrame *fp,
jsbytecode *pc, RejoinState rejoin)
{
/*
@ -122,10 +110,10 @@ Recompiler::patchNative(JSCompartment *compartment, JITScript *jit, StackFrame *
* The recompilation could have been triggered either by the native call
* itself, or by a SplatApplyArgs preparing for the native call. Either
* way, we don't want to patch up the call, but will instead steal the pool
* for the IC so it doesn't get freed with the JITScript, and patch up the
* for the IC so it doesn't get freed with the JITChunk, and patch up the
* jump at the end to go to the interpoline.
*
* When doing this, we do not reset the IC itself; the JITScript must
* When doing this, we do not reset the IC itself; the JITChunk must
* be dead and about to be released due to the recompilation (or a GC).
*/
fp->setRejoin(StubRejoin(rejoin));
@ -139,8 +127,8 @@ Recompiler::patchNative(JSCompartment *compartment, JITScript *jit, StackFrame *
* Find and patch all native call stubs attached to the given PC. There may
* be multiple ones for getter stubs attached to e.g. a GETELEM.
*/
for (unsigned i = 0; i < jit->nativeCallStubs.length(); i++) {
NativeCallStub &stub = jit->nativeCallStubs[i];
for (unsigned i = 0; i < chunk->nativeCallStubs.length(); i++) {
NativeCallStub &stub = chunk->nativeCallStubs[i];
if (stub.pc != pc)
continue;
@ -194,7 +182,7 @@ Recompiler::patchFrame(JSCompartment *compartment, VMFrame *f, JSScript *script)
rejoin == REJOIN_NATIVE_GETTER) {
/* Native call. */
if (fp->script() == script) {
patchNative(compartment, fp->jit(), fp, f->regs.pc, rejoin);
patchNative(compartment, fp->jit()->chunk(f->regs.pc), fp, f->regs.pc, rejoin);
f->stubRejoin = REJOIN_NATIVE_PATCHED;
}
} else if (rejoin == REJOIN_NATIVE_PATCHED) {
@ -206,10 +194,17 @@ Recompiler::patchFrame(JSCompartment *compartment, VMFrame *f, JSScript *script)
*addr = JS_FUNC_TO_DATA_PTR(void *, JaegerInterpoline);
f->stubRejoin = 0;
}
} else if (script->jitCtor && script->jitCtor->isValidCode(*addr)) {
patchCall(script->jitCtor, fp, addr);
} else if (script->jitNormal && script->jitNormal->isValidCode(*addr)) {
patchCall(script->jitNormal, fp, addr);
} else {
if (script->jitCtor) {
JITChunk *chunk = script->jitCtor->findCodeChunk(*addr);
if (chunk)
patchCall(chunk, fp, addr);
}
if (script->jitNormal) {
JITChunk *chunk = script->jitNormal->findCodeChunk(*addr);
if (chunk)
patchCall(chunk, fp, addr);
}
}
}
@ -268,15 +263,18 @@ Recompiler::expandInlineFrames(JSCompartment *compartment,
*/
compartment->types.frameExpansions++;
jsbytecode *pc = next ? next->prevpc(NULL) : f->regs.pc;
JITChunk *chunk = fp->jit()->chunk(pc);
/*
* Patch the VMFrame's return address if it is returning at the given inline site.
* Note there is no worry about handling a native or CompileFunction call here,
* as such IC stubs are not generated within inline frames.
*/
void **frameAddr = f->returnAddressLocation();
uint8_t* codeStart = (uint8_t *)fp->jit()->code.m_code.executableAddress();
uint8_t* codeStart = (uint8_t *)chunk->code.m_code.executableAddress();
InlineFrame *inner = &fp->jit()->inlineFrames()[inlined->inlineIndex];
InlineFrame *inner = &chunk->inlineFrames()[inlined->inlineIndex];
jsbytecode *innerpc = inner->fun->script()->code + inlined->pcOffset;
StackFrame *innerfp = expandInlineFrameChain(fp, inner);
@ -383,11 +381,6 @@ ClearAllFrames(JSCompartment *compartment)
}
}
Recompiler::Recompiler(JSContext *cx, JSScript *script)
: cx(cx), script(script)
{
}
/*
* Recompilation can be triggered either by the debugger (turning debug mode on for
* a script or setting/clearing a trap), or by dynamic changes in type information
@ -408,7 +401,7 @@ Recompiler::Recompiler(JSContext *cx, JSScript *script)
* redirect that entryncode to the interpoline.
*/
void
Recompiler::recompile(bool resetUses)
Recompiler::clearStackReferences(JSContext *cx, JSScript *script)
{
JS_ASSERT(script->hasJITCode());
@ -449,8 +442,8 @@ Recompiler::recompile(bool resetUses)
void **addr = next->addressOfNativeReturnAddress();
if (JITCodeReturnAddress(*addr)) {
JS_ASSERT(fp->jit()->isValidCode(*addr));
patchCall(fp->jit(), fp, addr);
JITChunk *chunk = fp->jit()->findCodeChunk(*addr);
patchCall(chunk, fp, addr);
}
}
@ -460,44 +453,9 @@ Recompiler::recompile(bool resetUses)
patchFrame(cx->compartment, f, script);
}
if (script->jitNormal) {
cleanup(script->jitNormal);
ReleaseScriptCode(cx, script, false);
}
if (script->jitCtor) {
cleanup(script->jitCtor);
ReleaseScriptCode(cx, script, true);
}
if (resetUses) {
/*
* Wait for the script to get warm again before doing another compile,
* unless we are recompiling *because* the script got hot.
*/
script->resetUseCount();
}
cx->compartment->types.recompilations++;
}
void
Recompiler::cleanup(JITScript *jit)
{
while (!JS_CLIST_IS_EMPTY(&jit->callers)) {
JaegerSpew(JSpew_Recompile, "Purging IC caller\n");
JS_STATIC_ASSERT(offsetof(ic::CallICInfo, links) == 0);
ic::CallICInfo *ic = (ic::CallICInfo *) jit->callers.next;
uint8_t *start = (uint8_t *)ic->funGuard.executableAddress();
JSC::RepatchBuffer repatch(JSC::JITCode(start - 32, 64));
repatch.repatch(ic->funGuard, NULL);
repatch.relink(ic->funJump, ic->slowPathStart);
ic->purgeGuardedObject();
}
}
} /* namespace mjit */
} /* namespace js */


@ -64,9 +64,9 @@ namespace mjit {
*/
class Recompiler {
public:
Recompiler(JSContext *cx, JSScript *script);
void recompile(bool resetUses = true);
static void
clearStackReferences(JSContext *cx, JSScript *script);
static void
expandInlineFrames(JSCompartment *compartment, StackFrame *fp, mjit::CallSite *inlined,
@ -75,18 +75,13 @@ public:
static void patchFrame(JSCompartment *compartment, VMFrame *f, JSScript *script);
private:
JSContext *cx;
JSScript *script;
static void patchCall(JITScript *jit, StackFrame *fp, void **location);
static void patchNative(JSCompartment *compartment, JITScript *jit, StackFrame *fp,
static void patchCall(JITChunk *chunk, StackFrame *fp, void **location);
static void patchNative(JSCompartment *compartment, JITChunk *chunk, StackFrame *fp,
jsbytecode *pc, RejoinState rejoin);
static StackFrame *
expandInlineFrameChain(StackFrame *outer, InlineFrame *inner);
/* Detach jit from any IC callers. */
static void cleanup(JITScript *jit);
};
} /* namespace mjit */


@ -894,8 +894,23 @@ void JS_FASTCALL
stubs::RecompileForInline(VMFrame &f)
{
ExpandInlineFrames(f.cx->compartment);
Recompiler recompiler(f.cx, f.script());
recompiler.recompile(/* resetUses */ false);
Recompiler::clearStackReferences(f.cx, f.script());
bool releaseChunk = true;
if (f.jit()->nchunks > 1) {
StackFrame *fp = f.fp();
for (FrameRegsIter i(f.cx); !i.done(); ++i) {
StackFrame *xfp = i.fp();
if (xfp->script() == fp->script() && xfp != fp) {
mjit::ReleaseScriptCode(f.cx, fp->script());
releaseChunk = false;
break;
}
}
}
if (releaseChunk)
f.jit()->destroyChunk(f.cx, f.chunkIndex(), /* resetUses = */ false);
}
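
The rewritten RecompileForInline above makes a chunk-level decision: when the script's JIT code has more than one chunk and a second frame of the same script is live on the stack, the whole script's code is released; otherwise only the current chunk is destroyed. A rough shell-level sketch of the re-entrant case (hypothetical; it assumes the mjitChunkLimit testing hook added later in this patch, and that the recursion actually gets compiled and split into chunks):

    mjitChunkLimit(5);        // testing hook from this patch: force tiny chunks
    function f(n) {
        if (n > 0)
            f(n - 1);         // a second frame of the same multi-chunk script
        return n;
    }
    f(20);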
void JS_FASTCALL
@ -1477,21 +1492,33 @@ stubs::LeaveBlock(VMFrame &f)
fp->setBlockChain(blockObj.enclosingBlock());
}
inline void *
FindNativeCode(VMFrame &f, jsbytecode *target)
{
void* native = f.fp()->script()->nativeCodeForPC(f.fp()->isConstructing(), target);
if (native)
return native;
CompileStatus status = CanMethodJIT(f.cx, f.script(), target, f.fp()->isConstructing(),
CompileRequest_Interpreter);
if (status == Compile_Error)
THROWV(NULL);
mjit::ClearAllFrames(f.cx->compartment);
return target;
}
void * JS_FASTCALL
stubs::LookupSwitch(VMFrame &f, jsbytecode *pc)
{
jsbytecode *jpc = pc;
JSScript *script = f.fp()->script();
bool ctor = f.fp()->isConstructing();
/* This is correct because the compiler adjusts the stack beforehand. */
Value lval = f.regs.sp[-1];
if (!lval.isPrimitive()) {
void* native = script->nativeCodeForPC(ctor, pc + GET_JUMP_OFFSET(pc));
JS_ASSERT(native);
return native;
}
if (!lval.isPrimitive())
return FindNativeCode(f, pc + GET_JUMP_OFFSET(pc));
JS_ASSERT(pc[0] == JSOP_LOOKUPSWITCH);
@ -1510,12 +1537,8 @@ stubs::LookupSwitch(VMFrame &f, jsbytecode *pc)
pc += INDEX_LEN;
if (rval.isString()) {
JSLinearString *rhs = &rval.toString()->asLinear();
if (rhs == str || EqualStrings(str, rhs)) {
void* native = script->nativeCodeForPC(ctor,
jpc + GET_JUMP_OFFSET(pc));
JS_ASSERT(native);
return native;
}
if (rhs == str || EqualStrings(str, rhs))
return FindNativeCode(f, jpc + GET_JUMP_OFFSET(pc));
}
pc += JUMP_OFFSET_LEN;
}
@ -1524,31 +1547,21 @@ stubs::LookupSwitch(VMFrame &f, jsbytecode *pc)
for (uint32_t i = 1; i <= npairs; i++) {
Value rval = script->getConst(GET_INDEX(pc));
pc += INDEX_LEN;
if (rval.isNumber() && d == rval.toNumber()) {
void* native = script->nativeCodeForPC(ctor,
jpc + GET_JUMP_OFFSET(pc));
JS_ASSERT(native);
return native;
}
if (rval.isNumber() && d == rval.toNumber())
return FindNativeCode(f, jpc + GET_JUMP_OFFSET(pc));
pc += JUMP_OFFSET_LEN;
}
} else {
for (uint32_t i = 1; i <= npairs; i++) {
Value rval = script->getConst(GET_INDEX(pc));
pc += INDEX_LEN;
if (lval == rval) {
void* native = script->nativeCodeForPC(ctor,
jpc + GET_JUMP_OFFSET(pc));
JS_ASSERT(native);
return native;
}
if (lval == rval)
return FindNativeCode(f, jpc + GET_JUMP_OFFSET(pc));
pc += JUMP_OFFSET_LEN;
}
}
void* native = script->nativeCodeForPC(ctor, jpc + GET_JUMP_OFFSET(jpc));
JS_ASSERT(native);
return native;
return FindNativeCode(f, jpc + GET_JUMP_OFFSET(jpc));
}
void * JS_FASTCALL
@ -1596,11 +1609,7 @@ stubs::TableSwitch(VMFrame &f, jsbytecode *origPc)
finally:
/* Provide the native address. */
JSScript* script = f.fp()->script();
void* native = script->nativeCodeForPC(f.fp()->isConstructing(),
originalPC + jumpOffset);
JS_ASSERT(native);
return native;
return FindNativeCode(f, originalPC + jumpOffset);
}
void JS_FASTCALL
@ -1892,8 +1901,8 @@ stubs::InvariantFailure(VMFrame &f, void *rval)
ExpandInlineFrames(f.cx->compartment);
Recompiler recompiler(f.cx, script);
recompiler.recompile();
mjit::Recompiler::clearStackReferences(f.cx, script);
mjit::ReleaseScriptCode(f.cx, script);
/* Return the same value (if any) as the call triggering the invariant failure. */
return rval;


@ -223,6 +223,8 @@ void JS_FASTCALL ArrayShift(VMFrame &f);
void JS_FASTCALL WriteBarrier(VMFrame &f, Value *addr);
void JS_FASTCALL GCThingWriteBarrier(VMFrame &f, Value *addr);
void JS_FASTCALL CrossChunkShim(VMFrame &f, void *edge);
} /* namespace stubs */
/*


@ -3855,6 +3855,26 @@ MJitDataStats(JSContext *cx, uintN argc, jsval *vp)
return true;
}
JSBool
MJitChunkLimit(JSContext *cx, uintN argc, jsval *vp)
{
if (argc > 1) {
JS_ReportError(cx, "Wrong number of arguments");
return JS_FALSE;
}
jsdouble t;
if (!JS_ValueToNumber(cx, JS_ARGV(cx, vp)[0], &t))
return JS_FALSE;
#ifdef JS_METHODJIT
mjit::SetChunkLimit((uint32_t) t);
#endif
vp->setUndefined();
return true;
}
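
A minimal (hypothetical) shell session for the new hook, matching the help text added below; the argument is coerced with JS_ValueToNumber, so any numeric value works:

    mjitChunkLimit(10);    // cap the internal chunk-size estimate at 10
    eval("var total = 0; for (var i = 0; i < 100; i++) total += i;");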
JSBool
StringStats(JSContext *cx, uintN argc, jsval *vp)
{
@ -4020,6 +4040,7 @@ static JSFunctionSpec shell_functions[] = {
#ifdef JS_METHODJIT
JS_FN("mjitcodestats", MJitCodeStats, 0,0),
JS_FN("mjitdatastats", MJitDataStats, 0,0),
JS_FN("mjitChunkLimit", MJitChunkLimit, 1,0),
#endif
JS_FN("stringstats", StringStats, 0,0),
JS_FN("newGlobal", NewGlobal, 1,0),
@ -4166,6 +4187,7 @@ static const char *const shell_help_messages[] = {
#ifdef JS_METHODJIT
"mjitcodestats() Return stats on mjit code memory usage.",
"mjitdatastats() Return stats on mjit data memory usage.",
"mjitChunkLimit(N) Specify limit on compiled chunk size during mjit compilation.",
#endif
"stringstats() Return stats on string memory usage.",
"newGlobal(kind) Return a new global object, in the current\n"


@ -187,8 +187,8 @@ BreakpointSite::recompile(JSContext *cx, bool forTrap)
if (!ac.ref().enter())
return false;
}
mjit::Recompiler recompiler(cx, script);
recompiler.recompile();
mjit::Recompiler::clearStackReferences(cx, script);
mjit::ReleaseScriptCode(cx, script);
}
#endif
return true;
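
The debugger path now performs the same two steps as the other recompilation triggers: clear stack references, then release the script's code. In the classic shell this is reachable, for example, through the trap builtin (hypothetical sketch; assumes the shell's trap(fun, pcOffset, expr) function):

    function target() { return 1; }
    for (var i = 0; i < 50; i++)
        target();                      // warm up so the script gets jitcode
    trap(target, 0, "1");              // setting a trap discards the jitcode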


@ -657,8 +657,9 @@ ContextStack::currentScript(jsbytecode **ppc) const
#ifdef JS_METHODJIT
mjit::CallSite *inlined = regs->inlined();
if (inlined) {
JS_ASSERT(inlined->inlineIndex < fp->jit()->nInlineFrames);
mjit::InlineFrame *frame = &fp->jit()->inlineFrames()[inlined->inlineIndex];
mjit::JITChunk *chunk = fp->jit()->chunk(regs->pc);
JS_ASSERT(inlined->inlineIndex < chunk->nInlineFrames);
mjit::InlineFrame *frame = &chunk->inlineFrames()[inlined->inlineIndex];
JSScript *script = frame->fun->script();
if (script->compartment() != cx_->compartment)
return NULL;


@ -569,7 +569,8 @@ ContextStack::ensureOnTop(JSContext *cx, MaybeReportError report, uintN nvars,
if (FrameRegs *regs = cx->maybeRegs()) {
JSFunction *fun = NULL;
if (JSInlinedSite *site = regs->inlined()) {
fun = regs->fp()->jit()->inlineFrames()[site->inlineIndex].fun;
mjit::JITChunk *chunk = regs->fp()->jit()->chunk(regs->pc);
fun = chunk->inlineFrames()[site->inlineIndex].fun;
} else {
StackFrame *fp = regs->fp();
if (fp->isFunctionFrame()) {