Mirror of https://github.com/mozilla/gecko-dev.git
Bug 1124377 - Try to provide scratch registers for memory->memory MoveGroup moves, r=sunfish.
This commit is contained in:
Родитель
17bd657462
Коммит
e016373ce7
|
@ -128,6 +128,9 @@ BacktrackingAllocator::go()
|
|||
if (!populateSafepoints())
|
||||
return false;
|
||||
|
||||
if (!annotateMoveGroups())
|
||||
return false;
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
|
@ -1184,7 +1187,7 @@ BacktrackingAllocator::resolveControlFlow()
|
|||
|
||||
CodePosition start = interval->start();
|
||||
LNode *ins = insData[start];
|
||||
if (interval->start() > entryOf(ins->block())) {
|
||||
if (start > entryOf(ins->block())) {
|
||||
MOZ_ASSERT(start == inputOf(ins) || start == outputOf(ins));
|
||||
|
||||
LiveInterval *prevInterval = reg->intervalFor(start.previous());
|
||||
|
@ -1454,6 +1457,63 @@ BacktrackingAllocator::populateSafepoints()
|
|||
return true;
|
||||
}
|
||||
|
||||
bool
|
||||
BacktrackingAllocator::annotateMoveGroups()
|
||||
{
|
||||
// Annotate move groups in the LIR graph with any register that is not
|
||||
// allocated at that point and can be used as a scratch register. This is
|
||||
// only required for x86, as other platforms always have scratch registers
|
||||
// available for use.
|
||||
#ifdef JS_CODEGEN_X86
|
||||
for (size_t i = 0; i < graph.numBlocks(); i++) {
|
||||
if (mir->shouldCancel("Backtracking Annotate Move Groups"))
|
||||
return false;
|
||||
|
||||
LBlock *block = graph.getBlock(i);
|
||||
LInstruction *last = nullptr;
|
||||
for (LInstructionIterator iter = block->begin(); iter != block->end(); ++iter) {
|
||||
if (iter->isMoveGroup()) {
|
||||
CodePosition from = last ? outputOf(last) : entryOf(block);
|
||||
LiveInterval::Range range(from, from.next());
|
||||
AllocatedRange search(nullptr, &range), existing;
|
||||
|
||||
for (size_t i = 0; i < AnyRegister::Total; i++) {
|
||||
PhysicalRegister ® = registers[i];
|
||||
if (reg.reg.isFloat() || !reg.allocatable)
|
||||
continue;
|
||||
|
||||
// This register is unavailable for use if (a) it is in use
|
||||
// by some live interval immediately before the move group,
|
||||
// or (b) it is an operand in one of the group's moves. The
|
||||
// latter case handles live intervals which end immediately
|
||||
// before the move group or start immediately after.
|
||||
|
||||
bool found = false;
|
||||
LGeneralReg alloc(reg.reg.gpr());
|
||||
for (size_t j = 0; j < iter->toMoveGroup()->numMoves(); j++) {
|
||||
LMove move = iter->toMoveGroup()->getMove(j);
|
||||
if (*move.from() == alloc || *move.to() == alloc) {
|
||||
found = true;
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
if (found || reg.allocations.contains(search, &existing))
|
||||
continue;
|
||||
|
||||
iter->toMoveGroup()->setScratchRegister(reg.reg.gpr());
|
||||
break;
|
||||
}
|
||||
} else {
|
||||
last = *iter;
|
||||
}
|
||||
}
|
||||
}
|
||||
#endif
|
||||
|
||||
return true;
|
||||
}
|
||||
|
||||
void
|
||||
BacktrackingAllocator::dumpRegisterGroups()
|
||||
{
|
||||
|
|
|
@ -241,6 +241,7 @@ class BacktrackingAllocator
|
|||
bool resolveControlFlow();
|
||||
bool reifyAllocations();
|
||||
bool populateSafepoints();
|
||||
bool annotateMoveGroups();
|
||||
|
||||
void dumpRegisterGroups();
|
||||
void dumpFixedRanges();
|
||||
|
|
|
@ -2133,6 +2133,8 @@ CodeGenerator::visitMoveGroup(LMoveGroup *group)
|
|||
masm.propagateOOM(resolver.resolve());
|
||||
|
||||
MoveEmitter emitter(masm);
|
||||
if (group->maybeScratchRegister().isGeneralReg())
|
||||
emitter.setScratchRegister(group->maybeScratchRegister().toGeneralReg()->reg());
|
||||
emitter.emit(resolver);
|
||||
emitter.finish();
|
||||
}
|
||||
|
|
|
@ -108,6 +108,11 @@ class LMoveGroup : public LInstructionHelper<0, 0, 0>
|
|||
{
|
||||
js::Vector<LMove, 2, JitAllocPolicy> moves_;
|
||||
|
||||
#ifdef JS_CODEGEN_X86
|
||||
// Optional general register available for use when executing moves.
|
||||
LAllocation scratchRegister_;
|
||||
#endif
|
||||
|
||||
explicit LMoveGroup(TempAllocator &alloc)
|
||||
: moves_(alloc)
|
||||
{ }
|
||||
|
@ -133,6 +138,19 @@ class LMoveGroup : public LInstructionHelper<0, 0, 0>
|
|||
const LMove &getMove(size_t i) const {
|
||||
return moves_[i];
|
||||
}
|
||||
|
||||
#ifdef JS_CODEGEN_X86
|
||||
void setScratchRegister(Register reg) {
|
||||
scratchRegister_ = LGeneralReg(reg);
|
||||
}
|
||||
#endif
|
||||
LAllocation maybeScratchRegister() {
|
||||
#ifdef JS_CODEGEN_X86
|
||||
return scratchRegister_;
|
||||
#else
|
||||
return LAllocation();
|
||||
#endif
|
||||
}
|
||||
};
|
||||
|
||||
|
||||
|
|
|
@ -405,7 +405,7 @@ class TypedRegisterSet
|
|||
#error "Bad architecture"
|
||||
#endif
|
||||
}
|
||||
// Determemine if some register are still allocated. This function should
|
||||
// Determine if some register are still allocated. This function should
|
||||
// be used with the set of allocatable registers used for the initialization
|
||||
// of the current set.
|
||||
bool someAllocated(const TypedRegisterSet &allocatable) const {
|
||||
|
|
|
@ -56,6 +56,8 @@ class MoveEmitterARM
|
|||
~MoveEmitterARM();
|
||||
void emit(const MoveResolver &moves);
|
||||
void finish();
|
||||
|
||||
    // No-op: only x86 needs an allocator-provided scratch register for
    // memory->memory moves; ARM always has scratch registers available.
    void setScratchRegister(Register reg) {}
|
||||
};
|
||||
|
||||
typedef MoveEmitterARM MoveEmitter;
|
||||
|
|
|
@ -56,6 +56,8 @@ class MoveEmitterMIPS
|
|||
~MoveEmitterMIPS();
|
||||
void emit(const MoveResolver &moves);
|
||||
void finish();
|
||||
|
||||
    // No-op: only x86 needs an allocator-provided scratch register for
    // memory->memory moves; MIPS always has scratch registers available.
    void setScratchRegister(Register reg) {}
|
||||
};
|
||||
|
||||
typedef MoveEmitterMIPS MoveEmitter;
|
||||
|
|
|
@ -19,6 +19,7 @@ class MoveEmitterNone
|
|||
MoveEmitterNone(MacroAssemblerNone &) { MOZ_CRASH(); }
|
||||
void emit(const MoveResolver &) { MOZ_CRASH(); }
|
||||
void finish() { MOZ_CRASH(); }
|
||||
    // Crashes like every other MoveEmitterNone method: the 'none' backend
    // never emits code.
    void setScratchRegister(Register) { MOZ_CRASH(); }
|
||||
};
|
||||
|
||||
typedef MoveEmitterNone MoveEmitter;
|
||||
|
|
|
@ -97,6 +97,12 @@ MoveEmitterX86::maybeEmitOptimizedCycle(const MoveResolver &moves, size_t i,
|
|||
void
|
||||
MoveEmitterX86::emit(const MoveResolver &moves)
|
||||
{
|
||||
#if defined(JS_CODEGEN_X86) && defined(DEBUG)
|
||||
// Clobber any scratch register we have, to make regalloc bugs more visible.
|
||||
if (hasScratchRegister())
|
||||
masm.mov(ImmWord(0xdeadbeef), scratchRegister());
|
||||
#endif
|
||||
|
||||
for (size_t i = 0; i < moves.numMoves(); i++) {
|
||||
const MoveOp &move = moves.getMove(i);
|
||||
const MoveOperand &from = move.from();
|
||||
|
@ -365,15 +371,15 @@ MoveEmitterX86::emitInt32Move(const MoveOperand &from, const MoveOperand &to)
|
|||
} else {
|
||||
// Memory to memory gpr move.
|
||||
MOZ_ASSERT(from.isMemory());
|
||||
#ifdef JS_CODEGEN_X64
|
||||
// x64 has a ScratchReg. Use it.
|
||||
masm.load32(toAddress(from), ScratchReg);
|
||||
masm.move32(ScratchReg, toOperand(to));
|
||||
#else
|
||||
// No ScratchReg; bounce it off the stack.
|
||||
if (hasScratchRegister()) {
|
||||
Register reg = scratchRegister();
|
||||
masm.load32(toAddress(from), reg);
|
||||
masm.move32(reg, toOperand(to));
|
||||
} else {
|
||||
// No scratch register available; bounce it off the stack.
|
||||
masm.Push(toOperand(from));
|
||||
masm.Pop(toPopOperand(to));
|
||||
#endif
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
@ -390,30 +396,30 @@ MoveEmitterX86::emitGeneralMove(const MoveOperand &from, const MoveOperand &to)
|
|||
masm.lea(toOperand(from), to.reg());
|
||||
} else if (from.isMemory()) {
|
||||
// Memory to memory gpr move.
|
||||
#ifdef JS_CODEGEN_X64
|
||||
// x64 has a ScratchReg. Use it.
|
||||
masm.loadPtr(toAddress(from), ScratchReg);
|
||||
masm.mov(ScratchReg, toOperand(to));
|
||||
#else
|
||||
// No ScratchReg; bounce it off the stack.
|
||||
if (hasScratchRegister()) {
|
||||
Register reg = scratchRegister();
|
||||
masm.loadPtr(toAddress(from), reg);
|
||||
masm.mov(reg, toOperand(to));
|
||||
} else {
|
||||
// No scratch register available; bounce it off the stack.
|
||||
masm.Push(toOperand(from));
|
||||
masm.Pop(toPopOperand(to));
|
||||
#endif
|
||||
}
|
||||
} else {
|
||||
// Effective address to memory move.
|
||||
MOZ_ASSERT(from.isEffectiveAddress());
|
||||
#ifdef JS_CODEGEN_X64
|
||||
// x64 has a ScratchReg. Use it.
|
||||
masm.lea(toOperand(from), ScratchReg);
|
||||
masm.mov(ScratchReg, toOperand(to));
|
||||
#else
|
||||
// This is tricky without a ScratchReg. We can't do an lea. Bounce the
|
||||
if (hasScratchRegister()) {
|
||||
Register reg = scratchRegister();
|
||||
masm.lea(toOperand(from), reg);
|
||||
masm.mov(reg, toOperand(to));
|
||||
} else {
|
||||
// This is tricky without a scratch reg. We can't do an lea. Bounce the
|
||||
// base register off the stack, then add the offset in place. Note that
|
||||
// this clobbers FLAGS!
|
||||
masm.Push(from.base());
|
||||
masm.Pop(toPopOperand(to));
|
||||
masm.addPtr(Imm32(from.disp()), toOperand(to));
|
||||
#endif
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
|
|
|
@ -33,6 +33,11 @@ class MoveEmitterX86
|
|||
// codegen->framePushed_ at the time it is allocated. -1 if not allocated.
|
||||
int32_t pushedAtCycle_;
|
||||
|
||||
#ifdef JS_CODEGEN_X86
|
||||
// Optional scratch register for performing moves.
|
||||
mozilla::Maybe<Register> scratchRegister_;
|
||||
#endif
|
||||
|
||||
void assertDone();
|
||||
Address cycleSlot();
|
||||
Address toAddress(const MoveOperand &operand) const;
|
||||
|
@ -57,6 +62,29 @@ class MoveEmitterX86
|
|||
~MoveEmitterX86();
|
||||
void emit(const MoveResolver &moves);
|
||||
void finish();
|
||||
|
||||
    // Record a scratch register the register allocator found to be free at
    // this move group. Only meaningful on x86; other builds ignore the hint
    // (x64 has a dedicated ScratchReg).
    void setScratchRegister(Register reg) {
#ifdef JS_CODEGEN_X86
        scratchRegister_.emplace(reg);
#endif
    }
|
||||
|
||||
    // Whether a scratch GPR is available for memory->memory moves. On x86
    // this is true only if the allocator provided one via
    // setScratchRegister(); on x64 a dedicated ScratchReg always exists.
    bool hasScratchRegister() {
#ifdef JS_CODEGEN_X86
        return scratchRegister_.isSome();
#else
        return true;
#endif
    }
||||
|
||||
    // The scratch GPR to use for memory->memory moves. Callers must check
    // hasScratchRegister() first; asserting here catches misuse on x86,
    // where no scratch register may have been provided.
    Register scratchRegister() {
        MOZ_ASSERT(hasScratchRegister());
#ifdef JS_CODEGEN_X86
        return scratchRegister_.value();
#else
        return ScratchReg;
#endif
    }
|
||||
};
|
||||
|
||||
typedef MoveEmitterX86 MoveEmitter;
|
||||
|
|
Загрузка…
Ссылка в новой задаче