diff --git a/js/src/jit/BacktrackingAllocator.cpp b/js/src/jit/BacktrackingAllocator.cpp
index 2efb60b19610..5857ed19e83b 100644
--- a/js/src/jit/BacktrackingAllocator.cpp
+++ b/js/src/jit/BacktrackingAllocator.cpp
@@ -128,6 +128,9 @@ BacktrackingAllocator::go()
     if (!populateSafepoints())
         return false;

+    if (!annotateMoveGroups())
+        return false;
+
     return true;
 }

@@ -1184,7 +1187,7 @@ BacktrackingAllocator::resolveControlFlow()

             CodePosition start = interval->start();
             LNode *ins = insData[start];
-            if (interval->start() > entryOf(ins->block())) {
+            if (start > entryOf(ins->block())) {
                 MOZ_ASSERT(start == inputOf(ins) || start == outputOf(ins));

                 LiveInterval *prevInterval = reg->intervalFor(start.previous());
@@ -1454,6 +1457,63 @@ BacktrackingAllocator::populateSafepoints()
     return true;
 }

+bool
+BacktrackingAllocator::annotateMoveGroups()
+{
+    // Annotate move groups in the LIR graph with any register that is not
+    // allocated at that point and can be used as a scratch register. This is
+    // only required for x86, as other platforms always have scratch registers
+    // available for use.
+#ifdef JS_CODEGEN_X86
+    for (size_t i = 0; i < graph.numBlocks(); i++) {
+        if (mir->shouldCancel("Backtracking Annotate Move Groups"))
+            return false;
+
+        LBlock *block = graph.getBlock(i);
+        LInstruction *last = nullptr;
+        for (LInstructionIterator iter = block->begin(); iter != block->end(); ++iter) {
+            if (iter->isMoveGroup()) {
+                CodePosition from = last ? outputOf(last) : entryOf(block);
+                LiveInterval::Range range(from, from.next());
+                AllocatedRange search(nullptr, &range), existing;
+
+                for (size_t i = 0; i < AnyRegister::Total; i++) {
+                    PhysicalRegister &reg = registers[i];
+                    if (reg.reg.isFloat() || !reg.allocatable)
+                        continue;
+
+                    // This register is unavailable for use if (a) it is in use
+                    // by some live interval immediately before the move group,
+                    // or (b) it is an operand in one of the group's moves. The
+                    // latter case handles live intervals which end immediately
+                    // before the move group or start immediately after.
+
+                    bool found = false;
+                    LGeneralReg alloc(reg.reg.gpr());
+                    for (size_t j = 0; j < iter->toMoveGroup()->numMoves(); j++) {
+                        LMove move = iter->toMoveGroup()->getMove(j);
+                        if (*move.from() == alloc || *move.to() == alloc) {
+                            found = true;
+                            break;
+                        }
+                    }
+
+                    if (found || reg.allocations.contains(search, &existing))
+                        continue;
+
+                    iter->toMoveGroup()->setScratchRegister(reg.reg.gpr());
+                    break;
+                }
+            } else {
+                last = *iter;
+            }
+        }
+    }
+#endif
+
+    return true;
+}
+
 void
 BacktrackingAllocator::dumpRegisterGroups()
 {
diff --git a/js/src/jit/BacktrackingAllocator.h b/js/src/jit/BacktrackingAllocator.h
index 82e78e59ebff..01af3192c597 100644
--- a/js/src/jit/BacktrackingAllocator.h
+++ b/js/src/jit/BacktrackingAllocator.h
@@ -241,6 +241,7 @@ class BacktrackingAllocator
     bool resolveControlFlow();
     bool reifyAllocations();
     bool populateSafepoints();
+    bool annotateMoveGroups();

     void dumpRegisterGroups();
     void dumpFixedRanges();
diff --git a/js/src/jit/CodeGenerator.cpp b/js/src/jit/CodeGenerator.cpp
index e7534e0642ae..d13855dd290b 100644
--- a/js/src/jit/CodeGenerator.cpp
+++ b/js/src/jit/CodeGenerator.cpp
@@ -2133,6 +2133,8 @@ CodeGenerator::visitMoveGroup(LMoveGroup *group)
     masm.propagateOOM(resolver.resolve());

     MoveEmitter emitter(masm);
+    if (group->maybeScratchRegister().isGeneralReg())
+        emitter.setScratchRegister(group->maybeScratchRegister().toGeneralReg()->reg());
     emitter.emit(resolver);
     emitter.finish();
 }
diff --git a/js/src/jit/LIR-Common.h b/js/src/jit/LIR-Common.h
index dd4f402bd257..daa258cce160 100644
--- a/js/src/jit/LIR-Common.h
+++ b/js/src/jit/LIR-Common.h
@@ -108,6 +108,11 @@ class LMoveGroup : public LInstructionHelper<0, 0, 0>
 {
     js::Vector moves_;

+#ifdef JS_CODEGEN_X86
+    // Optional general register available for use when executing moves.
+    LAllocation scratchRegister_;
+#endif
+
     explicit LMoveGroup(TempAllocator &alloc)
       : moves_(alloc)
     { }
@@ -133,6 +138,19 @@ class LMoveGroup : public LInstructionHelper<0, 0, 0>
     const LMove &getMove(size_t i) const {
         return moves_[i];
     }
+
+#ifdef JS_CODEGEN_X86
+    void setScratchRegister(Register reg) {
+        scratchRegister_ = LGeneralReg(reg);
+    }
+#endif
+    LAllocation maybeScratchRegister() {
+#ifdef JS_CODEGEN_X86
+        return scratchRegister_;
+#else
+        return LAllocation();
+#endif
+    }
 };
diff --git a/js/src/jit/RegisterSets.h b/js/src/jit/RegisterSets.h
index c2b8bd8df2f4..faac3cb30259 100644
--- a/js/src/jit/RegisterSets.h
+++ b/js/src/jit/RegisterSets.h
@@ -405,7 +405,7 @@ class TypedRegisterSet
 #error "Bad architecture"
 #endif
     }
-    // Determemine if some register are still allocated. This function should
+    // Determine if some register are still allocated. This function should
     // be used with the set of allocatable registers used for the initialization
     // of the current set.
     bool someAllocated(const TypedRegisterSet &allocatable) const {
diff --git a/js/src/jit/arm/MoveEmitter-arm.h b/js/src/jit/arm/MoveEmitter-arm.h
index 11bb10640750..8f03ebdb2fda 100644
--- a/js/src/jit/arm/MoveEmitter-arm.h
+++ b/js/src/jit/arm/MoveEmitter-arm.h
@@ -56,6 +56,8 @@ class MoveEmitterARM
     ~MoveEmitterARM();
     void emit(const MoveResolver &moves);
     void finish();
+
+    void setScratchRegister(Register reg) {}
 };

 typedef MoveEmitterARM MoveEmitter;
diff --git a/js/src/jit/mips/MoveEmitter-mips.h b/js/src/jit/mips/MoveEmitter-mips.h
index f836e837735e..ca4b1b18bd78 100644
--- a/js/src/jit/mips/MoveEmitter-mips.h
+++ b/js/src/jit/mips/MoveEmitter-mips.h
@@ -56,6 +56,8 @@ class MoveEmitterMIPS
     ~MoveEmitterMIPS();
     void emit(const MoveResolver &moves);
     void finish();
+
+    void setScratchRegister(Register reg) {}
 };

 typedef MoveEmitterMIPS MoveEmitter;
diff --git a/js/src/jit/none/MoveEmitter-none.h b/js/src/jit/none/MoveEmitter-none.h
index 36f3cac6f5a1..f1c01e893a00 100644
--- a/js/src/jit/none/MoveEmitter-none.h
+++ b/js/src/jit/none/MoveEmitter-none.h
@@ -19,6 +19,7 @@ class MoveEmitterNone
     MoveEmitterNone(MacroAssemblerNone &) { MOZ_CRASH(); }
     void emit(const MoveResolver &) { MOZ_CRASH(); }
     void finish() { MOZ_CRASH(); }
+    void setScratchRegister(Register) { MOZ_CRASH(); }
 };

 typedef MoveEmitterNone MoveEmitter;
diff --git a/js/src/jit/shared/MoveEmitter-x86-shared.cpp b/js/src/jit/shared/MoveEmitter-x86-shared.cpp
index 79d548ff4700..8711c7c8d83d 100644
--- a/js/src/jit/shared/MoveEmitter-x86-shared.cpp
+++ b/js/src/jit/shared/MoveEmitter-x86-shared.cpp
@@ -97,6 +97,12 @@ MoveEmitterX86::maybeEmitOptimizedCycle(const MoveResolver &moves, size_t i,
 void
 MoveEmitterX86::emit(const MoveResolver &moves)
 {
+#if defined(JS_CODEGEN_X86) && defined(DEBUG)
+    // Clobber any scratch register we have, to make regalloc bugs more visible.
+    if (hasScratchRegister())
+        masm.mov(ImmWord(0xdeadbeef), scratchRegister());
+#endif
+
     for (size_t i = 0; i < moves.numMoves(); i++) {
         const MoveOp &move = moves.getMove(i);
         const MoveOperand &from = move.from();
@@ -365,15 +371,15 @@ MoveEmitterX86::emitInt32Move(const MoveOperand &from, const MoveOperand &to)
     } else {
         // Memory to memory gpr move.
         MOZ_ASSERT(from.isMemory());
-#ifdef JS_CODEGEN_X64
-        // x64 has a ScratchReg. Use it.
-        masm.load32(toAddress(from), ScratchReg);
-        masm.move32(ScratchReg, toOperand(to));
-#else
-        // No ScratchReg; bounce it off the stack.
-        masm.Push(toOperand(from));
-        masm.Pop(toPopOperand(to));
-#endif
+        if (hasScratchRegister()) {
+            Register reg = scratchRegister();
+            masm.load32(toAddress(from), reg);
+            masm.move32(reg, toOperand(to));
+        } else {
+            // No scratch register available; bounce it off the stack.
+            masm.Push(toOperand(from));
+            masm.Pop(toPopOperand(to));
+        }
     }
 }

@@ -390,30 +396,30 @@ MoveEmitterX86::emitGeneralMove(const MoveOperand &from, const MoveOperand &to)
         masm.lea(toOperand(from), to.reg());
     } else if (from.isMemory()) {
         // Memory to memory gpr move.
-#ifdef JS_CODEGEN_X64
-        // x64 has a ScratchReg. Use it.
-        masm.loadPtr(toAddress(from), ScratchReg);
-        masm.mov(ScratchReg, toOperand(to));
-#else
-        // No ScratchReg; bounce it off the stack.
-        masm.Push(toOperand(from));
-        masm.Pop(toPopOperand(to));
-#endif
+        if (hasScratchRegister()) {
+            Register reg = scratchRegister();
+            masm.loadPtr(toAddress(from), reg);
+            masm.mov(reg, toOperand(to));
+        } else {
+            // No scratch register available; bounce it off the stack.
+            masm.Push(toOperand(from));
+            masm.Pop(toPopOperand(to));
+        }
     } else {
         // Effective address to memory move.
         MOZ_ASSERT(from.isEffectiveAddress());
-#ifdef JS_CODEGEN_X64
-        // x64 has a ScratchReg. Use it.
-        masm.lea(toOperand(from), ScratchReg);
-        masm.mov(ScratchReg, toOperand(to));
-#else
-        // This is tricky without a ScratchReg. We can't do an lea. Bounce the
-        // base register off the stack, then add the offset in place. Note that
-        // this clobbers FLAGS!
-        masm.Push(from.base());
-        masm.Pop(toPopOperand(to));
-        masm.addPtr(Imm32(from.disp()), toOperand(to));
-#endif
+        if (hasScratchRegister()) {
+            Register reg = scratchRegister();
+            masm.lea(toOperand(from), reg);
+            masm.mov(reg, toOperand(to));
+        } else {
+            // This is tricky without a scratch reg. We can't do an lea. Bounce the
+            // base register off the stack, then add the offset in place. Note that
+            // this clobbers FLAGS!
+            masm.Push(from.base());
+            masm.Pop(toPopOperand(to));
+            masm.addPtr(Imm32(from.disp()), toOperand(to));
+        }
     }
 }
diff --git a/js/src/jit/shared/MoveEmitter-x86-shared.h b/js/src/jit/shared/MoveEmitter-x86-shared.h
index 0f9a7e109ac2..16382641db59 100644
--- a/js/src/jit/shared/MoveEmitter-x86-shared.h
+++ b/js/src/jit/shared/MoveEmitter-x86-shared.h
@@ -33,6 +33,11 @@ class MoveEmitterX86
     // codegen->framePushed_ at the time it is allocated. -1 if not allocated.
     int32_t pushedAtCycle_;

+#ifdef JS_CODEGEN_X86
+    // Optional scratch register for performing moves.
+    mozilla::Maybe<Register> scratchRegister_;
+#endif
+
     void assertDone();
     Address cycleSlot();
     Address toAddress(const MoveOperand &operand) const;
@@ -57,6 +62,29 @@ class MoveEmitterX86
     ~MoveEmitterX86();
     void emit(const MoveResolver &moves);
     void finish();
+
+    void setScratchRegister(Register reg) {
+#ifdef JS_CODEGEN_X86
+        scratchRegister_.emplace(reg);
+#endif
+    }
+
+    bool hasScratchRegister() {
+#ifdef JS_CODEGEN_X86
+        return scratchRegister_.isSome();
+#else
+        return true;
+#endif
+    }
+
+    Register scratchRegister() {
+        MOZ_ASSERT(hasScratchRegister());
+#ifdef JS_CODEGEN_X86
+        return scratchRegister_.value();
+#else
+        return ScratchReg;
+#endif
+    }
 };

 typedef MoveEmitterX86 MoveEmitter;
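
The standalone sketch below is not part of the patch; it restates the fallback that the new MoveEmitterX86 paths implement. When the register allocator has annotated the move group with a free general-purpose register, a memory-to-memory move is staged through that register; otherwise the value is bounced off the stack with a push/pop pair. The FakeAsm type and emitMemToMemMove helper are hypothetical stand-ins that only print, in Intel syntax, the sequence the real MacroAssembler calls in emitGeneralMove would produce.

// Standalone sketch, not SpiderMonkey code. FakeAsm and emitMemToMemMove are
// hypothetical; they model the scratch-register-vs-stack decision only.
#include <cstdio>
#include <optional>
#include <string>

struct FakeAsm {
    // Intel syntax below: destination operand printed first.
    void loadPtr(const std::string &src, const std::string &reg) {
        std::printf("  mov %s, %s\n", reg.c_str(), src.c_str());   // reg <- [mem]
    }
    void mov(const std::string &reg, const std::string &dst) {
        std::printf("  mov %s, %s\n", dst.c_str(), reg.c_str());   // [mem] <- reg
    }
    void push(const std::string &src) { std::printf("  push %s\n", src.c_str()); }
    void pop(const std::string &dst)  { std::printf("  pop %s\n", dst.c_str()); }
};

// Memory-to-memory GPR move: go through the allocator-provided scratch
// register when the move group was annotated with one, otherwise bounce the
// value off the stack.
void emitMemToMemMove(FakeAsm &masm, const std::string &from, const std::string &to,
                      const std::optional<std::string> &scratch)
{
    if (scratch) {
        masm.loadPtr(from, *scratch);
        masm.mov(*scratch, to);
    } else {
        masm.push(from);
        masm.pop(to);
    }
}

int main()
{
    FakeAsm masm;
    std::printf("scratch register annotated (eax):\n");
    emitMemToMemMove(masm, "dword ptr [esp+8]", "dword ptr [esp+16]", std::string("eax"));
    std::printf("no scratch register available:\n");
    emitMemToMemMove(masm, "dword ptr [esp+8]", "dword ptr [esp+16]", std::nullopt);
    return 0;
}

Note that the DEBUG-only 0xdeadbeef clobber added to MoveEmitterX86::emit exists so that any code which silently relies on the scratch register's previous contents fails loudly instead of working by accident.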