From 2a5db2f8148c3ecea92ee65624e51192bbf81dc7 Mon Sep 17 00:00:00 2001
From: Brian Hackett
Date: Wed, 12 Aug 2015 18:20:41 -0600
Subject: [PATCH] Bug 1179242 - Avoid unnecessary moves to dead stack slots,
 tweak regalloc heuristics for bundles required to be in a specific register,
 r=sunfish.

---
 js/src/jit/BacktrackingAllocator.cpp | 72 +++++++++++++++++++++++++---
 js/src/jit/BacktrackingAllocator.h   | 19 ++++++++
 js/src/jit/InlineList.h              | 12 +++++
 3 files changed, 96 insertions(+), 7 deletions(-)

diff --git a/js/src/jit/BacktrackingAllocator.cpp b/js/src/jit/BacktrackingAllocator.cpp
index 3781625f6c30..fe2276478094 100644
--- a/js/src/jit/BacktrackingAllocator.cpp
+++ b/js/src/jit/BacktrackingAllocator.cpp
@@ -546,6 +546,7 @@ BacktrackingAllocator::buildLivenessInfo()
                 LAllocation* use = phi->getOperand(mblock->positionInPhiSuccessor());
                 uint32_t reg = use->toUse()->virtualRegister();
                 live.insert(reg);
+                vreg(use).setUsedByPhi();
             }
         }
 
@@ -1670,6 +1671,40 @@ BacktrackingAllocator::insertAllRanges(LiveRangeSet& set, LiveBundle* bundle)
     return true;
 }
 
+bool
+BacktrackingAllocator::deadRange(LiveRange* range)
+{
+    // Check for direct uses of this range.
+    if (range->hasUses() || range->hasDefinition())
+        return false;
+
+    CodePosition start = range->from();
+    LNode* ins = insData[start];
+    if (start == entryOf(ins->block()))
+        return false;
+
+    VirtualRegister& reg = vregs[range->vreg()];
+
+    // Check if there are later ranges for this vreg.
+    LiveRange::RegisterLinkIterator iter = reg.rangesBegin(range);
+    for (iter++; iter; iter++) {
+        LiveRange* laterRange = LiveRange::get(*iter);
+        if (laterRange->from() > range->from())
+            return false;
+    }
+
+    // Check if this range ends at a loop backedge.
+    LNode* last = insData[range->to().previous()];
+    if (last->isGoto() && last->toGoto()->target()->id() < last->block()->mir()->id())
+        return false;
+
+    // Check if there are phis which this vreg flows to.
+    if (reg.usedByPhi())
+        return false;
+
+    return true;
+}
+
 bool
 BacktrackingAllocator::resolveControlFlow()
 {
@@ -1685,20 +1720,30 @@ BacktrackingAllocator::resolveControlFlow()
         if (mir->shouldCancel("Backtracking Resolve Control Flow (vreg loop)"))
             return false;
 
-        for (LiveRange::RegisterLinkIterator iter = reg.rangesBegin(); iter; iter++) {
+        for (LiveRange::RegisterLinkIterator iter = reg.rangesBegin(); iter; ) {
             LiveRange* range = LiveRange::get(*iter);
 
+            // Remove ranges which will never be used.
+            if (deadRange(range)) {
+                reg.removeRangeAndIncrement(iter);
+                continue;
+            }
+
             // The range which defines the register does not have a predecessor
             // to add moves from.
-            if (range->hasDefinition())
+            if (range->hasDefinition()) {
+                iter++;
                 continue;
+            }
 
             // Ignore ranges that start at block boundaries. We will handle
             // these in the next phase.
             CodePosition start = range->from();
             LNode* ins = insData[start];
-            if (start == entryOf(ins->block()))
+            if (start == entryOf(ins->block())) {
+                iter++;
                 continue;
+            }
 
             // If we already saw a range which covers the start of this range
             // and has the same allocation, we don't need an explicit move at
@@ -1716,8 +1761,10 @@ BacktrackingAllocator::resolveControlFlow()
                     break;
                 }
             }
-            if (skip)
+            if (skip) {
+                iter++;
                 continue;
+            }
 
             LiveRange* predecessorRange = reg.rangeFor(start.previous(), /* preferRegister = */ true);
             if (start.subpos() == CodePosition::INPUT) {
@@ -1727,6 +1774,8 @@ BacktrackingAllocator::resolveControlFlow()
                 if (!moveAfter(ins->toInstruction(), predecessorRange, range, reg.type()))
                     return false;
             }
+
+            iter++;
         }
     }
 
@@ -2416,16 +2465,19 @@ BacktrackingAllocator::computeSpillWeight(LiveBundle* bundle)
         return fixed ? 2000000 : 1000000;
 
     size_t usesTotal = 0;
+    fixed = false;
 
     for (LiveRange::BundleLinkIterator iter = bundle->rangesBegin(); iter; iter++) {
         LiveRange* range = LiveRange::get(*iter);
 
         if (range->hasDefinition()) {
             VirtualRegister& reg = vregs[range->vreg()];
-            if (reg.def()->policy() == LDefinition::FIXED && reg.def()->output()->isRegister())
+            if (reg.def()->policy() == LDefinition::FIXED && reg.def()->output()->isRegister()) {
                 usesTotal += 2000;
-            else if (!reg.ins()->isPhi())
+                fixed = true;
+            } else if (!reg.ins()->isPhi()) {
                 usesTotal += 2000;
+            }
         }
 
         for (UsePositionIterator iter = range->usesBegin(); iter; iter++) {
@@ -2436,8 +2488,9 @@ BacktrackingAllocator::computeSpillWeight(LiveBundle* bundle)
                 usesTotal += 1000;
                 break;
 
-              case LUse::REGISTER:
               case LUse::FIXED:
+                fixed = true;
+              case LUse::REGISTER:
                 usesTotal += 2000;
                 break;
 
@@ -2451,6 +2504,11 @@ BacktrackingAllocator::computeSpillWeight(LiveBundle* bundle)
         }
     }
 
+    // Bundles with fixed uses are given a higher spill weight, since they must
+    // be allocated to a specific register.
+    if (testbed && fixed)
+        usesTotal *= 2;
+
     // Compute spill weight as a use density, lowering the weight for long
     // lived bundles with relatively few uses.
     size_t lifetimeTotal = computePriority(bundle);
diff --git a/js/src/jit/BacktrackingAllocator.h b/js/src/jit/BacktrackingAllocator.h
index f3feb8defeec..20df6179c7ba 100644
--- a/js/src/jit/BacktrackingAllocator.h
+++ b/js/src/jit/BacktrackingAllocator.h
@@ -463,6 +463,10 @@ class VirtualRegister
     // Whether def_ is a temp or an output.
     bool isTemp_;
 
+    // Whether this vreg is an input for some phi. This use is not reflected in
+    // any range on the vreg.
+    bool usedByPhi_;
+
     // If this register's definition is MUST_REUSE_INPUT, whether a copy must
     // be introduced before the definition that relaxes the policy.
     bool mustCopyInput_;
@@ -505,6 +509,13 @@ class VirtualRegister
         return isTemp_;
     }
 
+    void setUsedByPhi() {
+        usedByPhi_ = true;
+    }
+    bool usedByPhi() {
+        return usedByPhi_;
+    }
+
     void setMustCopyInput() {
         mustCopyInput_ = true;
     }
@@ -515,6 +526,9 @@ class VirtualRegister
     LiveRange::RegisterLinkIterator rangesBegin() const {
         return ranges_.begin();
     }
+    LiveRange::RegisterLinkIterator rangesBegin(LiveRange* range) const {
+        return ranges_.begin(&range->registerLink);
+    }
     bool hasRanges() const {
         return !!rangesBegin();
     }
@@ -528,6 +542,10 @@ class VirtualRegister
     void removeRange(LiveRange* range);
     void addRange(LiveRange* range);
 
+    void removeRangeAndIncrement(LiveRange::RegisterLinkIterator& iter) {
+        ranges_.removeAndIncrement(iter);
+    }
+
     LiveBundle* firstBundle() const {
         return firstRange()->bundle();
     }
@@ -665,6 +683,7 @@ class BacktrackingAllocator : protected RegisterAllocator
     bool reifyAllocations();
     bool populateSafepoints();
    bool annotateMoveGroups();
+    bool deadRange(LiveRange* range);
     size_t findFirstNonCallSafepoint(CodePosition from);
     size_t findFirstSafepoint(CodePosition pos, size_t startFrom);
     void addLiveRegistersForRange(VirtualRegister& reg, LiveRange* range);
diff --git a/js/src/jit/InlineList.h b/js/src/jit/InlineList.h
index d78e4491ddad..73537182e321 100644
--- a/js/src/jit/InlineList.h
+++ b/js/src/jit/InlineList.h
@@ -64,6 +64,9 @@ class InlineForwardList : protected InlineForwardListNode<T>
     iterator begin() const {
         return iterator(this);
     }
+    iterator begin(Node* item) const {
+        return iterator(this, item);
+    }
     iterator end() const {
         return iterator(nullptr);
     }
@@ -166,6 +169,15 @@ private:
 #endif
     { }
 
+    InlineForwardListIterator(const InlineForwardList<T>* owner, Node* node)
+      : prev(nullptr),
+        iter(node)
+#ifdef DEBUG
+      , owner_(owner),
+        modifyCount_(owner ? owner->modifyCount_ : 0)
+#endif
+    { }
+
   public:
     InlineForwardListIterator<T> & operator ++() {
         MOZ_ASSERT(modifyCount_ == owner_->modifyCount_);