Index: lib/CodeGen/LiveIntervalAnalysis.cpp
===================================================================
--- lib/CodeGen/LiveIntervalAnalysis.cpp
+++ lib/CodeGen/LiveIntervalAnalysis.cpp
@@ -1010,6 +1010,11 @@
   /// 5. Value read at OldIdx, killed before NewIdx:
   ///      Extend kill to NewIdx.
   ///
+  /// 6. Live def at OldIdx AND several other values between NewIdx and OldIdx:
+  ///      Merge the range that contains OldIdx with the preceding one and split
+  ///      the range that contains NewIdx.
+  ///      (Happens when reordering independent writes to a subregister)
+  ///
   void handleMoveDown(LiveRange &LR) {
     // First look for a kill at OldIdx.
     LiveRange::iterator I = LR.find(OldIdx.getBaseIndex());
@@ -1020,10 +1025,22 @@

     // Handle a live-in value.
     if (!SlotIndex::isSameInstr(I->start, OldIdx)) {
-      bool isKill = SlotIndex::isSameInstr(OldIdx, I->end);
       // If the live-in value already extends to NewIdx, there is nothing to do.
       if (!SlotIndex::isEarlierInstr(I->end, NewIdx))
         return;
+      // If the value is used at OldIdx and that use ends up in another
+      // interval (this happens when moving over an unrelated subregister def),
+      // check whether we need to extend the destination interval and return.
+      LiveRange::iterator Next = std::next(I);
+      if (Next != E && !SlotIndex::isSameInstr(OldIdx, Next->start) &&
+          SlotIndex::isEarlierInstr(Next->start, NewIdx)) {
+        LiveInterval::iterator NewI = LR.advanceTo(I, NewIdx.getBaseIndex());
+        if (NewI != E && SlotIndex::isEarlierInstr(NewI->start, NewIdx))
+          return;
+        LiveInterval::iterator Prev = std::prev(NewI);
+        Prev->end = NewIdx.getRegSlot();
+        return;
+      }
       // Aggressively remove all kill flags from the old kill point.
       // Kill flags shouldn't be used while live intervals exist, they will be
       // reinserted by VirtRegRewriter.
@@ -1033,9 +1050,10 @@
             MO->setIsKill(false);
       // Adjust I->end to reach NewIdx. This may temporarily make LR invalid by
       // overlapping ranges. Case 5 above.
+      bool IsKill = SlotIndex::isSameInstr(OldIdx, I->end);
       I->end = NewIdx.getRegSlot(I->end.isEarlyClobber());
       // If this was a kill, there may also be a def. Otherwise we're done.
-      if (!isKill)
+      if (!IsKill)
         return;
       ++I;
     }
@@ -1056,11 +1074,64 @@
     // The remaining possibilities are now:
     // 2. Live def at OldIdx, killed at NewIdx: isSameInstr(I->end, NewIdx).
     // 3. Dead def at OldIdx: I->end = OldIdx.getDeadSlot().
+    // 6. Live def at OldIdx moving across several other values.
     // In either case, it is possible that there is an existing def at NewIdx.
-    assert((I->end == OldIdx.getDeadSlot() ||
-            SlotIndex::isSameInstr(I->end, NewIdx)) &&
-            "Cannot move def below kill");
     LiveRange::iterator NewI = LR.advanceTo(I, NewIdx.getRegSlot());
+    bool IsDeadDef = SlotIndex::isSameInstr(I->start, I->end);
+    if (!IsDeadDef && SlotIndex::isEarlierInstr(I->end, NewIdx)) {
+      // OldIdx is not a dead def, and NewIdx is inside a later interval.
+      // Case 6 above.
+      const SlotIndex SplitPos = DefVNI->def;
+      if (I != LR.begin() &&
+          !SlotIndex::isEarlierInstr(std::prev(I)->end, I->start)) {
+        // The live intervals look like this (where I = [xi+1,xi+2)):
+        //   [x0,x1)...[xi,xi+1)[xi+1,xi+2)...
+        // There is no gap between I and its predecessor, so merge them.
+        DefVNI = I->valno;
+        LiveRange::iterator Prev = std::prev(I);
+        Prev->end = I->end;
+      } else {
+        // The live intervals look like this (where I = [xi+2,xi+3)):
+        //   [x0,x1)...[xi,xi+1)[xi+2,xi+3)...
+        // The value is live in I.
+        LiveRange::iterator Next = std::next(I);
+        // We merge I and its successor. As we're dealing with subreg
+        // reordering, there is always a successor to I in the same BB.
+        DefVNI = Next->valno;
+        Next->start = I->end;
+        Next->valno = I->valno;
+        Next->valno->def = Next->start;
+      }
+      // If NewIdx is beyond the last segment, extend that and append a new one.
+      if (NewI == E) {
+        // I is undef at this point; slide (I;NewI) up one position.
+        std::copy(std::next(I), NewI, I);
+        // The last segment is undefined now, reuse it for a dead def.
+        NewI = std::prev(NewI);
+        NewI->start = SplitPos;
+        NewI->end = SplitPos.getDeadSlot();
+        NewI->valno = DefVNI;
+        NewI->valno->def = NewI->start;
+
+        LiveRange::iterator Prev = std::prev(NewI);
+        Prev->end = SplitPos;
+      } else {
+        // I is undef at this point; slide (I;NewI] up one position.
+        std::copy(std::next(I), std::next(NewI), I);
+        // The interval pointed to by NewI is now undef at this point,
+        // so we can use it to split std::prev(NewI) in two.
+        LiveRange::iterator Prev = std::prev(NewI);
+        NewI->start = SplitPos;
+        NewI->end = Prev->end;
+        NewI->valno = Prev->valno;
+        NewI->valno->def = SplitPos;
+        Prev->end = SplitPos;
+        Prev->valno = DefVNI;
+        Prev->valno->def = Prev->start;
+      }
+      return;
+    }
+
     if (NewI != E && SlotIndex::isSameInstr(NewI->start, NewIdx)) {
       // There is an existing def at NewIdx, case 4 above. The def at OldIdx is
       // coalesced into that value.
@@ -1098,6 +1169,11 @@
   ///      Hoist kill to NewIdx, then scan for last kill between NewIdx and
   ///      OldIdx.
   ///
+  /// 6. Live def at OldIdx AND several other values between NewIdx and OldIdx:
+  ///      Extend the range that precedes OldIdx and split the range that
+  ///      contains NewIdx.
+  ///      (Happens when reordering independent partial writes to a register)
+  ///
   void handleMoveUp(LiveRange &LR, unsigned Reg, unsigned LaneMask) {
     // First look for a kill at OldIdx.
     LiveRange::iterator I = LR.find(OldIdx.getBaseIndex());
@@ -1106,22 +1182,23 @@
     if (I == E || SlotIndex::isEarlierInstr(OldIdx, I->start))
       return;

-    // Handle a live-in value.
+    // Is the value alive immediately before OldIdx?
     if (!SlotIndex::isSameInstr(I->start, OldIdx)) {
-      // If the live-in value isn't killed here, there is nothing to do.
+      // If the segment ends later, then we have a non-killing use and are fine.
       if (!SlotIndex::isSameInstr(OldIdx, I->end))
         return;
-      // Adjust I->end to end at NewIdx. If we are hoisting a kill above
-      // another use, we need to search for that use. Case 5 above.
-      I->end = NewIdx.getRegSlot(I->end.isEarlyClobber());
-      ++I;
-      // If OldIdx also defines a value, there couldn't have been another use.
-      if (I == E || !SlotIndex::isSameInstr(I->start, OldIdx)) {
-        // No def, search for the new kill.
-        // This can never be an early clobber kill since there is no def.
-        std::prev(I)->end = findLastUseBefore(Reg, LaneMask).getRegSlot();
-        return;
+      LiveRange::iterator Next = std::next(I);
+      bool IsReDef = Next != E && SlotIndex::isSameInstr(OldIdx, Next->start);
+      // If OldIdx is not a redef, or it is a dead redef, we move the killing
+      // use around and have to search backwards for the new kill. Case 5 above.
+      I->end = std::max(I->start.getDeadSlot(), NewIdx.getRegSlot());
+      if (!IsReDef || SlotIndex::isSameInstr(Next->start, Next->end)) {
+        I->end = findLastUseBefore(I->end, Reg, LaneMask);
+        // We are done if it was just a use.
+        if (!IsReDef)
+          return;
       }
+      I = Next;
     }

     // Now deal with the def at OldIdx.
@@ -1146,6 +1223,43 @@
       return;
     }

+    bool IsDeadDef = SlotIndex::isSameInstr(I->start, I->end);
+    if (!IsDeadDef && I != LR.begin() &&
+        SlotIndex::isEarlierInstr(NewIdx, std::prev(I)->start)) {
+      // Case 6:
+      // OldIdx is not a dead def and NewIdx is before the predecessor's start.
+      LiveInterval::iterator NewI = LR.find(NewIdx.getRegSlot(true));
+      const SlotIndex SplitPos = DefVNI->def;
+      // We merge I and its predecessor.
+      LiveRange::iterator Prev = std::prev(I);
+      DefVNI = Prev->valno;
+      *I = LiveRange::Segment(Prev->start, I->end, I->valno);
+      // I's predecessor is now undef.
+      // Slide [NewI, Prev) down one position.
+      std::copy_backward(NewI, Prev, I);
+      // NewI is now considered undef.
+      LiveRange::iterator Next = std::next(NewI);
+      if (SlotIndex::isEarlierInstr(Next->start, NewIdx)) {
+        // There is no gap between NewI and its predecessor.
+        NewI->start = Next->start;
+        NewI->end = SplitPos;
+        NewI->valno = Next->valno;
+        NewI->valno->def = NewI->start;
+        Next->start = SplitPos;
+        Next->valno = DefVNI;
+        Next->valno->def = SplitPos;
+      } else {
+        // There is a gap between NewI and its predecessor;
+        // the value becomes live-in.
+        NewI->start = SplitPos;
+        NewI->end = Next->start;
+        NewI->valno = Next->valno;
+        NewI->valno->def = SplitPos;
+        Next->valno = DefVNI;
+      }
+      return;
+    }
+
     // There is no existing def at NewIdx. Hoist DefVNI.
     if (!I->end.isDead()) {
       // Leave the end point of a live def.
@@ -1175,10 +1289,10 @@
   // Return the last use of reg between NewIdx and OldIdx.
-  SlotIndex findLastUseBefore(unsigned Reg, unsigned LaneMask) {
-
+  SlotIndex findLastUseBefore(SlotIndex Before, unsigned Reg,
+                              unsigned LaneMask) {
     if (TargetRegisterInfo::isVirtualRegister(Reg)) {
-      SlotIndex LastUse = NewIdx;
+      SlotIndex LastUse = Before;
       for (MachineOperand &MO : MRI.use_nodbg_operands(Reg)) {
         unsigned SubReg = MO.getSubReg();
         if (SubReg != 0 && LaneMask != 0
@@ -1188,16 +1302,16 @@
         const MachineInstr *MI = MO.getParent();
         SlotIndex InstSlot = LIS.getSlotIndexes()->getInstructionIndex(MI);
         if (InstSlot > LastUse && InstSlot < OldIdx)
-          LastUse = InstSlot;
+          LastUse = InstSlot.getRegSlot();
       }
       return LastUse;
     }

     // This is a regunit interval, so scanning the use list could be very
     // expensive. Scan upwards from OldIdx instead.
-    assert(NewIdx < OldIdx && "Expected upwards move");
+    assert(Before < OldIdx && "Expected upwards move");
     SlotIndexes *Indexes = LIS.getSlotIndexes();
-    MachineBasicBlock *MBB = Indexes->getMBBFromIndex(NewIdx);
+    MachineBasicBlock *MBB = Indexes->getMBBFromIndex(Before);

     // OldIdx may not correspond to an instruction any longer, so set MII to
     // point to the next instruction after OldIdx, or MBB->end().
@@ -1213,19 +1327,19 @@
         continue;
       SlotIndex Idx = Indexes->getInstructionIndex(MII);

-      // Stop searching when NewIdx is reached.
-      if (!SlotIndex::isEarlierInstr(NewIdx, Idx))
-        return NewIdx;
+      // Stop searching when Before is reached.
+      if (!SlotIndex::isEarlierInstr(Before, Idx))
+        return Before;

       // Check if MII uses Reg.
       for (MIBundleOperands MO(MII); MO.isValid(); ++MO)
         if (MO->isReg() &&
             TargetRegisterInfo::isPhysicalRegister(MO->getReg()) &&
             TRI.hasRegUnit(MO->getReg(), Reg))
-          return Idx;
+          return Idx.getRegSlot();
     }
-    // Didn't reach NewIdx. It must be the first instruction in the block.
-    return NewIdx;
+    // Didn't reach Before. It must be the first instruction in the block.
+    return Before;
   }
 };
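
For readers unfamiliar with case 6, here is a small standalone sketch (not LLVM code and not part of the patch) of the net effect the new handleMoveDown comments describe: when a def is moved down across unrelated values, the segment that starts at OldIdx is merged into its predecessor, and the segment that contains NewIdx is split so the moved value is defined there. The Segment struct, the moveDefDown helper and the indices are invented for illustration; the real patch edits LiveRange segments and VNInfo defs in place.

// Minimal model of handleMoveDown case 6; illustration only, not LLVM code.
#include <algorithm>
#include <cassert>
#include <cstdio>
#include <iterator>
#include <vector>

struct Segment {
  int Start, End; // half-open [Start, End)
  int ValNo;      // stand-in for a value number (VNInfo)
};

// Move a def down from OldIdx to NewIdx across unrelated values: merge the
// segment that starts at OldIdx into its predecessor, then split the segment
// that contains NewIdx so the moved value is defined there.
static void moveDefDown(std::vector<Segment> &LR, int OldIdx, int NewIdx) {
  auto I = std::find_if(LR.begin(), LR.end(),
                        [&](const Segment &S) { return S.Start == OldIdx; });
  assert(I != LR.end() && I != LR.begin() && "expected a def with a predecessor");
  int MovedVal = I->ValNo;
  std::prev(I)->End = I->End; // the predecessor now covers the old segment
  I = LR.erase(I);
  auto N = std::find_if(I, LR.end(), [&](const Segment &S) {
    return S.Start <= NewIdx && NewIdx < S.End;
  });
  assert(N != LR.end() && "NewIdx expected to fall inside a live segment");
  Segment Tail = {NewIdx, N->End, MovedVal}; // the moved def starts here
  N->End = NewIdx;
  LR.insert(std::next(N), Tail);
}

int main() {
  // Independent subregister writes to one vreg at 5, 10, 20 and 30, all read
  // at 40, so the main range has one value per def. The write at 10 is moved
  // down to 35, past the unrelated writes at 20 and 30.
  std::vector<Segment> LR = {{5, 10, 0}, {10, 20, 1}, {20, 30, 2}, {30, 40, 3}};
  moveDefDown(LR, /*OldIdx=*/10, /*NewIdx=*/35);
  // Prints [5,20):v0 [20,30):v2 [30,35):v3 [35,40):v1 -- the old segment was
  // merged into its predecessor and the segment containing 35 was split.
  for (const Segment &S : LR)
    std::printf("[%d,%d):v%d ", S.Start, S.End, S.ValNo);
  std::printf("\n");
}

The patch itself cannot simply erase and insert: it has to keep each VNInfo::def in sync with its segment and reuse the existing segment slots via std::copy/std::copy_backward, which is what the extra bookkeeping in the hunks above is doing.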