Lines Matching +full:saw +full:- +full:reg
1 //===- InlineSpiller.cpp - Insert spills and restores inline --------------===//
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
7 //===----------------------------------------------------------------------===//
12 //===----------------------------------------------------------------------===//
46 #include "llvm/Config/llvm-config.h"
74 RestrictStatepointRemat("restrict-statepoint-remat",
208 bool isRegToSpill(Register Reg) { return is_contained(RegsToSpill, Reg); }
210 bool isSibling(Register Reg);
219 bool coalesceStackAccess(MachineInstr *MI, Register Reg);
225 void spillAroundUses(Register Reg);
241 //===----------------------------------------------------------------------===//
243 //===----------------------------------------------------------------------===//
250 // This minimizes register pressure and maximizes the store-to-load distance for
253 /// isFullCopyOf - If MI is a COPY to or from Reg, return the other register,
255 static Register isCopyOf(const MachineInstr &MI, Register Reg,
266 if (DstOp.getReg() == Reg)
268 if (SrcOp.getReg() == Reg)
274 static Register isCopyOfBundle(const MachineInstr &FirstMI, Register Reg,
277 return isCopyOf(FirstMI, Reg, TII);
284 while (I->isBundledWithSucc()) {
290 const MachineOperand &DstOp = *CopyInst->Destination;
291 const MachineOperand &SrcOp = *CopyInst->Source;
292 if (DstOp.getReg() == Reg) {
297 } else if (SrcOp.getReg() == Reg) {
316 /// isSnippet - Identify if a live interval is a snippet that should be spilled.
318 /// Edit->getReg().
320 Register Reg = Edit->getReg();
323 // besides copies to/from Reg or spills/fills.
328 // %snip = COPY %Reg / FILL fi#
331 // %Reg = COPY %snip / SPILL %snip, fi#
340 MachineInstr *MI = LIS.getInstructionFromIndex(VNI->def);
341 if (MI->getOpcode() == TargetOpcode::STATEPOINT)
342 --NumValNums;
351 RI = MRI.reg_bundle_nodbg_begin(SnipLI.reg()),
356 // Allow copies to/from Reg.
357 if (isCopyOfBundle(MI, Reg, TII))
362 if (SnipLI.reg() == TII.isLoadFromStackSlot(MI, FI) && FI == StackSlot)
366 if (SnipLI.reg() == TII.isStoreToStackSlot(MI, FI) && FI == StackSlot)
369 if (StatepointOpers::isFoldableReg(&MI, SnipLI.reg()))
380 /// collectRegsToSpill - Collect live range snippets that only have a single
383 Register Reg = Edit->getReg();
386 RegsToSpill.assign(1, Reg);
391 if (Original == Reg)
394 for (MachineInstr &MI : llvm::make_early_inc_range(MRI.reg_bundles(Reg))) {
395 Register SnipReg = isCopyOfBundle(MI, Reg, TII);
410 bool InlineSpiller::isSibling(Register Reg) {
411 return Reg.isVirtual() && VRM.getOriginal(Reg) == Original;
438 assert(VNI && VNI->def == Idx.getRegSlot() && "Not defined by copy");
445 MachineBasicBlock *DefMBB = LIS.getMBBFromIndex(SrcVNI->def);
455 StackInt->MergeValueInAsValue(OrigLI, OrigVNI, StackInt->getValNumInfo(0));
456 LLVM_DEBUG(dbgs() << "\tmerged orig valno " << OrigVNI->id << ": "
463 MachineBasicBlock *MBB = LIS.getMBBFromIndex(SrcVNI->def);
465 if (SrcVNI->isPHIDef())
466 MII = MBB->SkipPHIsLabelsAndDebug(MBB->begin(), SrcReg);
468 MachineInstr *DefMI = LIS.getInstructionFromIndex(SrcVNI->def);
480 --MII; // Point to store instruction.
481 LLVM_DEBUG(dbgs() << "\thoisted: " << SrcVNI->def << '\t' << *MII);
492 /// eliminateRedundantSpills - SLI:VNI is known to be on the stack. Remove any
493 /// redundant spills of this value in SLI.reg and sibling copies.
503 Register Reg = LI->reg();
504 LLVM_DEBUG(dbgs() << "Checking redundant spills for " << VNI->id << '@'
505 << VNI->def << " in " << *LI << '\n');
508 if (isRegToSpill(Reg))
512 StackInt->MergeValueInAsValue(*LI, VNI, StackInt->getValNumInfo(0));
517 llvm::make_early_inc_range(MRI.use_nodbg_bundles(Reg))) {
521 if (LI->getVNInfoAt(Idx) != VNI)
525 if (Register DstReg = isCopyOfBundle(MI, Reg, TII)) {
530 assert(DstVNI->def == Idx.getRegSlot() && "Wrong copy def slot");
539 if (Reg == TII.isStoreToStackSlot(MI, FI) && FI == StackSlot) {
546 --NumSpills;
552 //===----------------------------------------------------------------------===//
554 //===----------------------------------------------------------------------===//
556 /// markValueUsed - Remember that VNI failed to rematerialize, so its defining
566 if (VNI->isPHIDef()) {
567 MachineBasicBlock *MBB = LIS.getMBBFromIndex(VNI->def);
568 for (MachineBasicBlock *P : MBB->predecessors()) {
569 VNInfo *PVNI = LI->getVNInfoBefore(LIS.getMBBEndIdx(P));
577 MachineInstr *MI = LIS.getInstructionFromIndex(VNI->def);
580 LiveInterval &SnipLI = LIS.getInterval(MI->getOperand(1).getReg());
581 assert(isRegToSpill(SnipLI.reg()) && "Unexpected register in copy");
582 VNInfo *SnipVNI = SnipLI.getVNInfoAt(VNI->def.getRegSlot(true));
609 // For STATEPOINTs we allow re-materialization for fixed arguments only hoping
622 /// reMaterializeFor - Attempt to rematerialize before MI instead of reloading.
626 VirtRegInfo RI = AnalyzeVirtRegInBundle(MI, VirtReg.reg(), &Ops);
637 if (MO.getReg() == VirtReg.reg())
649 RM.OrigMI = LIS.getInstructionFromIndex(OrigVNI->def);
651 if (!Edit->canRematerializeAt(RM, OrigVNI, UseIdx, false)) {
657 // If the instruction also writes VirtReg.reg, it had better not require the
661 LLVM_DEBUG(dbgs() << "\tcannot remat tied reg: " << UseIdx << '\t' << MI);
667 if (RM.OrigMI->canFoldAsLoad() &&
669 Edit->markRematerialized(RM.ParentVNI);
676 if (!canGuaranteeAssignmentAfterRemat(VirtReg.reg(), MI)) {
683 Register NewVReg = Edit->createFrom(Original);
687 Edit->rematerializeAt(*MI.getParent(), MI, NewVReg, RM, TRI);
692 NewMI->setDebugLoc(MI.getDebugLoc());
700 MachineOperand &MO = OpPair.first->getOperand(OpPair.second);
701 if (MO.isReg() && MO.isUse() && MO.getReg() == VirtReg.reg()) {
712 /// reMaterializeAll - Try to rematerialize as many uses as possible,
715 if (!Edit->anyRematerializable())
722 for (Register Reg : RegsToSpill) {
723 LiveInterval &LI = LIS.getInterval(Reg);
724 for (MachineInstr &MI : llvm::make_early_inc_range(MRI.reg_bundles(Reg))) {
739 for (Register Reg : RegsToSpill) {
740 LiveInterval &LI = LIS.getInterval(Reg);
742 if (VNI->isUnused() || VNI->isPHIDef() || UsedValues.count(VNI))
744 MachineInstr *MI = LIS.getInstructionFromIndex(VNI->def);
745 MI->addRegisterDead(Reg, &TRI);
746 if (!MI->allDefsAreDead())
753 if (MI->isBundledWithSucc() && !MI->isBundledWithPred()) {
754 MachineBasicBlock::instr_iterator BeginIt = MI->getIterator(),
755 EndIt = MI->getParent()->instr_end();
760 It != EndIt && It->isBundledWithPred(); ++It) {
764 DestSrc && DestSrc->Destination->getReg() == Reg;
772 It != EndIt && It->isBundledWithPred(); ++It) {
773 It->addRegisterDead(Reg, &TRI);
787 Edit->eliminateDeadDefs(DeadDefs, RegsToSpill);
791 // LiveIntervals::removeVRegDefAt is used. However, after non-PHI VNIs are all
793 // So to get rid of unused reg, we need to check whether it has non-dbg
794 // reference instead of whether it has non-empty interval.
796 for (Register Reg : RegsToSpill) {
797 if (MRI.reg_nodbg_empty(Reg)) {
798 Edit->eraseVirtReg(Reg);
802 assert(LIS.hasInterval(Reg) &&
803 (!LIS.getInterval(Reg).empty() || !MRI.reg_nodbg_empty(Reg)) &&
804 "Empty and not used live-range?!");
806 RegsToSpill[ResultPos++] = Reg;
813 //===----------------------------------------------------------------------===//
815 //===----------------------------------------------------------------------===//
818 bool InlineSpiller::coalesceStackAccess(MachineInstr *MI, Register Reg) {
826 if (InstrReg != Reg || FI != StackSlot)
834 MI->eraseFromParent();
838 --NumReloads;
841 --NumSpills;
870 // early-clobber slot index.
872 MachineOperand *MO = I->findRegisterDefOperand(VReg, /*TRI=*/nullptr);
873 if (MO && MO->isEarlyClobber())
882 /// foldMemoryOperand - Try folding stack slot references in Ops into their
886 /// @param LoadMI Load instruction to use instead of stack slot when non-null.
895 if (Ops.back().first != MI || MI->isBundled())
907 bool UntieRegs = MI->getOpcode() == TargetOpcode::STATEPOINT;
912 MI->getOpcode() == TargetOpcode::STATEPOINT ||
913 MI->getOpcode() == TargetOpcode::PATCHPOINT ||
914 MI->getOpcode() == TargetOpcode::STACKMAP;
916 // TargetInstrInfo::foldMemoryOperand only expects explicit, non-tied
922 MachineOperand &MO = MI->getOperand(Idx);
941 if (UntieRegs || !MI->isRegTiedToDefOperand(Idx))
950 MachineInstrSpan MIS(MI, MI->getParent());
955 MachineOperand &MO = MI->getOperand(Idx);
958 unsigned Tied = MI->findTiedOperandIdx(Idx);
965 MI->untieRegOperand(Idx);
972 // Re-tie operands.
974 MI->tieOperands(Tied.first, Tied.second);
980 if (!MO->isReg())
982 Register Reg = MO->getReg();
983 if (!Reg || Reg.isVirtual() || MRI.isReserved(Reg)) {
986 // Skip non-Defs, including undef uses and internal reads.
987 if (MO->isUse())
989 PhysRegInfo RI = AnalyzePhysRegInBundle(*FoldMI, Reg, &TRI);
993 assert(MO->isDead() && "Cannot fold physreg def");
995 LIS.removePhysRegDefAt(Reg.asMCReg(), Idx);
1001 --NumSpills;
1004 if (MI->isCandidateForCallSiteEntry())
1005 MI->getMF()->moveCallSiteInfo(MI, FoldMI);
1007 // If we've folded a store into an instruction labelled with debug-info,
1012 if (MI->peekDebugInstrNum() && Ops[0].second == 0) {
1017 unsigned NewNum = FoldMI->getDebugInstrNum();
1018 unsigned OldNum = MI->getDebugInstrNum();
1023 const MachineOperand &Op0 = MI->getOperand(Ops[0].second);
1026 } else if (Ops.size() == 2 && Op0.isDef() && MI->getOperand(1).isTied() &&
1027 Op0.getReg() == MI->getOperand(1).getReg()) {
1030 } else if (MI->peekDebugInstrNum()) {
1031 // This is a debug-labelled instruction, but the operand being folded isn't
1034 // folded -- past that point, we don't know what the new operand indexes
1039 MI->eraseFromParent();
1050 for (unsigned i = FoldMI->getNumOperands(); i; --i) {
1051 MachineOperand &MO = FoldMI->getOperand(i - 1);
1055 FoldMI->removeOperand(i - 1);
1078 MachineBasicBlock &MBB = *MI->getParent();
1104 /// insertSpill - Insert a spill of NewVReg after MI.
1109 assert(!MI->isTerminator() && "Inserting a spill after a terminator");
1110 MachineBasicBlock &MBB = *MI->getParent();
1124 BuildMI(MBB, SpillBefore, MI->getDebugLoc(), TII.get(TargetOpcode::KILL))
1142 /// spillAroundUses - insert spill code around each use of Reg.
1143 void InlineSpiller::spillAroundUses(Register Reg) {
1144 LLVM_DEBUG(dbgs() << "spillAroundUses " << printReg(Reg) << '\n');
1145 LiveInterval &OldLI = LIS.getInterval(Reg);
1147 // Iterate over instructions using Reg.
1148 for (MachineInstr &MI : llvm::make_early_inc_range(MRI.reg_bundles(Reg))) {
1154 buildDbgValueForSpill(*MBB, &MI, MI, StackSlot, Reg);
1155 MBB->erase(MI);
1167 if (coalesceStackAccess(&MI, Reg))
1172 VirtRegInfo RI = AnalyzeVirtRegInBundle(MI, Reg, &Ops);
1178 if (SlotIndex::isSameInstr(Idx, VNI->def))
1179 Idx = VNI->def;
1182 Register SibReg = isCopyOfBundle(MI, Reg, TII);
1198 // This is a reload for a sib-reg copy. Drop spills downstream.
1211 Register NewVReg = Edit->createFrom(Reg);
1219 MachineOperand &MO = OpPair.first->getOperand(OpPair.second);
1222 if (!OpPair.first->isRegTiedToDefOperand(OpPair.second))
1238 /// spillAll - Spill all registers remaining after rematerialization.
1244 StackInt->getNextValue(SlotIndex(), LSS.getVNInfoAllocator());
1248 if (Original != Edit->getReg())
1249 VRM.assignVirt2StackSlot(Edit->getReg(), StackSlot);
1251 assert(StackInt->getNumValNums() == 1 && "Bad stack interval values");
1252 for (Register Reg : RegsToSpill)
1253 StackInt->MergeSegmentsInAsValue(LIS.getInterval(Reg),
1254 StackInt->getValNumInfo(0));
1258 for (Register Reg : RegsToSpill)
1259 spillAroundUses(Reg);
1264 Edit->eliminateDeadDefs(DeadDefs, RegsToSpill);
1268 for (Register Reg : RegsToSpill) {
1270 llvm::make_early_inc_range(MRI.reg_instructions(Reg))) {
1273 LIS.getSlotIndexes()->removeSingleMachineInstrFromMaps(MI);
1279 for (Register Reg : RegsToSpill)
1280 Edit->eraseVirtReg(Reg);
1308 Edit->calculateRegClassAndHint(MF, VRAI);
1311 /// Optimizations after all the reg selections and spills are done.
1322 auto LI = std::make_unique<LiveInterval>(OrigLI.reg(), OrigLI.weight());
1323 LI->assign(OrigLI, Allocator);
1327 VNInfo *OrigVNI = StackSlotToOrigLI[StackSlot]->getVNInfoAt(Idx.getRegSlot());
1340 VNInfo *OrigVNI = It->second->getVNInfoAt(Idx.getRegSlot());
1355 LLVM_DEBUG(dbgs() << "can't spill in root block - def after LIP\n");
1358 Register OrigReg = OrigLI.reg();
1379 // For each spill saw, check SpillBBToSpill[] and see if its BB already has
1383 MachineBasicBlock *Block = CurrentSpill->getParent();
1401 /// Starting from \p Root find a top-down traversal order of the dominator
1417 // non-redundant spill to the Root node.
1423 MachineDomTreeNode *RootIDomNode = MDT[Root]->getIDom();
1426 // containing spill in the middle of the path, the previous spill saw will
1428 // the path starting from the first node with non-redundant spill to the Root
1432 MachineBasicBlock *Block = Spill->getParent();
1449 Node = Node->getIDom();
1454 // Add a BB containing the original spills to SpillsToKeep -- i.e.,
1465 // Sort the nodes in WorkSet in top-down order and save the nodes
1471 for (MachineDomTreeNode *Child : Node->children()) {
1483 LLVM_DEBUG(dbgs() << "BB" << (*RIt)->getBlock()->getNumber() << ",");
1521 // Iterate Orders set in reverse order, which will be a bottom-up order
1527 MachineBasicBlock *Block = (*RIt)->getBlock();
1539 for (MachineDomTreeNode *Child : (*RIt)->children()) {
1592 dbgs() << Rspill->getBlock()->getNumber() << " ";
1593 dbgs() << "were promoted to BB" << (*RIt)->getBlock()->getNumber()
1605 SpillsToIns[Ent.first->getBlock()] = Ent.second;
1619 /// collect the BB nodes on the path from non-dominated spills to the define
1622 /// bottom-up order, and for each node, we will decide whether to hoist spills
1630 Register Reg = Register::index2VirtReg(i);
1631 Register Original = VRM.getPreSplitReg(Reg);
1632 if (!MRI.def_empty(Reg))
1633 Virt2SiblingsMap[Original].insert(Reg);
1646 dbgs() << "\nFor Slot" << Slot << " and VN" << OrigVNI->id << ":\n"
1649 dbgs() << spill->getParent()->getNumber() << " ";
1663 dbgs() << Ispill.first->getNumber() << " ";
1666 dbgs() << Rspill->getParent()->getNumber() << " ";
1691 NumSpills -= SpillsToRm.size();
1693 RMEnt->setDesc(TII.get(TargetOpcode::KILL));
1694 for (unsigned i = RMEnt->getNumOperands(); i; --i) {
1695 MachineOperand &MO = RMEnt->getOperand(i - 1);
1697 RMEnt->removeOperand(i - 1);