//===- MachineCopyPropagation.cpp - Machine Copy Propagation Pass ---------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This is an extremely simple MachineInstr-level copy propagation pass.
//
// This pass forwards the source of COPYs to the users of their destinations
// when doing so is legal. For example:
//
//   %reg1 = COPY %reg0
//   ...
//   ... = OP %reg1
//
// If
//   - %reg0 has not been clobbered by the time of the use of %reg1
//   - the register class constraints are satisfied
//   - the COPY def is the only value that reaches OP
// then this pass replaces the above with:
//
//   %reg1 = COPY %reg0
//   ...
//   ... = OP %reg0
//
// This pass also removes some redundant COPYs. For example:
//
//   %R1 = COPY %R0
//   ...            // No clobber of %R1
//   %R0 = COPY %R1 <<< Removed
//
// or
//
//   %R1 = COPY %R0
//   ...            // No clobber of %R0
//   %R1 = COPY %R0 <<< Removed
//
// or
//
//   $R0 = OP ...
//   ...            // No read/clobber of $R0 and $R1
//   $R1 = COPY $R0 // $R0 is killed
// Replace $R0 with $R1 and remove the COPY
//   $R1 = OP ...
//   ...
//
//===----------------------------------------------------------------------===//

#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/iterator_range.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/InitializePasses.h"
#include "llvm/MC/MCRegisterInfo.h"
#include "llvm/Pass.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/DebugCounter.h"
#include "llvm/Support/raw_ostream.h"
#include <cassert>
#include <iterator>

using namespace llvm;

#define DEBUG_TYPE "machine-cp"

STATISTIC(NumDeletes, "Number of dead copies deleted");
STATISTIC(NumCopyForwards, "Number of copy uses forwarded");
STATISTIC(NumCopyBackwardPropagated, "Number of copy defs backward propagated");
DEBUG_COUNTER(FwdCounter, "machine-cp-fwd",
              "Controls which register COPYs are forwarded");

namespace {

class CopyTracker {
  struct CopyInfo {
    MachineInstr *MI;
    SmallVector<MCRegister, 4> DefRegs;
    bool Avail;
  };

  DenseMap<MCRegister, CopyInfo> Copies;

public:
  /// Mark all of the given registers and their subregisters as unavailable for
  /// copying.
  void markRegsUnavailable(ArrayRef<MCRegister> Regs,
                           const TargetRegisterInfo &TRI) {
    for (MCRegister Reg : Regs) {
      // Source of copy is no longer available for propagation.
      for (MCRegUnitIterator RUI(Reg, &TRI); RUI.isValid(); ++RUI) {
        auto CI = Copies.find(*RUI);
        if (CI != Copies.end())
          CI->second.Avail = false;
      }
    }
  }

  /// Remove register from copy maps.
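  /// For example (register names are illustrative): if the tracker holds
  /// "$rbx = COPY $rax" and $eax is invalidated, the entries for both $rax
  /// and $rbx are erased, since $eax overlaps the tracked copy.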
  void invalidateRegister(MCRegister Reg, const TargetRegisterInfo &TRI) {
    // Since Reg might be a subreg of some registers, invalidating Reg alone is
    // not enough. We have to find the COPY that defines Reg or the registers
    // defined by Reg and invalidate all of them.
    SmallSet<MCRegister, 8> RegsToInvalidate;
    RegsToInvalidate.insert(Reg);
    for (MCRegUnitIterator RUI(Reg, &TRI); RUI.isValid(); ++RUI) {
      auto I = Copies.find(*RUI);
      if (I != Copies.end()) {
        if (MachineInstr *MI = I->second.MI) {
          RegsToInvalidate.insert(MI->getOperand(0).getReg().asMCReg());
          RegsToInvalidate.insert(MI->getOperand(1).getReg().asMCReg());
        }
        RegsToInvalidate.insert(I->second.DefRegs.begin(),
                                I->second.DefRegs.end());
      }
    }
    for (MCRegister InvalidReg : RegsToInvalidate)
      for (MCRegUnitIterator RUI(InvalidReg, &TRI); RUI.isValid(); ++RUI)
        Copies.erase(*RUI);
  }

  /// Clobber a single register, removing it from the tracker's copy maps.
  void clobberRegister(MCRegister Reg, const TargetRegisterInfo &TRI) {
    for (MCRegUnitIterator RUI(Reg, &TRI); RUI.isValid(); ++RUI) {
      auto I = Copies.find(*RUI);
      if (I != Copies.end()) {
        // When we clobber the source of a copy, we need to clobber everything
        // it defined.
        markRegsUnavailable(I->second.DefRegs, TRI);
        // When we clobber the destination of a copy, we need to clobber the
        // whole register it defined.
        if (MachineInstr *MI = I->second.MI)
          markRegsUnavailable({MI->getOperand(0).getReg().asMCReg()}, TRI);
        // Now we can erase the copy.
        Copies.erase(I);
      }
    }
  }

  /// Add this copy's registers into the tracker's copy maps.
  void trackCopy(MachineInstr *MI, const TargetRegisterInfo &TRI) {
    assert(MI->isCopy() && "Tracking non-copy?");

    MCRegister Def = MI->getOperand(0).getReg().asMCReg();
    MCRegister Src = MI->getOperand(1).getReg().asMCReg();

    // Remember that Def is defined by the copy.
    for (MCRegUnitIterator RUI(Def, &TRI); RUI.isValid(); ++RUI)
      Copies[*RUI] = {MI, {}, true};

    // Remember the source that's copied to Def. Once the source is clobbered,
    // it's no longer available for copy propagation.
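    // E.g. after "$rbx = COPY $rax" (illustrative names), each register unit
    // of $rax maps to an entry whose DefRegs list contains $rbx.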
    for (MCRegUnitIterator RUI(Src, &TRI); RUI.isValid(); ++RUI) {
      auto I = Copies.insert({*RUI, {nullptr, {}, false}});
      auto &Copy = I.first->second;
      if (!is_contained(Copy.DefRegs, Def))
        Copy.DefRegs.push_back(Def);
    }
  }

  bool hasAnyCopies() {
    return !Copies.empty();
  }

  MachineInstr *findCopyForUnit(MCRegister RegUnit,
                                const TargetRegisterInfo &TRI,
                                bool MustBeAvailable = false) {
    auto CI = Copies.find(RegUnit);
    if (CI == Copies.end())
      return nullptr;
    if (MustBeAvailable && !CI->second.Avail)
      return nullptr;
    return CI->second.MI;
  }

  MachineInstr *findCopyDefViaUnit(MCRegister RegUnit,
                                   const TargetRegisterInfo &TRI) {
    auto CI = Copies.find(RegUnit);
    if (CI == Copies.end())
      return nullptr;
    if (CI->second.DefRegs.size() != 1)
      return nullptr;
    MCRegUnitIterator RUI(CI->second.DefRegs[0], &TRI);
    return findCopyForUnit(*RUI, TRI, true);
  }

  MachineInstr *findAvailBackwardCopy(MachineInstr &I, MCRegister Reg,
                                      const TargetRegisterInfo &TRI) {
    MCRegUnitIterator RUI(Reg, &TRI);
    MachineInstr *AvailCopy = findCopyDefViaUnit(*RUI, TRI);
    if (!AvailCopy ||
        !TRI.isSubRegisterEq(AvailCopy->getOperand(1).getReg(), Reg))
      return nullptr;

    Register AvailSrc = AvailCopy->getOperand(1).getReg();
    Register AvailDef = AvailCopy->getOperand(0).getReg();
    for (const MachineInstr &MI :
         make_range(AvailCopy->getReverseIterator(), I.getReverseIterator()))
      for (const MachineOperand &MO : MI.operands())
        if (MO.isRegMask())
          // FIXME: Shall we simultaneously invalidate AvailSrc or AvailDef?
          if (MO.clobbersPhysReg(AvailSrc) || MO.clobbersPhysReg(AvailDef))
            return nullptr;

    return AvailCopy;
  }

  MachineInstr *findAvailCopy(MachineInstr &DestCopy, MCRegister Reg,
                              const TargetRegisterInfo &TRI) {
    // We check the first RegUnit here, since we'll only be interested in the
    // copy if it copies the entire register anyway.
    MCRegUnitIterator RUI(Reg, &TRI);
    MachineInstr *AvailCopy =
        findCopyForUnit(*RUI, TRI, /*MustBeAvailable=*/true);
    if (!AvailCopy ||
        !TRI.isSubRegisterEq(AvailCopy->getOperand(0).getReg(), Reg))
      return nullptr;

    // Check that the available copy isn't clobbered by any regmasks between
    // itself and the destination.
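    // (Regmask operands typically come from calls, which may clobber the
    // copy's source or destination even though no explicit def appears in
    // between.)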
    Register AvailSrc = AvailCopy->getOperand(1).getReg();
    Register AvailDef = AvailCopy->getOperand(0).getReg();
    for (const MachineInstr &MI :
         make_range(AvailCopy->getIterator(), DestCopy.getIterator()))
      for (const MachineOperand &MO : MI.operands())
        if (MO.isRegMask())
          if (MO.clobbersPhysReg(AvailSrc) || MO.clobbersPhysReg(AvailDef))
            return nullptr;

    return AvailCopy;
  }

  void clear() {
    Copies.clear();
  }
};

class MachineCopyPropagation : public MachineFunctionPass {
  const TargetRegisterInfo *TRI;
  const TargetInstrInfo *TII;
  const MachineRegisterInfo *MRI;

public:
  static char ID; // Pass identification, replacement for typeid

  MachineCopyPropagation() : MachineFunctionPass(ID) {
    initializeMachineCopyPropagationPass(*PassRegistry::getPassRegistry());
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.setPreservesCFG();
    MachineFunctionPass::getAnalysisUsage(AU);
  }

  bool runOnMachineFunction(MachineFunction &MF) override;

  MachineFunctionProperties getRequiredProperties() const override {
    return MachineFunctionProperties().set(
        MachineFunctionProperties::Property::NoVRegs);
  }

private:
  typedef enum { DebugUse = false, RegularUse = true } DebugType;

  void ReadRegister(MCRegister Reg, MachineInstr &Reader, DebugType DT);
  void ForwardCopyPropagateBlock(MachineBasicBlock &MBB);
  void BackwardCopyPropagateBlock(MachineBasicBlock &MBB);
  bool eraseIfRedundant(MachineInstr &Copy, MCRegister Src, MCRegister Def);
  void forwardUses(MachineInstr &MI);
  void propagateDefs(MachineInstr &MI);
  bool isForwardableRegClassCopy(const MachineInstr &Copy,
                                 const MachineInstr &UseI, unsigned UseIdx);
  bool isBackwardPropagatableRegClassCopy(const MachineInstr &Copy,
                                          const MachineInstr &UseI,
                                          unsigned UseIdx);
  bool hasImplicitOverlap(const MachineInstr &MI, const MachineOperand &Use);
  bool hasOverlappingMultipleDef(const MachineInstr &MI,
                                 const MachineOperand &MODef, Register Def);

  /// Candidates for deletion.
  SmallSetVector<MachineInstr *, 8> MaybeDeadCopies;

  /// Multimap tracking debug users in current BB
  DenseMap<MachineInstr*, SmallVector<MachineInstr*, 2>> CopyDbgUsers;

  CopyTracker Tracker;

  bool Changed;
};

} // end anonymous namespace

char MachineCopyPropagation::ID = 0;

char &llvm::MachineCopyPropagationID = MachineCopyPropagation::ID;

INITIALIZE_PASS(MachineCopyPropagation, DEBUG_TYPE,
                "Machine Copy Propagation Pass", false, false)

void MachineCopyPropagation::ReadRegister(MCRegister Reg, MachineInstr &Reader,
                                          DebugType DT) {
  // If 'Reg' is defined by a copy, the copy is no longer a candidate
  // for elimination. If a copy is "read" by a debug user, record the user
  // for propagation.
  for (MCRegUnitIterator RUI(Reg, TRI); RUI.isValid(); ++RUI) {
    if (MachineInstr *Copy = Tracker.findCopyForUnit(*RUI, *TRI)) {
      if (DT == RegularUse) {
        LLVM_DEBUG(dbgs() << "MCP: Copy is used - not dead: "; Copy->dump());
        MaybeDeadCopies.remove(Copy);
      } else {
        CopyDbgUsers[Copy].push_back(&Reader);
      }
    }
  }
}

/// Return true if \p PreviousCopy did copy register \p Src to register \p Def.
/// This fact may have been obscured by sub register usage or may not be true
/// at all even though Src and Def are subregisters of the registers used in
/// PreviousCopy. e.g.
///   isNopCopy("ecx = COPY eax", AX, CX) == true
///   isNopCopy("ecx = COPY eax", AH, CL) == false
static bool isNopCopy(const MachineInstr &PreviousCopy, MCRegister Src,
                      MCRegister Def, const TargetRegisterInfo *TRI) {
  MCRegister PreviousSrc = PreviousCopy.getOperand(1).getReg().asMCReg();
  MCRegister PreviousDef = PreviousCopy.getOperand(0).getReg().asMCReg();
  if (Src == PreviousSrc && Def == PreviousDef)
    return true;
  if (!TRI->isSubRegister(PreviousSrc, Src))
    return false;
  unsigned SubIdx = TRI->getSubRegIndex(PreviousSrc, Src);
  return SubIdx == TRI->getSubRegIndex(PreviousDef, Def);
}

/// Remove instruction \p Copy if there exists a previous copy that copies the
/// register \p Src to the register \p Def; this may happen indirectly by
/// copying the super registers.
bool MachineCopyPropagation::eraseIfRedundant(MachineInstr &Copy,
                                              MCRegister Src, MCRegister Def) {
  // Avoid eliminating a copy from/to a reserved register as we cannot predict
  // the value (Example: The sparc zero register is writable but stays zero).
  if (MRI->isReserved(Src) || MRI->isReserved(Def))
    return false;

  // Search for an existing copy.
  MachineInstr *PrevCopy = Tracker.findAvailCopy(Copy, Def, *TRI);
  if (!PrevCopy)
    return false;

  // Check that the existing copy uses the correct sub registers.
  if (PrevCopy->getOperand(0).isDead())
    return false;
  if (!isNopCopy(*PrevCopy, Src, Def, TRI))
    return false;

  LLVM_DEBUG(dbgs() << "MCP: copy is a NOP, removing: "; Copy.dump());

  // Copy was redundantly redefining either Src or Def. Remove earlier kill
  // flags between Copy and PrevCopy because the value will be reused now.
  assert(Copy.isCopy());
  Register CopyDef = Copy.getOperand(0).getReg();
  assert(CopyDef == Src || CopyDef == Def);
  for (MachineInstr &MI :
       make_range(PrevCopy->getIterator(), Copy.getIterator()))
    MI.clearRegisterKills(CopyDef, TRI);

  Copy.eraseFromParent();
  Changed = true;
  ++NumDeletes;
  return true;
}

bool MachineCopyPropagation::isBackwardPropagatableRegClassCopy(
    const MachineInstr &Copy, const MachineInstr &UseI, unsigned UseIdx) {
  Register Def = Copy.getOperand(0).getReg();

  if (const TargetRegisterClass *URC =
          UseI.getRegClassConstraint(UseIdx, TII, TRI))
    return URC->contains(Def);

  // We don't process further if UseI is a COPY, since forward copy propagation
  // should handle that.
  return false;
}

/// Decide whether we should forward the source of \param Copy to its use in
/// \param UseI based on the physical register class constraints of the opcode
/// and avoiding introducing more cross-class COPYs.
bool MachineCopyPropagation::isForwardableRegClassCopy(const MachineInstr &Copy,
                                                       const MachineInstr &UseI,
                                                       unsigned UseIdx) {

  Register CopySrcReg = Copy.getOperand(1).getReg();

  // If the new register meets the opcode register constraints, then allow
  // forwarding.
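  // (getRegClassConstraint returns the register class the opcode requires for
  // operand UseIdx, or null if that operand is unconstrained.)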
  if (const TargetRegisterClass *URC =
          UseI.getRegClassConstraint(UseIdx, TII, TRI))
    return URC->contains(CopySrcReg);

  if (!UseI.isCopy())
    return false;

  /// COPYs don't have register class constraints, so if the user instruction
  /// is a COPY, we just try to avoid introducing additional cross-class
  /// COPYs. For example:
  ///
  ///   RegClassA = COPY RegClassB  // Copy parameter
  ///   ...
  ///   RegClassB = COPY RegClassA  // UseI parameter
  ///
  /// which after forwarding becomes
  ///
  ///   RegClassA = COPY RegClassB
  ///   ...
  ///   RegClassB = COPY RegClassB
  ///
  /// so we have reduced the number of cross-class COPYs and potentially
  /// introduced a nop COPY that can be removed.
  const TargetRegisterClass *UseDstRC =
      TRI->getMinimalPhysRegClass(UseI.getOperand(0).getReg());

  const TargetRegisterClass *SuperRC = UseDstRC;
  for (TargetRegisterClass::sc_iterator SuperRCI = UseDstRC->getSuperClasses();
       SuperRC; SuperRC = *SuperRCI++)
    if (SuperRC->contains(CopySrcReg))
      return true;

  return false;
}

/// Check that \p MI does not have implicit uses that overlap with its \p Use
/// operand (the register being replaced), since these can sometimes be
/// implicitly tied to other operands. For example, on AMDGPU:
///
/// V_MOVRELS_B32_e32 %VGPR2, %M0<imp-use>, %EXEC<imp-use>, %VGPR2_VGPR3_VGPR4_VGPR5<imp-use>
///
/// the %VGPR2 is implicitly tied to the larger reg operand, but we have no
/// way of knowing we need to update the latter when updating the former.
bool MachineCopyPropagation::hasImplicitOverlap(const MachineInstr &MI,
                                                const MachineOperand &Use) {
  for (const MachineOperand &MIUse : MI.uses())
    if (&MIUse != &Use && MIUse.isReg() && MIUse.isImplicit() &&
        MIUse.isUse() && TRI->regsOverlap(Use.getReg(), MIUse.getReg()))
      return true;

  return false;
}

/// For an MI that has multiple definitions, check whether \p MI has
/// a definition that overlaps with another of its definitions.
/// For example, on ARM: umull r9, r9, lr, r0
/// The umull instruction is unpredictable unless RdHi and RdLo are different.
bool MachineCopyPropagation::hasOverlappingMultipleDef(
    const MachineInstr &MI, const MachineOperand &MODef, Register Def) {
  for (const MachineOperand &MIDef : MI.defs()) {
    if ((&MIDef != &MODef) && MIDef.isReg() &&
        TRI->regsOverlap(Def, MIDef.getReg()))
      return true;
  }

  return false;
}

/// Look for available copies whose destination register is used by \p MI and
/// replace the use in \p MI with the copy's source register.
void MachineCopyPropagation::forwardUses(MachineInstr &MI) {
  if (!Tracker.hasAnyCopies())
    return;

  // Look for non-tied explicit vreg uses that have an active COPY
  // instruction that defines the physical register allocated to them.
  // Replace the vreg with the source of the active COPY.
  for (unsigned OpIdx = 0, OpEnd = MI.getNumOperands(); OpIdx < OpEnd;
       ++OpIdx) {
    MachineOperand &MOUse = MI.getOperand(OpIdx);
    // Don't forward into undef use operands: the machine verifier doesn't
    // treat undef reads as reads, so forwarding could create a live range
    // that ends at an undef read, which the verifier rejects because the
    // live range doesn't end at a read of that register.
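    // Similarly skip tied, implicit, and def operands below; only plain
    // explicit uses are rewritten here.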
    if (!MOUse.isReg() || MOUse.isTied() || MOUse.isUndef() || MOUse.isDef() ||
        MOUse.isImplicit())
      continue;

    if (!MOUse.getReg())
      continue;

    // Check that the register is marked 'renamable' so we know it is safe to
    // rename it without violating any constraints that aren't expressed in the
    // IR (e.g. ABI or opcode requirements).
    if (!MOUse.isRenamable())
      continue;

    MachineInstr *Copy =
        Tracker.findAvailCopy(MI, MOUse.getReg().asMCReg(), *TRI);
    if (!Copy)
      continue;

    Register CopyDstReg = Copy->getOperand(0).getReg();
    const MachineOperand &CopySrc = Copy->getOperand(1);
    Register CopySrcReg = CopySrc.getReg();

    // FIXME: Don't handle partial uses of wider COPYs yet.
    if (MOUse.getReg() != CopyDstReg) {
      LLVM_DEBUG(
          dbgs() << "MCP: FIXME! Not forwarding COPY to sub-register use:\n "
                 << MI);
      continue;
    }

    // Don't forward COPYs of reserved regs unless they are constant.
    if (MRI->isReserved(CopySrcReg) && !MRI->isConstantPhysReg(CopySrcReg))
      continue;

    if (!isForwardableRegClassCopy(*Copy, MI, OpIdx))
      continue;

    if (hasImplicitOverlap(MI, MOUse))
      continue;

    // Check that the instruction is not a copy that partially overwrites the
    // original copy source that we are about to use. The tracker mechanism
    // cannot cope with that.
    if (MI.isCopy() && MI.modifiesRegister(CopySrcReg, TRI) &&
        !MI.definesRegister(CopySrcReg)) {
      LLVM_DEBUG(dbgs() << "MCP: Copy source overlap with dest in " << MI);
      continue;
    }

    if (!DebugCounter::shouldExecute(FwdCounter)) {
      LLVM_DEBUG(dbgs() << "MCP: Skipping forwarding due to debug counter:\n "
                        << MI);
      continue;
    }

    LLVM_DEBUG(dbgs() << "MCP: Replacing " << printReg(MOUse.getReg(), TRI)
                      << "\n with " << printReg(CopySrcReg, TRI)
                      << "\n in " << MI << " from " << *Copy);

    MOUse.setReg(CopySrcReg);
    if (!CopySrc.isRenamable())
      MOUse.setIsRenamable(false);

    LLVM_DEBUG(dbgs() << "MCP: After replacement: " << MI << "\n");

    // Clear kill markers that may have been invalidated.
    for (MachineInstr &KMI :
         make_range(Copy->getIterator(), std::next(MI.getIterator())))
      KMI.clearRegisterKills(CopySrcReg, TRI);

    ++NumCopyForwards;
    Changed = true;
  }
}

void MachineCopyPropagation::ForwardCopyPropagateBlock(MachineBasicBlock &MBB) {
  LLVM_DEBUG(dbgs() << "MCP: ForwardCopyPropagateBlock " << MBB.getName()
                    << "\n");

  for (MachineBasicBlock::iterator I = MBB.begin(), E = MBB.end(); I != E;) {
    MachineInstr *MI = &*I;
    ++I;

    // Analyze copies (which don't overlap themselves).
    if (MI->isCopy() && !TRI->regsOverlap(MI->getOperand(0).getReg(),
                                          MI->getOperand(1).getReg())) {
      assert(MI->getOperand(0).getReg().isPhysical() &&
             MI->getOperand(1).getReg().isPhysical() &&
             "MachineCopyPropagation should be run after register allocation!");

      MCRegister Def = MI->getOperand(0).getReg().asMCReg();
      MCRegister Src = MI->getOperand(1).getReg().asMCReg();

      // If the two copies cancel out and the source of the first copy
      // hasn't been overridden, eliminate the second one. e.g.
      //   %ecx = COPY %eax
      //   ... nothing clobbered eax.
      //   %eax = COPY %ecx
      // =>
      //   %ecx = COPY %eax
      //
      // or
      //
      //   %ecx = COPY %eax
      //   ... nothing clobbered eax.
      //   %ecx = COPY %eax
      // =>
      //   %ecx = COPY %eax
      if (eraseIfRedundant(*MI, Def, Src) || eraseIfRedundant(*MI, Src, Def))
        continue;

      forwardUses(*MI);

      // Src may have been changed by forwardUses().
      Src = MI->getOperand(1).getReg().asMCReg();

      // If Src is defined by a previous copy, the previous copy cannot be
      // eliminated.
      ReadRegister(Src, *MI, RegularUse);
      for (const MachineOperand &MO : MI->implicit_operands()) {
        if (!MO.isReg() || !MO.readsReg())
          continue;
        MCRegister Reg = MO.getReg().asMCReg();
        if (!Reg)
          continue;
        ReadRegister(Reg, *MI, RegularUse);
      }

      LLVM_DEBUG(dbgs() << "MCP: Copy is a deletion candidate: "; MI->dump());

      // Copy is now a candidate for deletion.
      if (!MRI->isReserved(Def))
        MaybeDeadCopies.insert(MI);

      // If 'Def' was previously the source of another copy, then that earlier
      // copy's source is no longer available. e.g.
      //   %xmm9 = copy %xmm2
      //   ...
      //   %xmm2 = copy %xmm0
      //   ...
      //   %xmm2 = copy %xmm9
      Tracker.clobberRegister(Def, *TRI);
      for (const MachineOperand &MO : MI->implicit_operands()) {
        if (!MO.isReg() || !MO.isDef())
          continue;
        MCRegister Reg = MO.getReg().asMCReg();
        if (!Reg)
          continue;
        Tracker.clobberRegister(Reg, *TRI);
      }

      Tracker.trackCopy(MI, *TRI);

      continue;
    }

    // Clobber any earlyclobber regs first.
    for (const MachineOperand &MO : MI->operands())
      if (MO.isReg() && MO.isEarlyClobber()) {
        MCRegister Reg = MO.getReg().asMCReg();
        // If we have a tied earlyclobber, that means it is also read by this
        // instruction, so we need to make sure we don't remove it as dead
        // later.
        if (MO.isTied())
          ReadRegister(Reg, *MI, RegularUse);
        Tracker.clobberRegister(Reg, *TRI);
      }

    forwardUses(*MI);

    // Not a copy.
    SmallVector<Register, 2> Defs;
    const MachineOperand *RegMask = nullptr;
    for (const MachineOperand &MO : MI->operands()) {
      if (MO.isRegMask())
        RegMask = &MO;
      if (!MO.isReg())
        continue;
      Register Reg = MO.getReg();
      if (!Reg)
        continue;

      assert(!Reg.isVirtual() &&
             "MachineCopyPropagation should be run after register allocation!");

      if (MO.isDef() && !MO.isEarlyClobber()) {
        Defs.push_back(Reg.asMCReg());
        continue;
      } else if (MO.readsReg())
        ReadRegister(Reg.asMCReg(), *MI, MO.isDebug() ? DebugUse : RegularUse);
    }

    // The instruction has a register mask operand which means that it clobbers
    // a large set of registers. Treat clobbered registers the same way as
    // defined registers.
    if (RegMask) {
      // Erase any MaybeDeadCopies whose destination register is clobbered.
      for (SmallSetVector<MachineInstr *, 8>::iterator DI =
               MaybeDeadCopies.begin();
           DI != MaybeDeadCopies.end();) {
        MachineInstr *MaybeDead = *DI;
        MCRegister Reg = MaybeDead->getOperand(0).getReg().asMCReg();
        assert(!MRI->isReserved(Reg));

        if (!RegMask->clobbersPhysReg(Reg)) {
          ++DI;
          continue;
        }

        LLVM_DEBUG(dbgs() << "MCP: Removing copy due to regmask clobbering: ";
                   MaybeDead->dump());

        // Make sure we invalidate any entries in the copy maps before erasing
        // the instruction.
        Tracker.clobberRegister(Reg, *TRI);

        // erase() will return the next valid iterator pointing to the next
        // element after the erased one.
        DI = MaybeDeadCopies.erase(DI);
        MaybeDead->eraseFromParent();
        Changed = true;
        ++NumDeletes;
      }
    }

    // Any previous copy that defined or read one of the Defs is no longer
    // available.
    for (MCRegister Reg : Defs)
      Tracker.clobberRegister(Reg, *TRI);
  }

  // If MBB doesn't have successors, delete the copies whose defs are not used.
  // If MBB does have successors, then conservatively assume the defs are
  // live-out since we don't want to trust live-in lists.
  if (MBB.succ_empty()) {
    for (MachineInstr *MaybeDead : MaybeDeadCopies) {
      LLVM_DEBUG(dbgs() << "MCP: Removing copy due to no live-out succ: ";
                 MaybeDead->dump());
      assert(!MRI->isReserved(MaybeDead->getOperand(0).getReg()));

      // Update matching debug values, if any.
      assert(MaybeDead->isCopy());
      Register SrcReg = MaybeDead->getOperand(1).getReg();
      MRI->updateDbgUsersToReg(SrcReg, CopyDbgUsers[MaybeDead]);

      MaybeDead->eraseFromParent();
      Changed = true;
      ++NumDeletes;
    }
  }

  MaybeDeadCopies.clear();
  CopyDbgUsers.clear();
  Tracker.clear();
}

static bool isBackwardPropagatableCopy(MachineInstr &MI,
                                       const MachineRegisterInfo &MRI) {
  assert(MI.isCopy() && "MI is expected to be a COPY");
  Register Def = MI.getOperand(0).getReg();
  Register Src = MI.getOperand(1).getReg();

  if (!Def || !Src)
    return false;

  if (MRI.isReserved(Def) || MRI.isReserved(Src))
    return false;

  return MI.getOperand(1).isRenamable() && MI.getOperand(1).isKill();
}

void MachineCopyPropagation::propagateDefs(MachineInstr &MI) {
  if (!Tracker.hasAnyCopies())
    return;

  for (unsigned OpIdx = 0, OpEnd = MI.getNumOperands(); OpIdx != OpEnd;
       ++OpIdx) {
    MachineOperand &MODef = MI.getOperand(OpIdx);

    if (!MODef.isReg() || MODef.isUse())
      continue;

    // Ignore non-trivial cases.
    if (MODef.isTied() || MODef.isUndef() || MODef.isImplicit())
      continue;

    if (!MODef.getReg())
      continue;

    // Only handle defs that are marked renamable, i.e. registers that were
    // allocated for a vreg and are safe to rename.
    if (!MODef.isRenamable())
      continue;

    MachineInstr *Copy =
        Tracker.findAvailBackwardCopy(MI, MODef.getReg().asMCReg(), *TRI);
    if (!Copy)
      continue;

    Register Def = Copy->getOperand(0).getReg();
    Register Src = Copy->getOperand(1).getReg();

    if (MODef.getReg() != Src)
      continue;

    if (!isBackwardPropagatableRegClassCopy(*Copy, MI, OpIdx))
      continue;

    if (hasImplicitOverlap(MI, MODef))
      continue;

    if (hasOverlappingMultipleDef(MI, MODef, Def))
      continue;

    LLVM_DEBUG(dbgs() << "MCP: Replacing " << printReg(MODef.getReg(), TRI)
                      << "\n with " << printReg(Def, TRI) << "\n in "
                      << MI << " from " << *Copy);

    MODef.setReg(Def);
    MODef.setIsRenamable(Copy->getOperand(0).isRenamable());

    LLVM_DEBUG(dbgs() << "MCP: After replacement: " << MI << "\n");
    MaybeDeadCopies.insert(Copy);
    Changed = true;
    ++NumCopyBackwardPropagated;
  }
}

void MachineCopyPropagation::BackwardCopyPropagateBlock(
    MachineBasicBlock &MBB) {
  LLVM_DEBUG(dbgs() << "MCP: BackwardCopyPropagateBlock " << MBB.getName()
                    << "\n");

  for (MachineBasicBlock::reverse_iterator I = MBB.rbegin(), E = MBB.rend();
       I != E;) {
    MachineInstr *MI = &*I;
    ++I;

    // Ignore non-trivial COPYs.
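    // A trivial COPY here is one with exactly two operands whose registers do
    // not overlap, e.g. "$rbx = COPY killed $rax" (illustrative names).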
    if (MI->isCopy() && MI->getNumOperands() == 2 &&
        !TRI->regsOverlap(MI->getOperand(0).getReg(),
                          MI->getOperand(1).getReg())) {

      MCRegister Def = MI->getOperand(0).getReg().asMCReg();
      MCRegister Src = MI->getOperand(1).getReg().asMCReg();

      // Unlike forward cp, we don't invoke propagateDefs here,
      // just let forward cp do COPY-to-COPY propagation.
      if (isBackwardPropagatableCopy(*MI, *MRI)) {
        Tracker.invalidateRegister(Src, *TRI);
        Tracker.invalidateRegister(Def, *TRI);
        Tracker.trackCopy(MI, *TRI);
        continue;
      }
    }

    // Invalidate any earlyclobber regs first.
    for (const MachineOperand &MO : MI->operands())
      if (MO.isReg() && MO.isEarlyClobber()) {
        MCRegister Reg = MO.getReg().asMCReg();
        if (!Reg)
          continue;
        Tracker.invalidateRegister(Reg, *TRI);
      }

    propagateDefs(*MI);
    for (const MachineOperand &MO : MI->operands()) {
      if (!MO.isReg())
        continue;

      if (!MO.getReg())
        continue;

      if (MO.isDef())
        Tracker.invalidateRegister(MO.getReg().asMCReg(), *TRI);

      if (MO.readsReg())
        Tracker.invalidateRegister(MO.getReg().asMCReg(), *TRI);
    }
  }

  for (auto *Copy : MaybeDeadCopies) {
    Copy->eraseFromParent();
    ++NumDeletes;
  }

  MaybeDeadCopies.clear();
  CopyDbgUsers.clear();
  Tracker.clear();
}

bool MachineCopyPropagation::runOnMachineFunction(MachineFunction &MF) {
  if (skipFunction(MF.getFunction()))
    return false;

  Changed = false;

  TRI = MF.getSubtarget().getRegisterInfo();
  TII = MF.getSubtarget().getInstrInfo();
  MRI = &MF.getRegInfo();

  for (MachineBasicBlock &MBB : MF) {
    BackwardCopyPropagateBlock(MBB);
    ForwardCopyPropagateBlock(MBB);
  }

  return Changed;
}