//===- MachineCopyPropagation.cpp - Machine Copy Propagation Pass --------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This is an extremely simple MachineInstr-level copy propagation pass.
//
// This pass forwards the source of COPYs to the users of their destinations
// when doing so is legal. For example:
//
//   %reg1 = COPY %reg0
//   ...
//   ... = OP %reg1
//
// If
//   - %reg0 has not been clobbered by the time of the use of %reg1
//   - the register class constraints are satisfied
//   - the COPY def is the only value that reaches OP
// then this pass replaces the above with:
//
//   %reg1 = COPY %reg0
//   ...
//   ... = OP %reg0
//
// This pass also removes some redundant COPYs. For example:
//
//   %R1 = COPY %R0
//   ... // No clobber of %R1
//   %R0 = COPY %R1 <<< Removed
//
// or
//
//   %R1 = COPY %R0
//   ... // No clobber of %R0
//   %R1 = COPY %R0 <<< Removed
//
// or
//
//   $R0 = OP ...
//   ... // No read/clobber of $R0 and $R1
//   $R1 = COPY $R0 // $R0 is killed
//   Replace $R0 with $R1 and remove the COPY
//   $R1 = OP ...
//   ...
//
//===----------------------------------------------------------------------===//

#include "llvm/ADT/DenseMap.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SetVector.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/ADT/SmallVector.h"
#include "llvm/ADT/Statistic.h"
#include "llvm/ADT/iterator_range.h"
#include "llvm/CodeGen/MachineBasicBlock.h"
#include "llvm/CodeGen/MachineFunction.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstr.h"
#include "llvm/CodeGen/MachineOperand.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/InitializePasses.h"
#include "llvm/MC/MCRegisterInfo.h"
#include "llvm/Pass.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/DebugCounter.h"
#include "llvm/Support/raw_ostream.h"
#include <cassert>
#include <iterator>

using namespace llvm;

#define DEBUG_TYPE "machine-cp"

STATISTIC(NumDeletes, "Number of dead copies deleted");
STATISTIC(NumCopyForwards, "Number of copy uses forwarded");
STATISTIC(NumCopyBackwardPropagated, "Number of copy defs backward propagated");
DEBUG_COUNTER(FwdCounter, "machine-cp-fwd",
              "Controls which register COPYs are forwarded");

namespace {

class CopyTracker {
  struct CopyInfo {
    MachineInstr *MI;
    SmallVector<unsigned, 4> DefRegs;
    bool Avail;
  };

  DenseMap<unsigned, CopyInfo> Copies;

public:
  /// Mark all of the given registers and their subregisters as unavailable for
  /// copying.
  void markRegsUnavailable(ArrayRef<unsigned> Regs,
                           const TargetRegisterInfo &TRI) {
    for (unsigned Reg : Regs) {
      // Source of copy is no longer available for propagation.
      for (MCRegUnitIterator RUI(Reg, &TRI); RUI.isValid(); ++RUI) {
        auto CI = Copies.find(*RUI);
        if (CI != Copies.end())
          CI->second.Avail = false;
      }
    }
  }

  /// Remove register from copy maps.
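  /// Illustrative sketch (assuming x86 registers): after "$ebx = COPY $eax"
  /// has been tracked, invalidating $ax must also invalidate $ebx, since $ebx
  /// holds a value defined through a register that overlaps $ax. The code
  /// below therefore walks the register units and collects every register the
  /// tracked copies mention before erasing them.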
  void invalidateRegister(unsigned Reg, const TargetRegisterInfo &TRI) {
    // Since Reg might be a subreg of some registers, invalidating only Reg is
    // not enough. We have to find the COPY that defines Reg, as well as the
    // registers defined via Reg, and invalidate all of them.
    SmallSet<unsigned, 8> RegsToInvalidate;
    RegsToInvalidate.insert(Reg);
    for (MCRegUnitIterator RUI(Reg, &TRI); RUI.isValid(); ++RUI) {
      auto I = Copies.find(*RUI);
      if (I != Copies.end()) {
        if (MachineInstr *MI = I->second.MI) {
          RegsToInvalidate.insert(MI->getOperand(0).getReg());
          RegsToInvalidate.insert(MI->getOperand(1).getReg());
        }
        RegsToInvalidate.insert(I->second.DefRegs.begin(),
                                I->second.DefRegs.end());
      }
    }
    for (unsigned InvalidReg : RegsToInvalidate)
      for (MCRegUnitIterator RUI(InvalidReg, &TRI); RUI.isValid(); ++RUI)
        Copies.erase(*RUI);
  }

  /// Clobber a single register, removing it from the tracker's copy maps.
  void clobberRegister(unsigned Reg, const TargetRegisterInfo &TRI) {
    for (MCRegUnitIterator RUI(Reg, &TRI); RUI.isValid(); ++RUI) {
      auto I = Copies.find(*RUI);
      if (I != Copies.end()) {
        // When we clobber the source of a copy, we need to clobber everything
        // it defined.
        markRegsUnavailable(I->second.DefRegs, TRI);
        // When we clobber the destination of a copy, we need to clobber the
        // whole register it defined.
        if (MachineInstr *MI = I->second.MI)
          markRegsUnavailable({MI->getOperand(0).getReg()}, TRI);
        // Now we can erase the copy.
        Copies.erase(I);
      }
    }
  }

  /// Add this copy's registers into the tracker's copy maps.
  void trackCopy(MachineInstr *MI, const TargetRegisterInfo &TRI) {
    assert(MI->isCopy() && "Tracking non-copy?");

    Register Def = MI->getOperand(0).getReg();
    Register Src = MI->getOperand(1).getReg();

    // Remember Def is defined by the copy.
    for (MCRegUnitIterator RUI(Def, &TRI); RUI.isValid(); ++RUI)
      Copies[*RUI] = {MI, {}, true};

    // Remember the source that's copied to Def. Once it is clobbered, it is
    // no longer available for copy propagation.
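    // (Illustrative sketch, assuming x86 registers: for "$ebx = COPY $eax",
    // the loop above recorded the COPY at every unit of $ebx with Avail set,
    // and the loop below records $ebx in the DefRegs list of every unit of
    // $eax, so a later clobber of $eax can mark $ebx unavailable.)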
    for (MCRegUnitIterator RUI(Src, &TRI); RUI.isValid(); ++RUI) {
      auto I = Copies.insert({*RUI, {nullptr, {}, false}});
      auto &Copy = I.first->second;
      if (!is_contained(Copy.DefRegs, Def))
        Copy.DefRegs.push_back(Def);
    }
  }

  bool hasAnyCopies() {
    return !Copies.empty();
  }

  MachineInstr *findCopyForUnit(unsigned RegUnit, const TargetRegisterInfo &TRI,
                                bool MustBeAvailable = false) {
    auto CI = Copies.find(RegUnit);
    if (CI == Copies.end())
      return nullptr;
    if (MustBeAvailable && !CI->second.Avail)
      return nullptr;
    return CI->second.MI;
  }

  MachineInstr *findCopyDefViaUnit(unsigned RegUnit,
                                   const TargetRegisterInfo &TRI) {
    auto CI = Copies.find(RegUnit);
    if (CI == Copies.end())
      return nullptr;
    if (CI->second.DefRegs.size() != 1)
      return nullptr;
    MCRegUnitIterator RUI(CI->second.DefRegs[0], &TRI);
    return findCopyForUnit(*RUI, TRI, true);
  }

  MachineInstr *findAvailBackwardCopy(MachineInstr &I, unsigned Reg,
                                      const TargetRegisterInfo &TRI) {
    MCRegUnitIterator RUI(Reg, &TRI);
    MachineInstr *AvailCopy = findCopyDefViaUnit(*RUI, TRI);
    if (!AvailCopy ||
        !TRI.isSubRegisterEq(AvailCopy->getOperand(1).getReg(), Reg))
      return nullptr;

    Register AvailSrc = AvailCopy->getOperand(1).getReg();
    Register AvailDef = AvailCopy->getOperand(0).getReg();
    for (const MachineInstr &MI :
         make_range(AvailCopy->getReverseIterator(), I.getReverseIterator()))
      for (const MachineOperand &MO : MI.operands())
        if (MO.isRegMask())
          // FIXME: Shall we simultaneously invalidate AvailSrc or AvailDef?
          if (MO.clobbersPhysReg(AvailSrc) || MO.clobbersPhysReg(AvailDef))
            return nullptr;

    return AvailCopy;
  }

  MachineInstr *findAvailCopy(MachineInstr &DestCopy, unsigned Reg,
                              const TargetRegisterInfo &TRI) {
    // We check the first RegUnit here, since we'll only be interested in the
    // copy if it copies the entire register anyway.
    MCRegUnitIterator RUI(Reg, &TRI);
    MachineInstr *AvailCopy =
        findCopyForUnit(*RUI, TRI, /*MustBeAvailable=*/true);
    if (!AvailCopy ||
        !TRI.isSubRegisterEq(AvailCopy->getOperand(0).getReg(), Reg))
      return nullptr;

    // Check that the available copy isn't clobbered by any regmasks between
    // itself and the destination.
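    // (For instance, a call between the copy and the destination whose
    // regmask clobbers the copy's source or destination makes the copy
    // unusable here.)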
    Register AvailSrc = AvailCopy->getOperand(1).getReg();
    Register AvailDef = AvailCopy->getOperand(0).getReg();
    for (const MachineInstr &MI :
         make_range(AvailCopy->getIterator(), DestCopy.getIterator()))
      for (const MachineOperand &MO : MI.operands())
        if (MO.isRegMask())
          if (MO.clobbersPhysReg(AvailSrc) || MO.clobbersPhysReg(AvailDef))
            return nullptr;

    return AvailCopy;
  }

  void clear() {
    Copies.clear();
  }
};

class MachineCopyPropagation : public MachineFunctionPass {
  const TargetRegisterInfo *TRI;
  const TargetInstrInfo *TII;
  const MachineRegisterInfo *MRI;

public:
  static char ID; // Pass identification, replacement for typeid

  MachineCopyPropagation() : MachineFunctionPass(ID) {
    initializeMachineCopyPropagationPass(*PassRegistry::getPassRegistry());
  }

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.setPreservesCFG();
    MachineFunctionPass::getAnalysisUsage(AU);
  }

  bool runOnMachineFunction(MachineFunction &MF) override;

  MachineFunctionProperties getRequiredProperties() const override {
    return MachineFunctionProperties().set(
        MachineFunctionProperties::Property::NoVRegs);
  }

private:
  typedef enum { DebugUse = false, RegularUse = true } DebugType;

  void ClobberRegister(unsigned Reg);
  void ReadRegister(unsigned Reg, MachineInstr &Reader, DebugType DT);
  void ForwardCopyPropagateBlock(MachineBasicBlock &MBB);
  void BackwardCopyPropagateBlock(MachineBasicBlock &MBB);
  bool eraseIfRedundant(MachineInstr &Copy, unsigned Src, unsigned Def);
  void forwardUses(MachineInstr &MI);
  void propagateDefs(MachineInstr &MI);
  bool isForwardableRegClassCopy(const MachineInstr &Copy,
                                 const MachineInstr &UseI, unsigned UseIdx);
  bool isBackwardPropagatableRegClassCopy(const MachineInstr &Copy,
                                          const MachineInstr &UseI,
                                          unsigned UseIdx);
  bool hasImplicitOverlap(const MachineInstr &MI, const MachineOperand &Use);

  /// Candidates for deletion.
  SmallSetVector<MachineInstr *, 8> MaybeDeadCopies;

  /// Multimap tracking debug users in current BB
  DenseMap<MachineInstr*, SmallVector<MachineInstr*, 2>> CopyDbgUsers;

  CopyTracker Tracker;

  bool Changed;
};

} // end anonymous namespace

char MachineCopyPropagation::ID = 0;

char &llvm::MachineCopyPropagationID = MachineCopyPropagation::ID;

INITIALIZE_PASS(MachineCopyPropagation, DEBUG_TYPE,
                "Machine Copy Propagation Pass", false, false)

void MachineCopyPropagation::ReadRegister(unsigned Reg, MachineInstr &Reader,
                                          DebugType DT) {
  // If 'Reg' is defined by a copy, the copy is no longer a candidate
  // for elimination. If a copy is "read" by a debug user, record the user
  // for propagation.
  for (MCRegUnitIterator RUI(Reg, TRI); RUI.isValid(); ++RUI) {
    if (MachineInstr *Copy = Tracker.findCopyForUnit(*RUI, *TRI)) {
      if (DT == RegularUse) {
        LLVM_DEBUG(dbgs() << "MCP: Copy is used - not dead: "; Copy->dump());
        MaybeDeadCopies.remove(Copy);
      } else {
        CopyDbgUsers[Copy].push_back(&Reader);
      }
    }
  }
}

/// Return true if \p PreviousCopy did copy register \p Src to register \p Def.
/// This fact may have been obscured by sub register usage or may not be true
/// at all even though Src and Def are subregisters of the registers used in
/// PreviousCopy. e.g.
///   isNopCopy("ecx = COPY eax", AX, CX) == true
///   isNopCopy("ecx = COPY eax", AH, CL) == false
static bool isNopCopy(const MachineInstr &PreviousCopy, unsigned Src,
                      unsigned Def, const TargetRegisterInfo *TRI) {
  Register PreviousSrc = PreviousCopy.getOperand(1).getReg();
  Register PreviousDef = PreviousCopy.getOperand(0).getReg();
  if (Src == PreviousSrc) {
    assert(Def == PreviousDef);
    return true;
  }
  if (!TRI->isSubRegister(PreviousSrc, Src))
    return false;
  unsigned SubIdx = TRI->getSubRegIndex(PreviousSrc, Src);
  return SubIdx == TRI->getSubRegIndex(PreviousDef, Def);
}

/// Remove instruction \p Copy if there exists a previous copy that copies the
/// register \p Src to the register \p Def; this may happen indirectly by
/// copying the super registers.
bool MachineCopyPropagation::eraseIfRedundant(MachineInstr &Copy, unsigned Src,
                                              unsigned Def) {
  // Avoid eliminating a copy from/to a reserved register as we cannot predict
  // the value (Example: The sparc zero register is writable but stays zero).
  if (MRI->isReserved(Src) || MRI->isReserved(Def))
    return false;

  // Search for an existing copy.
  MachineInstr *PrevCopy = Tracker.findAvailCopy(Copy, Def, *TRI);
  if (!PrevCopy)
    return false;

  // Check that the existing copy uses the correct sub registers.
  if (PrevCopy->getOperand(0).isDead())
    return false;
  if (!isNopCopy(*PrevCopy, Src, Def, TRI))
    return false;

  LLVM_DEBUG(dbgs() << "MCP: copy is a NOP, removing: "; Copy.dump());

  // Copy was redundantly redefining either Src or Def. Remove earlier kill
  // flags between Copy and PrevCopy because the value will be reused now.
  assert(Copy.isCopy());
  Register CopyDef = Copy.getOperand(0).getReg();
  assert(CopyDef == Src || CopyDef == Def);
  for (MachineInstr &MI :
       make_range(PrevCopy->getIterator(), Copy.getIterator()))
    MI.clearRegisterKills(CopyDef, TRI);

  Copy.eraseFromParent();
  Changed = true;
  ++NumDeletes;
  return true;
}

bool MachineCopyPropagation::isBackwardPropagatableRegClassCopy(
    const MachineInstr &Copy, const MachineInstr &UseI, unsigned UseIdx) {
  Register Def = Copy.getOperand(0).getReg();

  if (const TargetRegisterClass *URC =
          UseI.getRegClassConstraint(UseIdx, TII, TRI))
    return URC->contains(Def);

  // We don't process further if UseI is a COPY, since forward copy propagation
  // should handle that.
  return false;
}

/// Decide whether we should forward the source of \param Copy to its use in
/// \param UseI based on the physical register class constraints of the opcode,
/// while avoiding the introduction of more cross-class COPYs.
bool MachineCopyPropagation::isForwardableRegClassCopy(const MachineInstr &Copy,
                                                       const MachineInstr &UseI,
                                                       unsigned UseIdx) {
  Register CopySrcReg = Copy.getOperand(1).getReg();

  // If the new register meets the opcode register constraints, then allow
  // forwarding.
  if (const TargetRegisterClass *URC =
          UseI.getRegClassConstraint(UseIdx, TII, TRI))
    return URC->contains(CopySrcReg);

  if (!UseI.isCopy())
    return false;

  /// COPYs don't have register class constraints, so if the user instruction
  /// is a COPY, we just try to avoid introducing additional cross-class
  /// COPYs. For example:
  ///
  ///   RegClassA = COPY RegClassB // Copy parameter
  ///   ...
  ///   RegClassB = COPY RegClassA // UseI parameter
  ///
  /// which after forwarding becomes
  ///
  ///   RegClassA = COPY RegClassB
  ///   ...
  ///   RegClassB = COPY RegClassB
  ///
  /// so we have reduced the number of cross-class COPYs and potentially
  /// introduced a nop COPY that can be removed.
  const TargetRegisterClass *UseDstRC =
      TRI->getMinimalPhysRegClass(UseI.getOperand(0).getReg());

  const TargetRegisterClass *SuperRC = UseDstRC;
  for (TargetRegisterClass::sc_iterator SuperRCI = UseDstRC->getSuperClasses();
       SuperRC; SuperRC = *SuperRCI++)
    if (SuperRC->contains(CopySrcReg))
      return true;

  return false;
}

/// Check that \p MI does not have implicit uses that overlap with its \p Use
/// operand (the register being replaced), since these can sometimes be
/// implicitly tied to other operands. For example, on AMDGPU:
///
/// V_MOVRELS_B32_e32 %VGPR2, %M0<imp-use>, %EXEC<imp-use>, %VGPR2_VGPR3_VGPR4_VGPR5<imp-use>
///
/// the %VGPR2 is implicitly tied to the larger reg operand, but we have no
/// way of knowing we need to update the latter when updating the former.
bool MachineCopyPropagation::hasImplicitOverlap(const MachineInstr &MI,
                                                const MachineOperand &Use) {
  for (const MachineOperand &MIUse : MI.uses())
    if (&MIUse != &Use && MIUse.isReg() && MIUse.isImplicit() &&
        MIUse.isUse() && TRI->regsOverlap(Use.getReg(), MIUse.getReg()))
      return true;

  return false;
}

/// Look for available copies whose destination register is used by \p MI and
/// replace the use in \p MI with the copy's source register.
void MachineCopyPropagation::forwardUses(MachineInstr &MI) {
  if (!Tracker.hasAnyCopies())
    return;

  // Look for non-tied explicit register uses that have an active COPY
  // instruction defining them, and replace the register with the source of
  // that COPY.
  for (unsigned OpIdx = 0, OpEnd = MI.getNumOperands(); OpIdx < OpEnd;
       ++OpIdx) {
    MachineOperand &MOUse = MI.getOperand(OpIdx);
    // Don't forward into undef use operands: the machine verifier does not
    // treat undef reads as reads, so forwarding could create a live range
    // that ends on an undef read, which the verifier then reports as a live
    // range that doesn't end on a read of the live range register.
    if (!MOUse.isReg() || MOUse.isTied() || MOUse.isUndef() || MOUse.isDef() ||
        MOUse.isImplicit())
      continue;

    if (!MOUse.getReg())
      continue;

    // Check that the register is marked 'renamable' so we know it is safe to
    // rename it without violating any constraints that aren't expressed in the
    // IR (e.g. ABI or opcode requirements).
    if (!MOUse.isRenamable())
      continue;

    MachineInstr *Copy = Tracker.findAvailCopy(MI, MOUse.getReg(), *TRI);
    if (!Copy)
      continue;

    Register CopyDstReg = Copy->getOperand(0).getReg();
    const MachineOperand &CopySrc = Copy->getOperand(1);
    Register CopySrcReg = CopySrc.getReg();

    // FIXME: Don't handle partial uses of wider COPYs yet.
    if (MOUse.getReg() != CopyDstReg) {
      LLVM_DEBUG(
          dbgs() << "MCP: FIXME! Not forwarding COPY to sub-register use:\n "
                 << MI);
      continue;
    }

    // Don't forward COPYs of reserved regs unless they are constant.
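    // (For example, a hardwired zero register is reserved but constant, so
    // forwarding it is still sound; a reserved register whose value may
    // change, such as the stack pointer, is not forwarded.)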
    if (MRI->isReserved(CopySrcReg) && !MRI->isConstantPhysReg(CopySrcReg))
      continue;

    if (!isForwardableRegClassCopy(*Copy, MI, OpIdx))
      continue;

    if (hasImplicitOverlap(MI, MOUse))
      continue;

    // Check that the instruction is not a copy that partially overwrites the
    // original copy source that we are about to use. The tracker mechanism
    // cannot cope with that.
    if (MI.isCopy() && MI.modifiesRegister(CopySrcReg, TRI) &&
        !MI.definesRegister(CopySrcReg)) {
      LLVM_DEBUG(dbgs() << "MCP: Copy source overlap with dest in " << MI);
      continue;
    }

    if (!DebugCounter::shouldExecute(FwdCounter)) {
      LLVM_DEBUG(dbgs() << "MCP: Skipping forwarding due to debug counter:\n "
                        << MI);
      continue;
    }

    LLVM_DEBUG(dbgs() << "MCP: Replacing " << printReg(MOUse.getReg(), TRI)
                      << "\n with " << printReg(CopySrcReg, TRI)
                      << "\n in " << MI << " from " << *Copy);

    MOUse.setReg(CopySrcReg);
    if (!CopySrc.isRenamable())
      MOUse.setIsRenamable(false);

    LLVM_DEBUG(dbgs() << "MCP: After replacement: " << MI << "\n");

    // Clear kill markers that may have been invalidated.
    for (MachineInstr &KMI :
         make_range(Copy->getIterator(), std::next(MI.getIterator())))
      KMI.clearRegisterKills(CopySrcReg, TRI);

    ++NumCopyForwards;
    Changed = true;
  }
}

void MachineCopyPropagation::ForwardCopyPropagateBlock(MachineBasicBlock &MBB) {
  LLVM_DEBUG(dbgs() << "MCP: ForwardCopyPropagateBlock " << MBB.getName()
                    << "\n");

  for (MachineBasicBlock::iterator I = MBB.begin(), E = MBB.end(); I != E;) {
    MachineInstr *MI = &*I;
    ++I;

    // Analyze copies (which don't overlap themselves).
    if (MI->isCopy() && !TRI->regsOverlap(MI->getOperand(0).getReg(),
                                          MI->getOperand(1).getReg())) {
      Register Def = MI->getOperand(0).getReg();
      Register Src = MI->getOperand(1).getReg();

      assert(!Register::isVirtualRegister(Def) &&
             !Register::isVirtualRegister(Src) &&
             "MachineCopyPropagation should be run after register allocation!");

      // If the two copies cancel out and the source of the first copy hasn't
      // been overridden, eliminate the second one. e.g.
      //   %ecx = COPY %eax
      //   ... nothing clobbered eax
      //   %eax = COPY %ecx
      // =>
      //   %ecx = COPY %eax
      //
      // or
      //
      //   %ecx = COPY %eax
      //   ... nothing clobbered eax
      //   %ecx = COPY %eax
      // =>
      //   %ecx = COPY %eax
      if (eraseIfRedundant(*MI, Def, Src) || eraseIfRedundant(*MI, Src, Def))
        continue;

      forwardUses(*MI);

      // Src may have been changed by forwardUses().
      Src = MI->getOperand(1).getReg();

      // If Src is defined by a previous copy, the previous copy cannot be
      // eliminated.
      ReadRegister(Src, *MI, RegularUse);
      for (const MachineOperand &MO : MI->implicit_operands()) {
        if (!MO.isReg() || !MO.readsReg())
          continue;
        Register Reg = MO.getReg();
        if (!Reg)
          continue;
        ReadRegister(Reg, *MI, RegularUse);
      }

      LLVM_DEBUG(dbgs() << "MCP: Copy is a deletion candidate: "; MI->dump());

      // Copy is now a candidate for deletion.
      if (!MRI->isReserved(Def))
        MaybeDeadCopies.insert(MI);

      // If 'Def' was previously the source of another copy, then that earlier
      // copy's source is no longer available. e.g.
      //   %xmm9 = copy %xmm2
      //   ...
      //   %xmm2 = copy %xmm0
      //   ...
      //   %xmm2 = copy %xmm9
      Tracker.clobberRegister(Def, *TRI);
      for (const MachineOperand &MO : MI->implicit_operands()) {
        if (!MO.isReg() || !MO.isDef())
          continue;
        Register Reg = MO.getReg();
        if (!Reg)
          continue;
        Tracker.clobberRegister(Reg, *TRI);
      }

      Tracker.trackCopy(MI, *TRI);

      continue;
    }

    // Clobber any earlyclobber regs first.
    for (const MachineOperand &MO : MI->operands())
      if (MO.isReg() && MO.isEarlyClobber()) {
        Register Reg = MO.getReg();
        // If we have a tied earlyclobber, that means it is also read by this
        // instruction, so we need to make sure we don't remove it as dead
        // later.
        if (MO.isTied())
          ReadRegister(Reg, *MI, RegularUse);
        Tracker.clobberRegister(Reg, *TRI);
      }

    forwardUses(*MI);

    // Not a copy.
    SmallVector<unsigned, 2> Defs;
    const MachineOperand *RegMask = nullptr;
    for (const MachineOperand &MO : MI->operands()) {
      if (MO.isRegMask())
        RegMask = &MO;
      if (!MO.isReg())
        continue;
      Register Reg = MO.getReg();
      if (!Reg)
        continue;

      assert(!Register::isVirtualRegister(Reg) &&
             "MachineCopyPropagation should be run after register allocation!");

      if (MO.isDef() && !MO.isEarlyClobber()) {
        Defs.push_back(Reg);
        continue;
      } else if (MO.readsReg())
        ReadRegister(Reg, *MI, MO.isDebug() ? DebugUse : RegularUse);
    }

    // The instruction has a register mask operand which means that it clobbers
    // a large set of registers. Treat clobbered registers the same way as
    // defined registers.
    if (RegMask) {
      // Erase any MaybeDeadCopies whose destination register is clobbered.
      for (SmallSetVector<MachineInstr *, 8>::iterator DI =
               MaybeDeadCopies.begin();
           DI != MaybeDeadCopies.end();) {
        MachineInstr *MaybeDead = *DI;
        Register Reg = MaybeDead->getOperand(0).getReg();
        assert(!MRI->isReserved(Reg));

        if (!RegMask->clobbersPhysReg(Reg)) {
          ++DI;
          continue;
        }

        LLVM_DEBUG(dbgs() << "MCP: Removing copy due to regmask clobbering: ";
                   MaybeDead->dump());

        // Make sure we invalidate any entries in the copy maps before erasing
        // the instruction.
        Tracker.clobberRegister(Reg, *TRI);

        // erase() will return the next valid iterator pointing to the next
        // element after the erased one.
        DI = MaybeDeadCopies.erase(DI);
        MaybeDead->eraseFromParent();
        Changed = true;
        ++NumDeletes;
      }
    }

    // Any previous copy that defined or read one of the Defs is no longer
    // available.
    for (unsigned Reg : Defs)
      Tracker.clobberRegister(Reg, *TRI);
  }

  // If MBB doesn't have successors, delete the copies whose defs are not used.
  // If MBB does have successors, then conservatively assume the defs are
  // live-out, since we don't want to trust live-in lists.
  if (MBB.succ_empty()) {
    for (MachineInstr *MaybeDead : MaybeDeadCopies) {
      LLVM_DEBUG(dbgs() << "MCP: Removing copy due to no live-out succ: ";
                 MaybeDead->dump());
      assert(!MRI->isReserved(MaybeDead->getOperand(0).getReg()));

      // Update matching debug values, if any.
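      // (DBG_VALUEs recorded in CopyDbgUsers for this copy are rewritten to
      // refer to the copy's source register, so the debug info stays valid
      // once the copy itself is erased.)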
      assert(MaybeDead->isCopy());
      unsigned SrcReg = MaybeDead->getOperand(1).getReg();
      MRI->updateDbgUsersToReg(SrcReg, CopyDbgUsers[MaybeDead]);

      MaybeDead->eraseFromParent();
      Changed = true;
      ++NumDeletes;
    }
  }

  MaybeDeadCopies.clear();
  CopyDbgUsers.clear();
  Tracker.clear();
}

static bool isBackwardPropagatableCopy(MachineInstr &MI,
                                       const MachineRegisterInfo &MRI) {
  assert(MI.isCopy() && "MI is expected to be a COPY");
  Register Def = MI.getOperand(0).getReg();
  Register Src = MI.getOperand(1).getReg();

  if (!Def || !Src)
    return false;

  if (MRI.isReserved(Def) || MRI.isReserved(Src))
    return false;

  return MI.getOperand(1).isRenamable() && MI.getOperand(1).isKill();
}

void MachineCopyPropagation::propagateDefs(MachineInstr &MI) {
  if (!Tracker.hasAnyCopies())
    return;

  for (unsigned OpIdx = 0, OpEnd = MI.getNumOperands(); OpIdx != OpEnd;
       ++OpIdx) {
    MachineOperand &MODef = MI.getOperand(OpIdx);

    if (!MODef.isReg() || MODef.isUse())
      continue;

    // Ignore non-trivial cases.
    if (MODef.isTied() || MODef.isUndef() || MODef.isImplicit())
      continue;

    if (!MODef.getReg())
      continue;

    // We only handle defs that are marked renamable, i.e. registers that
    // originally came from virtual registers.
    if (!MODef.isRenamable())
      continue;

    MachineInstr *Copy =
        Tracker.findAvailBackwardCopy(MI, MODef.getReg(), *TRI);
    if (!Copy)
      continue;

    Register Def = Copy->getOperand(0).getReg();
    Register Src = Copy->getOperand(1).getReg();

    if (MODef.getReg() != Src)
      continue;

    if (!isBackwardPropagatableRegClassCopy(*Copy, MI, OpIdx))
      continue;

    if (hasImplicitOverlap(MI, MODef))
      continue;

    LLVM_DEBUG(dbgs() << "MCP: Replacing " << printReg(MODef.getReg(), TRI)
                      << "\n with " << printReg(Def, TRI) << "\n in "
                      << MI << " from " << *Copy);

    MODef.setReg(Def);
    MODef.setIsRenamable(Copy->getOperand(0).isRenamable());

    LLVM_DEBUG(dbgs() << "MCP: After replacement: " << MI << "\n");
    MaybeDeadCopies.insert(Copy);
    Changed = true;
    ++NumCopyBackwardPropagated;
  }
}

void MachineCopyPropagation::BackwardCopyPropagateBlock(
    MachineBasicBlock &MBB) {
  LLVM_DEBUG(dbgs() << "MCP: BackwardCopyPropagateBlock " << MBB.getName()
                    << "\n");

  for (MachineBasicBlock::reverse_iterator I = MBB.rbegin(), E = MBB.rend();
       I != E;) {
    MachineInstr *MI = &*I;
    ++I;

    // Ignore non-trivial COPYs.
    if (MI->isCopy() && MI->getNumOperands() == 2 &&
        !TRI->regsOverlap(MI->getOperand(0).getReg(),
                          MI->getOperand(1).getReg())) {

      Register Def = MI->getOperand(0).getReg();
      Register Src = MI->getOperand(1).getReg();

      // Unlike forward cp, we don't invoke propagateDefs here,
      // just let forward cp do COPY-to-COPY propagation.
      if (isBackwardPropagatableCopy(*MI, *MRI)) {
        Tracker.invalidateRegister(Src, *TRI);
        Tracker.invalidateRegister(Def, *TRI);
        Tracker.trackCopy(MI, *TRI);
        continue;
      }
    }

    // Invalidate any earlyclobber regs first.
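    // (Conservatively drop them: an earlyclobber def may be written before
    // the instruction's source operands are read.)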
    for (const MachineOperand &MO : MI->operands())
      if (MO.isReg() && MO.isEarlyClobber()) {
        Register Reg = MO.getReg();
        if (!Reg)
          continue;
        Tracker.invalidateRegister(Reg, *TRI);
      }

    propagateDefs(*MI);
    for (const MachineOperand &MO : MI->operands()) {
      if (!MO.isReg())
        continue;

      if (!MO.getReg())
        continue;

      if (MO.isDef())
        Tracker.invalidateRegister(MO.getReg(), *TRI);

      if (MO.readsReg())
        Tracker.invalidateRegister(MO.getReg(), *TRI);
    }
  }

  for (auto *Copy : MaybeDeadCopies) {
    Copy->eraseFromParent();
    ++NumDeletes;
  }

  MaybeDeadCopies.clear();
  CopyDbgUsers.clear();
  Tracker.clear();
}

bool MachineCopyPropagation::runOnMachineFunction(MachineFunction &MF) {
  if (skipFunction(MF.getFunction()))
    return false;

  Changed = false;

  TRI = MF.getSubtarget().getRegisterInfo();
  TII = MF.getSubtarget().getInstrInfo();
  MRI = &MF.getRegInfo();

  for (MachineBasicBlock &MBB : MF) {
    BackwardCopyPropagateBlock(MBB);
    ForwardCopyPropagateBlock(MBB);
  }

  return Changed;
}