xref: /freebsd/contrib/llvm-project/llvm/lib/CodeGen/MachineCopyPropagation.cpp (revision e64bea71c21eb42e97aa615188ba91f6cce0d36d)
1 //===- MachineCopyPropagation.cpp - Machine Copy Propagation Pass ---------===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // This is an extremely simple MachineInstr-level copy propagation pass.
10 //
11 // This pass forwards the source of COPYs to the users of their destinations
12 // when doing so is legal.  For example:
13 //
14 //   %reg1 = COPY %reg0
15 //   ...
16 //   ... = OP %reg1
17 //
18 // If
19 //   - %reg0 has not been clobbered by the time of the use of %reg1
20 //   - the register class constraints are satisfied
21 //   - the COPY def is the only value that reaches OP
22 // then this pass replaces the above with:
23 //
24 //   %reg1 = COPY %reg0
25 //   ...
26 //   ... = OP %reg0
27 //
28 // This pass also removes some redundant COPYs.  For example:
29 //
30 //    %R1 = COPY %R0
31 //    ... // No clobber of %R1
32 //    %R0 = COPY %R1 <<< Removed
33 //
34 // or
35 //
36 //    %R1 = COPY %R0
37 //    ... // No clobber of %R0
38 //    %R1 = COPY %R0 <<< Removed
39 //
40 // or
41 //
42 //    $R0 = OP ...
43 //    ... // No read/clobber of $R0 and $R1
44 //    $R1 = COPY $R0 // $R0 is killed
45 // Replace $R0 with $R1 and remove the COPY
46 //    $R1 = OP ...
47 //    ...
48 //
49 //===----------------------------------------------------------------------===//
50 
51 #include "llvm/CodeGen/MachineCopyPropagation.h"
52 #include "llvm/ADT/DenseMap.h"
53 #include "llvm/ADT/STLExtras.h"
54 #include "llvm/ADT/SetVector.h"
55 #include "llvm/ADT/SmallSet.h"
56 #include "llvm/ADT/SmallVector.h"
57 #include "llvm/ADT/Statistic.h"
58 #include "llvm/ADT/iterator_range.h"
59 #include "llvm/CodeGen/MachineBasicBlock.h"
60 #include "llvm/CodeGen/MachineFunction.h"
61 #include "llvm/CodeGen/MachineFunctionPass.h"
62 #include "llvm/CodeGen/MachineInstr.h"
63 #include "llvm/CodeGen/MachineOperand.h"
64 #include "llvm/CodeGen/MachineRegisterInfo.h"
65 #include "llvm/CodeGen/TargetInstrInfo.h"
66 #include "llvm/CodeGen/TargetRegisterInfo.h"
67 #include "llvm/CodeGen/TargetSubtargetInfo.h"
68 #include "llvm/InitializePasses.h"
69 #include "llvm/MC/MCRegister.h"
70 #include "llvm/MC/MCRegisterInfo.h"
71 #include "llvm/Pass.h"
72 #include "llvm/Support/Debug.h"
73 #include "llvm/Support/DebugCounter.h"
74 #include "llvm/Support/raw_ostream.h"
75 #include <cassert>
76 #include <iterator>
77 
78 using namespace llvm;
79 
80 #define DEBUG_TYPE "machine-cp"
81 
82 STATISTIC(NumDeletes, "Number of dead copies deleted");
83 STATISTIC(NumCopyForwards, "Number of copy uses forwarded");
84 STATISTIC(NumCopyBackwardPropagated, "Number of copy defs backward propagated");
85 STATISTIC(SpillageChainsLength, "Length of spillage chains");
86 STATISTIC(NumSpillageChains, "Number of spillage chains");
// Debug counter so individual COPY forwardings can be bisected from the
// command line (-debug-counter=machine-cp-fwd=...).
87 DEBUG_COUNTER(FwdCounter, "machine-cp-fwd",
88               "Controls which register COPYs are forwarded");
89 
// When set, also recognize target-specific copy-like instructions via
// TargetInstrInfo::isCopyInstr, not just plain COPYs.
90 static cl::opt<bool> MCPUseCopyInstr("mcp-use-is-copy-instr", cl::init(false),
91                                      cl::Hidden);
// Tri-state command-line override for the spillage-copy elimination sub-pass.
92 static cl::opt<cl::boolOrDefault>
93     EnableSpillageCopyElimination("enable-spill-copy-elim", cl::Hidden);
94 
95 namespace {
96 
isCopyInstr(const MachineInstr & MI,const TargetInstrInfo & TII,bool UseCopyInstr)97 static std::optional<DestSourcePair> isCopyInstr(const MachineInstr &MI,
98                                                  const TargetInstrInfo &TII,
99                                                  bool UseCopyInstr) {
100   if (UseCopyInstr)
101     return TII.isCopyInstr(MI);
102 
103   if (MI.isCopy())
104     return std::optional<DestSourcePair>(
105         DestSourcePair{MI.getOperand(0), MI.getOperand(1)});
106 
107   return std::nullopt;
108 }
109 
/// Per-basic-block bookkeeping of COPY instructions whose source value is
/// still intact, keyed by register unit. The propagation routines query this
/// to find forwardable / removable copies, and report every def, use and
/// regmask so stale entries are invalidated.
110 class CopyTracker {
111   struct CopyInfo {
    // Copy whose destination covers this register unit (null when the unit is
    // only recorded as the *source* of some copy).
112     MachineInstr *MI = nullptr;
    // Most recent copy that reads this register unit as its source.
113     MachineInstr *LastSeenUseInCopy = nullptr;
    // Instructions recorded via trackSrcUsers() as using the copy's source.
114     SmallPtrSet<MachineInstr *, 4> SrcUsers;
    // Destination registers that have been copied from this (source) unit.
115     SmallVector<MCRegister, 4> DefRegs;
    // True while the copy's value is still valid for propagation.
116     bool Avail = false;
117   };
118 
  // One entry per register unit touched (as dest or source) by a tracked copy.
119   DenseMap<MCRegUnit, CopyInfo> Copies;
120 
121   // Memoised sets of register units which are preserved by each register mask,
122   // needed to efficiently remove copies which are invalidated by call
123   // instructions.
124   DenseMap<const uint32_t *, BitVector> RegMaskToPreservedRegUnits;
125 
126 public:
127   /// Get the set of register units which are preserved by RegMaskOp.
getPreservedRegUnits(const MachineOperand & RegMaskOp,const TargetRegisterInfo & TRI)128   BitVector &getPreservedRegUnits(const MachineOperand &RegMaskOp,
129                                   const TargetRegisterInfo &TRI) {
130     const uint32_t *RegMask = RegMaskOp.getRegMask();
    // Key the cache on the mask pointer; masks are interned target data.
131     auto [It, Inserted] = RegMaskToPreservedRegUnits.try_emplace(RegMask);
132     if (!Inserted) {
      // Cache hit: reuse the previously computed set.
133       return It->second;
134     } else {
135       BitVector &PreservedRegUnits = It->second;
136 
      // First query for this mask: mark the units of every register the mask
      // does not clobber.
137       PreservedRegUnits.resize(TRI.getNumRegUnits());
138       for (unsigned SafeReg = 0, E = TRI.getNumRegs(); SafeReg < E; ++SafeReg)
139         if (!RegMaskOp.clobbersPhysReg(SafeReg))
140           for (auto SafeUnit : TRI.regunits(SafeReg))
141             PreservedRegUnits.set(SafeUnit);
142 
143       return PreservedRegUnits;
144     }
145   }
146 
147   /// Mark all of the given registers and their subregisters as unavailable for
148   /// copying.
markRegsUnavailable(ArrayRef<MCRegister> Regs,const TargetRegisterInfo & TRI)149   void markRegsUnavailable(ArrayRef<MCRegister> Regs,
150                            const TargetRegisterInfo &TRI) {
151     for (MCRegister Reg : Regs) {
152       // Source of copy is no longer available for propagation.
153       for (MCRegUnit Unit : TRI.regunits(Reg)) {
154         auto CI = Copies.find(Unit);
155         if (CI != Copies.end())
156           CI->second.Avail = false;
157       }
158     }
159   }
160 
161   /// Remove register from copy maps.
invalidateRegister(MCRegister Reg,const TargetRegisterInfo & TRI,const TargetInstrInfo & TII,bool UseCopyInstr)162   void invalidateRegister(MCRegister Reg, const TargetRegisterInfo &TRI,
163                           const TargetInstrInfo &TII, bool UseCopyInstr) {
164     // Since Reg might be a subreg of some registers, only invalidate Reg is not
165     // enough. We have to find the COPY defines Reg or registers defined by Reg
166     // and invalidate all of them. Similarly, we must invalidate all of the
167     // the subregisters used in the source of the COPY.
168     SmallSet<MCRegUnit, 8> RegUnitsToInvalidate;
    // Collect every unit of both operands of a tracked copy; erasure is
    // deferred so we never mutate Copies while iterating it below.
169     auto InvalidateCopy = [&](MachineInstr *MI) {
170       std::optional<DestSourcePair> CopyOperands =
171           isCopyInstr(*MI, TII, UseCopyInstr);
172       assert(CopyOperands && "Expect copy");
173 
174       auto Dest = TRI.regunits(CopyOperands->Destination->getReg().asMCReg());
175       auto Src = TRI.regunits(CopyOperands->Source->getReg().asMCReg());
176       RegUnitsToInvalidate.insert_range(Dest);
177       RegUnitsToInvalidate.insert_range(Src);
178     };
179 
180     for (MCRegUnit Unit : TRI.regunits(Reg)) {
181       auto I = Copies.find(Unit);
182       if (I != Copies.end()) {
183         if (MachineInstr *MI = I->second.MI)
184           InvalidateCopy(MI);
185         if (MachineInstr *MI = I->second.LastSeenUseInCopy)
186           InvalidateCopy(MI);
187       }
188     }
    // Safe to erase now that the scan above is complete.
189     for (MCRegUnit Unit : RegUnitsToInvalidate)
190       Copies.erase(Unit);
191   }
192 
193   /// Clobber a single register unit, removing it from the tracker's copy maps.
clobberRegUnit(MCRegUnit Unit,const TargetRegisterInfo & TRI,const TargetInstrInfo & TII,bool UseCopyInstr)194   void clobberRegUnit(MCRegUnit Unit, const TargetRegisterInfo &TRI,
195                       const TargetInstrInfo &TII, bool UseCopyInstr) {
196     auto I = Copies.find(Unit);
197     if (I != Copies.end()) {
198       // When we clobber the source of a copy, we need to clobber everything
199       // it defined.
200       markRegsUnavailable(I->second.DefRegs, TRI);
201       // When we clobber the destination of a copy, we need to clobber the
202       // whole register it defined.
203       if (MachineInstr *MI = I->second.MI) {
204         std::optional<DestSourcePair> CopyOperands =
205             isCopyInstr(*MI, TII, UseCopyInstr);
206 
207         MCRegister Def = CopyOperands->Destination->getReg().asMCReg();
208         MCRegister Src = CopyOperands->Source->getReg().asMCReg();
209 
210         markRegsUnavailable(Def, TRI);
211 
212         // Since we clobber the destination of a copy, the semantic of Src's
213         // "DefRegs" to contain Def is no longer effectual. We will also need
214         // to remove the record from the copy maps that indicates Src defined
215         // Def. Failing to do so might cause the target to miss some
216         // opportunities to further eliminate redundant copy instructions.
217         // Consider the following sequence during the
218         // ForwardCopyPropagateBlock procedure:
219         // L1: r0 = COPY r9     <- TrackMI
220         // L2: r0 = COPY r8     <- TrackMI (Remove r9 defined r0 from tracker)
221         // L3: use r0           <- Remove L2 from MaybeDeadCopies
222         // L4: early-clobber r9 <- Clobber r9 (L2 is still valid in tracker)
223         // L5: r0 = COPY r8     <- Remove NopCopy
224         for (MCRegUnit SrcUnit : TRI.regunits(Src)) {
225           auto SrcCopy = Copies.find(SrcUnit);
226           if (SrcCopy != Copies.end() && SrcCopy->second.LastSeenUseInCopy) {
227             // If SrcCopy defines multiple values, we only need
228             // to erase the record for Def in DefRegs.
229             for (auto itr = SrcCopy->second.DefRegs.begin();
230                  itr != SrcCopy->second.DefRegs.end(); itr++) {
231               if (*itr == Def) {
232                 SrcCopy->second.DefRegs.erase(itr);
233                 // If DefReg becomes empty after removal, we can remove the
234                 // SrcCopy from the tracker's copy maps. We only remove those
235                 // entries solely record the Def is defined by Src. If an
236                 // entry also contains the definition record of other Def'
237                 // registers, it cannot be cleared.
238                 if (SrcCopy->second.DefRegs.empty() && !SrcCopy->second.MI) {
239                   Copies.erase(SrcCopy);
240                 }
                // DefRegs was mutated; the iterator is invalid, so stop here.
241                 break;
242               }
243             }
244           }
245         }
246       }
247       // Now we can erase the copy.
248       Copies.erase(I);
249     }
250   }
251 
252   /// Clobber a single register, removing it from the tracker's copy maps.
clobberRegister(MCRegister Reg,const TargetRegisterInfo & TRI,const TargetInstrInfo & TII,bool UseCopyInstr)253   void clobberRegister(MCRegister Reg, const TargetRegisterInfo &TRI,
254                        const TargetInstrInfo &TII, bool UseCopyInstr) {
255     for (MCRegUnit Unit : TRI.regunits(Reg)) {
256       clobberRegUnit(Unit, TRI, TII, UseCopyInstr);
257     }
258   }
259 
260   /// Track copy's src users, and return false if that can't be done.
261   /// We can only track if we have a COPY instruction which source is
262   /// the same as the Reg.
trackSrcUsers(MCRegister Reg,MachineInstr & MI,const TargetRegisterInfo & TRI,const TargetInstrInfo & TII,bool UseCopyInstr)263   bool trackSrcUsers(MCRegister Reg, MachineInstr &MI,
264                      const TargetRegisterInfo &TRI, const TargetInstrInfo &TII,
265                      bool UseCopyInstr) {
    // Only the first unit is checked; see findAvailCopy for the rationale.
266     MCRegUnit RU = *TRI.regunits(Reg).begin();
267     MachineInstr *AvailCopy = findCopyDefViaUnit(RU, TRI);
268     if (!AvailCopy)
269       return false;
270 
271     std::optional<DestSourcePair> CopyOperands =
272         isCopyInstr(*AvailCopy, TII, UseCopyInstr);
273     Register Src = CopyOperands->Source->getReg();
274 
275     // Bail out, if the source of the copy is not the same as the Reg.
276     if (Src != Reg)
277       return false;
278 
279     auto I = Copies.find(RU);
280     if (I == Copies.end())
281       return false;
282 
283     I->second.SrcUsers.insert(&MI);
284     return true;
285   }
286 
287   /// Return the users for a given register.
getSrcUsers(MCRegister Reg,const TargetRegisterInfo & TRI)288   SmallPtrSet<MachineInstr *, 4> getSrcUsers(MCRegister Reg,
289                                              const TargetRegisterInfo &TRI) {
290     MCRegUnit RU = *TRI.regunits(Reg).begin();
291     auto I = Copies.find(RU);
292     if (I == Copies.end())
293       return {};
294     return I->second.SrcUsers;
295   }
296 
297   /// Add this copy's registers into the tracker's copy maps.
trackCopy(MachineInstr * MI,const TargetRegisterInfo & TRI,const TargetInstrInfo & TII,bool UseCopyInstr)298   void trackCopy(MachineInstr *MI, const TargetRegisterInfo &TRI,
299                  const TargetInstrInfo &TII, bool UseCopyInstr) {
300     std::optional<DestSourcePair> CopyOperands =
301         isCopyInstr(*MI, TII, UseCopyInstr);
302     assert(CopyOperands && "Tracking non-copy?");
303 
304     MCRegister Src = CopyOperands->Source->getReg().asMCReg();
305     MCRegister Def = CopyOperands->Destination->getReg().asMCReg();
306 
307     // Remember Def is defined by the copy.
308     for (MCRegUnit Unit : TRI.regunits(Def))
309       Copies[Unit] = {MI, nullptr, {}, {}, true};
310 
311     // Remember source that's copied to Def. Once it's clobbered, then
312     // it's no longer available for copy propagation.
313     for (MCRegUnit Unit : TRI.regunits(Src)) {
314       auto &Copy = Copies[Unit];
315       if (!is_contained(Copy.DefRegs, Def))
316         Copy.DefRegs.push_back(Def);
317       Copy.LastSeenUseInCopy = MI;
318     }
319   }
320 
  /// Return true if any copy is currently being tracked.
hasAnyCopies()321   bool hasAnyCopies() {
322     return !Copies.empty();
323   }
324 
  /// Return the copy that defines \p RegUnit, or null. With \p MustBeAvailable
  /// the copy is additionally required to still be valid for propagation.
findCopyForUnit(MCRegUnit RegUnit,const TargetRegisterInfo & TRI,bool MustBeAvailable=false)325   MachineInstr *findCopyForUnit(MCRegUnit RegUnit,
326                                 const TargetRegisterInfo &TRI,
327                                 bool MustBeAvailable = false) {
328     auto CI = Copies.find(RegUnit);
329     if (CI == Copies.end())
330       return nullptr;
331     if (MustBeAvailable && !CI->second.Avail)
332       return nullptr;
333     return CI->second.MI;
334   }
335 
  /// If \p RegUnit is the source of exactly one tracked copy, return that
  /// (still-available) copy; otherwise null.
findCopyDefViaUnit(MCRegUnit RegUnit,const TargetRegisterInfo & TRI)336   MachineInstr *findCopyDefViaUnit(MCRegUnit RegUnit,
337                                    const TargetRegisterInfo &TRI) {
338     auto CI = Copies.find(RegUnit);
339     if (CI == Copies.end())
340       return nullptr;
341     if (CI->second.DefRegs.size() != 1)
342       return nullptr;
343     MCRegUnit RU = *TRI.regunits(CI->second.DefRegs[0]).begin();
344     return findCopyForUnit(RU, TRI, true);
345   }
346 
  /// Find a copy usable for backward propagation into \p I: one whose source
  /// covers \p Reg and whose operands are not clobbered by any regmask between
  /// the copy and \p I.
findAvailBackwardCopy(MachineInstr & I,MCRegister Reg,const TargetRegisterInfo & TRI,const TargetInstrInfo & TII,bool UseCopyInstr)347   MachineInstr *findAvailBackwardCopy(MachineInstr &I, MCRegister Reg,
348                                       const TargetRegisterInfo &TRI,
349                                       const TargetInstrInfo &TII,
350                                       bool UseCopyInstr) {
351     MCRegUnit RU = *TRI.regunits(Reg).begin();
352     MachineInstr *AvailCopy = findCopyDefViaUnit(RU, TRI);
353 
354     if (!AvailCopy)
355       return nullptr;
356 
357     std::optional<DestSourcePair> CopyOperands =
358         isCopyInstr(*AvailCopy, TII, UseCopyInstr);
359     Register AvailSrc = CopyOperands->Source->getReg();
360     Register AvailDef = CopyOperands->Destination->getReg();
361     if (!TRI.isSubRegisterEq(AvailSrc, Reg))
362       return nullptr;
363 
364     for (const MachineInstr &MI :
365          make_range(AvailCopy->getReverseIterator(), I.getReverseIterator()))
366       for (const MachineOperand &MO : MI.operands())
367         if (MO.isRegMask())
368           // FIXME: Shall we simultaneously invalidate AvailSrc or AvailDef?
369           if (MO.clobbersPhysReg(AvailSrc) || MO.clobbersPhysReg(AvailDef))
370             return nullptr;
371 
372     return AvailCopy;
373   }
374 
  /// Find a still-available copy whose destination covers \p Reg that can be
  /// forwarded into \p DestCopy.
findAvailCopy(MachineInstr & DestCopy,MCRegister Reg,const TargetRegisterInfo & TRI,const TargetInstrInfo & TII,bool UseCopyInstr)375   MachineInstr *findAvailCopy(MachineInstr &DestCopy, MCRegister Reg,
376                               const TargetRegisterInfo &TRI,
377                               const TargetInstrInfo &TII, bool UseCopyInstr) {
378     // We check the first RegUnit here, since we'll only be interested in the
379     // copy if it copies the entire register anyway.
380     MCRegUnit RU = *TRI.regunits(Reg).begin();
381     MachineInstr *AvailCopy =
382         findCopyForUnit(RU, TRI, /*MustBeAvailable=*/true);
383 
384     if (!AvailCopy)
385       return nullptr;
386 
387     std::optional<DestSourcePair> CopyOperands =
388         isCopyInstr(*AvailCopy, TII, UseCopyInstr);
389     Register AvailSrc = CopyOperands->Source->getReg();
390     Register AvailDef = CopyOperands->Destination->getReg();
391     if (!TRI.isSubRegisterEq(AvailDef, Reg))
392       return nullptr;
393 
394     // Check that the available copy isn't clobbered by any regmasks between
395     // itself and the destination.
396     for (const MachineInstr &MI :
397          make_range(AvailCopy->getIterator(), DestCopy.getIterator()))
398       for (const MachineOperand &MO : MI.operands())
399         if (MO.isRegMask())
400           if (MO.clobbersPhysReg(AvailSrc) || MO.clobbersPhysReg(AvailDef))
401             return nullptr;
402 
403     return AvailCopy;
404   }
405 
406   // Find last COPY that defines Reg before Current MachineInstr.
findLastSeenDefInCopy(const MachineInstr & Current,MCRegister Reg,const TargetRegisterInfo & TRI,const TargetInstrInfo & TII,bool UseCopyInstr)407   MachineInstr *findLastSeenDefInCopy(const MachineInstr &Current,
408                                       MCRegister Reg,
409                                       const TargetRegisterInfo &TRI,
410                                       const TargetInstrInfo &TII,
411                                       bool UseCopyInstr) {
412     MCRegUnit RU = *TRI.regunits(Reg).begin();
413     auto CI = Copies.find(RU);
414     if (CI == Copies.end() || !CI->second.Avail)
415       return nullptr;
416 
417     MachineInstr *DefCopy = CI->second.MI;
418     std::optional<DestSourcePair> CopyOperands =
419         isCopyInstr(*DefCopy, TII, UseCopyInstr);
420     Register Def = CopyOperands->Destination->getReg();
421     if (!TRI.isSubRegisterEq(Def, Reg))
422       return nullptr;
423 
    // Reject the copy if a regmask (e.g. a call) between it and Current
    // clobbers the defined register.
424     for (const MachineInstr &MI :
425          make_range(static_cast<const MachineInstr *>(DefCopy)->getIterator(),
426                     Current.getIterator()))
427       for (const MachineOperand &MO : MI.operands())
428         if (MO.isRegMask())
429           if (MO.clobbersPhysReg(Def)) {
430             LLVM_DEBUG(dbgs() << "MCP: Removed tracking of "
431                               << printReg(Def, &TRI) << "\n");
432             return nullptr;
433           }
434 
435     return DefCopy;
436   }
437 
438   // Find last COPY that uses Reg.
findLastSeenUseInCopy(MCRegister Reg,const TargetRegisterInfo & TRI)439   MachineInstr *findLastSeenUseInCopy(MCRegister Reg,
440                                       const TargetRegisterInfo &TRI) {
441     MCRegUnit RU = *TRI.regunits(Reg).begin();
442     auto CI = Copies.find(RU);
443     if (CI == Copies.end())
444       return nullptr;
445     return CI->second.LastSeenUseInCopy;
446   }
447 
  /// Drop all tracked state (called at basic-block boundaries).
clear()448   void clear() {
449     Copies.clear();
450   }
451 };
452 
/// Implementation of the copy-propagation transformations described in the
/// file header; shared by the legacy and new-PM pass wrappers.
453 class MachineCopyPropagation {
454   const TargetRegisterInfo *TRI = nullptr;
455   const TargetInstrInfo *TII = nullptr;
456   const MachineRegisterInfo *MRI = nullptr;
457 
458   // Whether to also treat target-specific copy-like instructions (as reported
459   // by TargetInstrInfo::isCopyInstr) as copies, not just plain COPYs.
460   bool UseCopyInstr;
461 
462 public:
MachineCopyPropagation(bool CopyInstr=false)462   MachineCopyPropagation(bool CopyInstr = false)
463       : UseCopyInstr(CopyInstr || MCPUseCopyInstr) {}
464 
  /// Run all propagation phases on \p MF; returns true if it changed anything.
465   bool run(MachineFunction &MF);
466 
467 private:
  // Distinguishes real register reads from reads by debug instructions.
468   typedef enum { DebugUse = false, RegularUse = true } DebugType;
469 
470   void ReadRegister(MCRegister Reg, MachineInstr &Reader, DebugType DT);
471   void readSuccessorLiveIns(const MachineBasicBlock &MBB);
472   void ForwardCopyPropagateBlock(MachineBasicBlock &MBB);
473   void BackwardCopyPropagateBlock(MachineBasicBlock &MBB);
474   void EliminateSpillageCopies(MachineBasicBlock &MBB);
475   bool eraseIfRedundant(MachineInstr &Copy, MCRegister Src, MCRegister Def);
476   void forwardUses(MachineInstr &MI);
477   void propagateDefs(MachineInstr &MI);
478   bool isForwardableRegClassCopy(const MachineInstr &Copy,
479                                  const MachineInstr &UseI, unsigned UseIdx);
480   bool isBackwardPropagatableRegClassCopy(const MachineInstr &Copy,
481                                           const MachineInstr &UseI,
482                                           unsigned UseIdx);
483   bool hasImplicitOverlap(const MachineInstr &MI, const MachineOperand &Use);
484   bool hasOverlappingMultipleDef(const MachineInstr &MI,
485                                  const MachineOperand &MODef, Register Def);
486   bool canUpdateSrcUsers(const MachineInstr &Copy,
487                          const MachineOperand &CopySrc);
488 
489   /// Candidates for deletion.
490   SmallSetVector<MachineInstr *, 8> MaybeDeadCopies;
491 
492   /// Multimap tracking debug users in current BB
493   DenseMap<MachineInstr *, SmallSet<MachineInstr *, 2>> CopyDbgUsers;
494 
  // Per-block record of live copies; reset between basic blocks.
495   CopyTracker Tracker;
496 
  // Whether any modification was made; returned from run().
497   bool Changed = false;
498 };
499 
/// Legacy pass-manager wrapper around MachineCopyPropagation.
500 class MachineCopyPropagationLegacy : public MachineFunctionPass {
  // Forwarded to the MachineCopyPropagation implementation.
501   bool UseCopyInstr;
502 
503 public:
504   static char ID; // pass identification
505 
MachineCopyPropagationLegacy(bool UseCopyInstr=false)506   MachineCopyPropagationLegacy(bool UseCopyInstr = false)
507       : MachineFunctionPass(ID), UseCopyInstr(UseCopyInstr) {}
508 
getAnalysisUsage(AnalysisUsage & AU) const509   void getAnalysisUsage(AnalysisUsage &AU) const override {
    // The pass only rewrites/deletes copies inside blocks; the CFG is intact.
510     AU.setPreservesCFG();
511     MachineFunctionPass::getAnalysisUsage(AU);
512   }
513 
514   bool runOnMachineFunction(MachineFunction &MF) override;
515 
getRequiredProperties() const516   MachineFunctionProperties getRequiredProperties() const override {
    // Runs after register allocation: no virtual registers may remain.
517     return MachineFunctionProperties().setNoVRegs();
518   }
519 };
520 
521 } // end anonymous namespace
522 
523 char MachineCopyPropagationLegacy::ID = 0;
524 
// Public handle other passes use to reference this pass by identity.
525 char &llvm::MachineCopyPropagationID = MachineCopyPropagationLegacy::ID;
526 
527 INITIALIZE_PASS(MachineCopyPropagationLegacy, DEBUG_TYPE,
528                 "Machine Copy Propagation Pass", false, false)
529 
ReadRegister(MCRegister Reg,MachineInstr & Reader,DebugType DT)530 void MachineCopyPropagation::ReadRegister(MCRegister Reg, MachineInstr &Reader,
531                                           DebugType DT) {
532   // If 'Reg' is defined by a copy, the copy is no longer a candidate
533   // for elimination. If a copy is "read" by a debug user, record the user
534   // for propagation.
535   for (MCRegUnit Unit : TRI->regunits(Reg)) {
536     if (MachineInstr *Copy = Tracker.findCopyForUnit(Unit, *TRI)) {
537       if (DT == RegularUse) {
538         LLVM_DEBUG(dbgs() << "MCP: Copy is used - not dead: "; Copy->dump());
539         MaybeDeadCopies.remove(Copy);
540       } else {
541         CopyDbgUsers[Copy].insert(&Reader);
542       }
543     }
544   }
545 }
546 
readSuccessorLiveIns(const MachineBasicBlock & MBB)547 void MachineCopyPropagation::readSuccessorLiveIns(
548     const MachineBasicBlock &MBB) {
549   if (MaybeDeadCopies.empty())
550     return;
551 
552   // If a copy result is livein to a successor, it is not dead.
553   for (const MachineBasicBlock *Succ : MBB.successors()) {
554     for (const auto &LI : Succ->liveins()) {
555       for (MCRegUnitMaskIterator U(LI.PhysReg, TRI); U.isValid(); ++U) {
556         auto [Unit, Mask] = *U;
557         if ((Mask & LI.LaneMask).any()) {
558           if (MachineInstr *Copy = Tracker.findCopyForUnit(Unit, *TRI))
559             MaybeDeadCopies.remove(Copy);
560         }
561       }
562     }
563   }
564 }
565 
566 /// Return true if \p PreviousCopy did copy register \p Src to register \p Def.
567 /// This fact may have been obscured by sub register usage or may not be true at
568 /// all even though Src and Def are subregisters of the registers used in
569 /// PreviousCopy. e.g.
570 /// isNopCopy("ecx = COPY eax", AX, CX) == true
571 /// isNopCopy("ecx = COPY eax", AH, CL) == false
isNopCopy(const MachineInstr & PreviousCopy,MCRegister Src,MCRegister Def,const TargetRegisterInfo * TRI,const TargetInstrInfo * TII,bool UseCopyInstr)572 static bool isNopCopy(const MachineInstr &PreviousCopy, MCRegister Src,
573                       MCRegister Def, const TargetRegisterInfo *TRI,
574                       const TargetInstrInfo *TII, bool UseCopyInstr) {
575 
576   std::optional<DestSourcePair> CopyOperands =
577       isCopyInstr(PreviousCopy, *TII, UseCopyInstr);
578   MCRegister PreviousSrc = CopyOperands->Source->getReg().asMCReg();
579   MCRegister PreviousDef = CopyOperands->Destination->getReg().asMCReg();
580   if (Src == PreviousSrc && Def == PreviousDef)
581     return true;
582   if (!TRI->isSubRegister(PreviousSrc, Src))
583     return false;
584   unsigned SubIdx = TRI->getSubRegIndex(PreviousSrc, Src);
585   return SubIdx == TRI->getSubRegIndex(PreviousDef, Def);
586 }
587 
588 /// Remove instruction \p Copy if there exists a previous copy that copies the
589 /// register \p Src to the register \p Def; This may happen indirectly by
590 /// copying the super registers.
/// \returns true if \p Copy was erased.
eraseIfRedundant(MachineInstr & Copy,MCRegister Src,MCRegister Def)591 bool MachineCopyPropagation::eraseIfRedundant(MachineInstr &Copy,
592                                               MCRegister Src, MCRegister Def) {
593   // Avoid eliminating a copy from/to a reserved registers as we cannot predict
594   // the value (Example: The sparc zero register is writable but stays zero).
595   if (MRI->isReserved(Src) || MRI->isReserved(Def))
596     return false;
597 
598   // Search for an existing copy.
599   MachineInstr *PrevCopy =
600       Tracker.findAvailCopy(Copy, Def, *TRI, *TII, UseCopyInstr);
601   if (!PrevCopy)
602     return false;
603 
604   auto PrevCopyOperands = isCopyInstr(*PrevCopy, *TII, UseCopyInstr);
605   // Check that the existing copy uses the correct sub registers.
  // A dead destination means the earlier value is not actually live here, so
  // the new copy is not redundant with it.
606   if (PrevCopyOperands->Destination->isDead())
607     return false;
608   if (!isNopCopy(*PrevCopy, Src, Def, TRI, TII, UseCopyInstr))
609     return false;
610 
611   LLVM_DEBUG(dbgs() << "MCP: copy is a NOP, removing: "; Copy.dump());
612 
613   // Copy was redundantly redefining either Src or Def. Remove earlier kill
614   // flags between Copy and PrevCopy because the value will be reused now.
615   std::optional<DestSourcePair> CopyOperands =
616       isCopyInstr(Copy, *TII, UseCopyInstr);
617   assert(CopyOperands);
618 
619   Register CopyDef = CopyOperands->Destination->getReg();
620   assert(CopyDef == Src || CopyDef == Def);
621   for (MachineInstr &MI :
622        make_range(PrevCopy->getIterator(), Copy.getIterator()))
623     MI.clearRegisterKills(CopyDef, TRI);
624 
625   // Clear undef flag from remaining copy if needed.
  // If the erased copy read a well-defined source, the surviving copy must
  // not keep claiming its source operand is undef.
626   if (!CopyOperands->Source->isUndef()) {
627     PrevCopy->getOperand(PrevCopyOperands->Source->getOperandNo())
628         .setIsUndef(false);
629   }
630 
631   Copy.eraseFromParent();
632   Changed = true;
633   ++NumDeletes;
634   return true;
635 }
636 
isBackwardPropagatableRegClassCopy(const MachineInstr & Copy,const MachineInstr & UseI,unsigned UseIdx)637 bool MachineCopyPropagation::isBackwardPropagatableRegClassCopy(
638     const MachineInstr &Copy, const MachineInstr &UseI, unsigned UseIdx) {
639   std::optional<DestSourcePair> CopyOperands =
640       isCopyInstr(Copy, *TII, UseCopyInstr);
641   Register Def = CopyOperands->Destination->getReg();
642 
643   if (const TargetRegisterClass *URC =
644           UseI.getRegClassConstraint(UseIdx, TII, TRI))
645     return URC->contains(Def);
646 
647   // We don't process further if UseI is a COPY, since forward copy propagation
648   // should handle that.
649   return false;
650 }
651 
652 /// Decide whether we should forward the source of \param Copy to its use in
653 /// \param UseI based on the physical register class constraints of the opcode
654 /// and avoiding introducing more cross-class COPYs.
isForwardableRegClassCopy(const MachineInstr & Copy,const MachineInstr & UseI,unsigned UseIdx)655 bool MachineCopyPropagation::isForwardableRegClassCopy(const MachineInstr &Copy,
656                                                        const MachineInstr &UseI,
657                                                        unsigned UseIdx) {
658   std::optional<DestSourcePair> CopyOperands =
659       isCopyInstr(Copy, *TII, UseCopyInstr);
660   Register CopySrcReg = CopyOperands->Source->getReg();
661 
662   // If the new register meets the opcode register constraints, then allow
663   // forwarding.
664   if (const TargetRegisterClass *URC =
665           UseI.getRegClassConstraint(UseIdx, TII, TRI))
666     return URC->contains(CopySrcReg);
667 
668   auto UseICopyOperands = isCopyInstr(UseI, *TII, UseCopyInstr);
669   if (!UseICopyOperands)
670     return false;
671 
672   /// COPYs don't have register class constraints, so if the user instruction
673   /// is a COPY, we just try to avoid introducing additional cross-class
674   /// COPYs.  For example:
675   ///
676   ///   RegClassA = COPY RegClassB  // Copy parameter
677   ///   ...
678   ///   RegClassB = COPY RegClassA  // UseI parameter
679   ///
680   /// which after forwarding becomes
681   ///
682   ///   RegClassA = COPY RegClassB
683   ///   ...
684   ///   RegClassB = COPY RegClassB
685   ///
686   /// so we have reduced the number of cross-class COPYs and potentially
687   /// introduced a nop COPY that can be removed.
688 
689   // Allow forwarding if src and dst belong to any common class, so long as they
690   // don't belong to any (possibly smaller) common class that requires copies to
691   // go via a different class.
692   Register UseDstReg = UseICopyOperands->Destination->getReg();
693   bool Found = false;
694   bool IsCrossClass = false;
695   for (const TargetRegisterClass *RC : TRI->regclasses()) {
696     if (RC->contains(CopySrcReg) && RC->contains(UseDstReg)) {
697       Found = true;
698       if (TRI->getCrossCopyRegClass(RC) != RC) {
699         IsCrossClass = true;
700         break;
701       }
702     }
703   }
704   if (!Found)
705     return false;
706   if (!IsCrossClass)
707     return true;
708   // The forwarded copy would be cross-class. Only do this if the original copy
709   // was also cross-class.
710   Register CopyDstReg = CopyOperands->Destination->getReg();
711   for (const TargetRegisterClass *RC : TRI->regclasses()) {
712     if (RC->contains(CopySrcReg) && RC->contains(CopyDstReg) &&
713         TRI->getCrossCopyRegClass(RC) != RC)
714       return true;
715   }
716   return false;
717 }
718 
719 /// Check that \p MI does not have implicit uses that overlap with it's \p Use
720 /// operand (the register being replaced), since these can sometimes be
721 /// implicitly tied to other operands.  For example, on AMDGPU:
722 ///
723 /// V_MOVRELS_B32_e32 %VGPR2, %M0<imp-use>, %EXEC<imp-use>, %VGPR2_VGPR3_VGPR4_VGPR5<imp-use>
724 ///
725 /// the %VGPR2 is implicitly tied to the larger reg operand, but we have no
726 /// way of knowing we need to update the latter when updating the former.
hasImplicitOverlap(const MachineInstr & MI,const MachineOperand & Use)727 bool MachineCopyPropagation::hasImplicitOverlap(const MachineInstr &MI,
728                                                 const MachineOperand &Use) {
729   for (const MachineOperand &MIUse : MI.uses())
730     if (&MIUse != &Use && MIUse.isReg() && MIUse.isImplicit() &&
731         MIUse.isUse() && TRI->regsOverlap(Use.getReg(), MIUse.getReg()))
732       return true;
733 
734   return false;
735 }
736 
737 /// For an MI that has multiple definitions, check whether \p MI has
738 /// a definition that overlaps with another of its definitions.
739 /// For example, on ARM: umull   r9, r9, lr, r0
740 /// The umull instruction is unpredictable unless RdHi and RdLo are different.
hasOverlappingMultipleDef(const MachineInstr & MI,const MachineOperand & MODef,Register Def)741 bool MachineCopyPropagation::hasOverlappingMultipleDef(
742     const MachineInstr &MI, const MachineOperand &MODef, Register Def) {
743   for (const MachineOperand &MIDef : MI.all_defs()) {
744     if ((&MIDef != &MODef) && MIDef.isReg() &&
745         TRI->regsOverlap(Def, MIDef.getReg()))
746       return true;
747   }
748 
749   return false;
750 }
751 
752 /// Return true if it is safe to update all users of the \p CopySrc register
753 /// in the given \p Copy instruction.
canUpdateSrcUsers(const MachineInstr & Copy,const MachineOperand & CopySrc)754 bool MachineCopyPropagation::canUpdateSrcUsers(const MachineInstr &Copy,
755                                                const MachineOperand &CopySrc) {
756   assert(CopySrc.isReg() && "Expected a register operand");
757   for (auto *SrcUser : Tracker.getSrcUsers(CopySrc.getReg(), *TRI)) {
758     if (hasImplicitOverlap(*SrcUser, CopySrc))
759       return false;
760 
761     for (MachineOperand &MO : SrcUser->uses()) {
762       if (!MO.isReg() || !MO.isUse() || MO.getReg() != CopySrc.getReg())
763         continue;
764       if (MO.isTied() || !MO.isRenamable() ||
765           !isBackwardPropagatableRegClassCopy(Copy, *SrcUser,
766                                               MO.getOperandNo()))
767         return false;
768     }
769   }
770   return true;
771 }
772 
/// Look for available copies whose destination register is used by \p MI and
/// replace the use in \p MI with the copy's source register.
///
/// Each candidate use must pass a chain of legality checks (renamability,
/// register-class compatibility, implicit-operand overlap, reserved-register
/// constraints) before the rewrite is applied in place.
void MachineCopyPropagation::forwardUses(MachineInstr &MI) {
  if (!Tracker.hasAnyCopies())
    return;

  // Look for non-tied explicit vreg uses that have an active COPY
  // instruction that defines the physical register allocated to them.
  // Replace the vreg with the source of the active COPY.
  for (unsigned OpIdx = 0, OpEnd = MI.getNumOperands(); OpIdx < OpEnd;
       ++OpIdx) {
    MachineOperand &MOUse = MI.getOperand(OpIdx);
    // Don't forward into undef use operands since doing so can cause problems
    // with the machine verifier, since it doesn't treat undef reads as reads,
    // so we can end up with a live range that ends on an undef read, leading to
    // an error that the live range doesn't end on a read of the live range
    // register.
    if (!MOUse.isReg() || MOUse.isTied() || MOUse.isUndef() || MOUse.isDef() ||
        MOUse.isImplicit())
      continue;

    if (!MOUse.getReg())
      continue;

    // Check that the register is marked 'renamable' so we know it is safe to
    // rename it without violating any constraints that aren't expressed in the
    // IR (e.g. ABI or opcode requirements).
    if (!MOUse.isRenamable())
      continue;

    // Ask the tracker for a COPY that defines this register and is still
    // valid (not clobbered) at MI.
    MachineInstr *Copy = Tracker.findAvailCopy(MI, MOUse.getReg().asMCReg(),
                                               *TRI, *TII, UseCopyInstr);
    if (!Copy)
      continue;

    std::optional<DestSourcePair> CopyOperands =
        isCopyInstr(*Copy, *TII, UseCopyInstr);
    Register CopyDstReg = CopyOperands->Destination->getReg();
    const MachineOperand &CopySrc = *CopyOperands->Source;
    Register CopySrcReg = CopySrc.getReg();

    Register ForwardedReg = CopySrcReg;
    // MI might use a sub-register of the Copy destination, in which case the
    // forwarded register is the matching sub-register of the Copy source.
    if (MOUse.getReg() != CopyDstReg) {
      unsigned SubRegIdx = TRI->getSubRegIndex(CopyDstReg, MOUse.getReg());
      assert(SubRegIdx &&
             "MI source is not a sub-register of Copy destination");
      ForwardedReg = TRI->getSubReg(CopySrcReg, SubRegIdx);
      // The copy source may not have a register for this sub-register index;
      // if so, the forwarding cannot be expressed and we bail out.
      if (!ForwardedReg) {
        LLVM_DEBUG(dbgs() << "MCP: Copy source does not have sub-register "
                          << TRI->getSubRegIndexName(SubRegIdx) << '\n');
        continue;
      }
    }

    // Don't forward COPYs of reserved regs unless they are constant.
    if (MRI->isReserved(CopySrcReg) && !MRI->isConstantPhysReg(CopySrcReg))
      continue;

    // Avoid introducing COPYs that the target would rather route through a
    // different register class (see isForwardableRegClassCopy).
    if (!isForwardableRegClassCopy(*Copy, MI, OpIdx))
      continue;

    // An implicit use aliasing MOUse may be tied to another operand we cannot
    // update consistently, so refuse to rewrite in that case.
    if (hasImplicitOverlap(MI, MOUse))
      continue;

    // Check that the instruction is not a copy that partially overwrites the
    // original copy source that we are about to use. The tracker mechanism
    // cannot cope with that.
    if (isCopyInstr(MI, *TII, UseCopyInstr) &&
        MI.modifiesRegister(CopySrcReg, TRI) &&
        !MI.definesRegister(CopySrcReg, /*TRI=*/nullptr)) {
      LLVM_DEBUG(dbgs() << "MCP: Copy source overlap with dest in " << MI);
      continue;
    }

    // Debug counter allows bisecting miscompiles down to a single forwarding.
    if (!DebugCounter::shouldExecute(FwdCounter)) {
      LLVM_DEBUG(dbgs() << "MCP: Skipping forwarding due to debug counter:\n  "
                        << MI);
      continue;
    }

    LLVM_DEBUG(dbgs() << "MCP: Replacing " << printReg(MOUse.getReg(), TRI)
                      << "\n     with " << printReg(ForwardedReg, TRI)
                      << "\n     in " << MI << "     from " << *Copy);

    MOUse.setReg(ForwardedReg);

    // The rewritten operand inherits the source's renamability/undef state.
    if (!CopySrc.isRenamable())
      MOUse.setIsRenamable(false);
    MOUse.setIsUndef(CopySrc.isUndef());

    LLVM_DEBUG(dbgs() << "MCP: After replacement: " << MI << "\n");

    // Clear kill markers that may have been invalidated.
    for (MachineInstr &KMI :
         make_range(Copy->getIterator(), std::next(MI.getIterator())))
      KMI.clearRegisterKills(CopySrcReg, TRI);

    ++NumCopyForwards;
    Changed = true;
  }
}
876 
/// Forward-propagate copies within \p MBB: forward copy sources into uses,
/// eliminate redundant COPYs, and delete copies proven dead at the block end.
void MachineCopyPropagation::ForwardCopyPropagateBlock(MachineBasicBlock &MBB) {
  LLVM_DEBUG(dbgs() << "MCP: ForwardCopyPropagateBlock " << MBB.getName()
                    << "\n");

  for (MachineInstr &MI : llvm::make_early_inc_range(MBB)) {
    // Analyze copies (which don't overlap themselves).
    std::optional<DestSourcePair> CopyOperands =
        isCopyInstr(MI, *TII, UseCopyInstr);
    if (CopyOperands) {
      Register RegSrc = CopyOperands->Source->getReg();
      Register RegDef = CopyOperands->Destination->getReg();
      if (!TRI->regsOverlap(RegDef, RegSrc)) {
        assert(RegDef.isPhysical() && RegSrc.isPhysical() &&
              "MachineCopyPropagation should be run after register allocation!");

        MCRegister Def = RegDef.asMCReg();
        MCRegister Src = RegSrc.asMCReg();

        // The two copies cancel out and the source of the first copy
        // hasn't been overridden, eliminate the second one. e.g.
        //  %ecx = COPY %eax
        //  ... nothing clobbered eax.
        //  %eax = COPY %ecx
        // =>
        //  %ecx = COPY %eax
        //
        // or
        //
        //  %ecx = COPY %eax
        //  ... nothing clobbered eax.
        //  %ecx = COPY %eax
        // =>
        //  %ecx = COPY %eax
        if (eraseIfRedundant(MI, Def, Src) || eraseIfRedundant(MI, Src, Def))
          continue;
      }
    }

    // Clobber any earlyclobber regs first.
    for (const MachineOperand &MO : MI.operands())
      if (MO.isReg() && MO.isEarlyClobber()) {
        MCRegister Reg = MO.getReg().asMCReg();
        // If we have a tied earlyclobber, that means it is also read by this
        // instruction, so we need to make sure we don't remove it as dead
        // later.
        if (MO.isTied())
          ReadRegister(Reg, MI, RegularUse);
        Tracker.clobberRegister(Reg, *TRI, *TII, UseCopyInstr);
      }

    forwardUses(MI);

    // Attempt to canonicalize/optimize the instruction now its arguments have
    // been mutated.  This may convert MI from a non-copy to a copy instruction.
    if (TII->simplifyInstruction(MI)) {
      Changed = true;
      LLVM_DEBUG(dbgs() << "MCP: After simplifyInstruction: " << MI);
    }

    // Re-query the copy operands: forwardUses/simplifyInstruction may have
    // turned MI into a (different) copy.
    CopyOperands = isCopyInstr(MI, *TII, UseCopyInstr);
    if (CopyOperands) {
      Register RegSrc = CopyOperands->Source->getReg();
      Register RegDef = CopyOperands->Destination->getReg();

      if (!TRI->regsOverlap(RegDef, RegSrc)) {
        // Copy is now a candidate for deletion.
        MCRegister Def = RegDef.asMCReg();
        if (!MRI->isReserved(Def))
          MaybeDeadCopies.insert(&MI);
      }
    }

    // Collect the registers MI defines (to clobber below) and note reads so
    // tracked copies whose values are consumed aren't deleted as dead.
    SmallVector<Register, 4> Defs;
    const MachineOperand *RegMask = nullptr;
    for (const MachineOperand &MO : MI.operands()) {
      if (MO.isRegMask())
        RegMask = &MO;
      if (!MO.isReg())
        continue;
      Register Reg = MO.getReg();
      if (!Reg)
        continue;

      assert(!Reg.isVirtual() &&
             "MachineCopyPropagation should be run after register allocation!");

      if (MO.isDef() && !MO.isEarlyClobber()) {
        // Skip invalidating constant registers.
        if (!MRI->isConstantPhysReg(Reg)) {
          Defs.push_back(Reg.asMCReg());
          continue;
        }
      } else if (MO.readsReg())
        ReadRegister(Reg.asMCReg(), MI, MO.isDebug() ? DebugUse : RegularUse);
    }

    // The instruction has a register mask operand which means that it clobbers
    // a large set of registers.  Treat clobbered registers the same way as
    // defined registers.
    if (RegMask) {
      BitVector &PreservedRegUnits =
          Tracker.getPreservedRegUnits(*RegMask, *TRI);

      // Erase any MaybeDeadCopies whose destination register is clobbered.
      for (SmallSetVector<MachineInstr *, 8>::iterator DI =
               MaybeDeadCopies.begin();
           DI != MaybeDeadCopies.end();) {
        MachineInstr *MaybeDead = *DI;
        std::optional<DestSourcePair> CopyOperands =
            isCopyInstr(*MaybeDead, *TII, UseCopyInstr);
        MCRegister Reg = CopyOperands->Destination->getReg().asMCReg();
        assert(!MRI->isReserved(Reg));

        if (!RegMask->clobbersPhysReg(Reg)) {
          ++DI;
          continue;
        }

        // Invalidate all entries in the copy map which are not preserved by
        // this register mask.
        bool MIRefedinCopyInfo = false;
        for (unsigned RegUnit : TRI->regunits(Reg)) {
          if (!PreservedRegUnits.test(RegUnit))
            Tracker.clobberRegUnit(RegUnit, *TRI, *TII, UseCopyInstr);
          else {
            if (MaybeDead == Tracker.findCopyForUnit(RegUnit, *TRI)) {
              MIRefedinCopyInfo = true;
            }
          }
        }

        // erase() will return the next valid iterator pointing to the next
        // element after the erased one.
        DI = MaybeDeadCopies.erase(DI);

        // Preserved by RegMask, DO NOT remove copy
        if (MIRefedinCopyInfo)
          continue;

        LLVM_DEBUG(dbgs() << "MCP: Removing copy due to regmask clobbering: "
                          << *MaybeDead);

        MaybeDead->eraseFromParent();
        Changed = true;
        ++NumDeletes;
      }
    }

    // Any previous copy definition or reading the Defs is no longer available.
    for (MCRegister Reg : Defs)
      Tracker.clobberRegister(Reg, *TRI, *TII, UseCopyInstr);

    // Start tracking MI itself if it is (now) a non-overlapping copy; done
    // after clobbering so the new entry isn't invalidated by MI's own defs.
    if (CopyOperands) {
      Register RegSrc = CopyOperands->Source->getReg();
      Register RegDef = CopyOperands->Destination->getReg();
      if (!TRI->regsOverlap(RegDef, RegSrc)) {
        Tracker.trackCopy(&MI, *TRI, *TII, UseCopyInstr);
      }
    }
  }

  bool TracksLiveness = MRI->tracksLiveness();

  // If liveness is tracked, we can use the live-in lists to know which
  // copies aren't dead.
  if (TracksLiveness)
    readSuccessorLiveIns(MBB);

  // If MBB doesn't have successors, delete copies whose defs are not used.
  // If MBB does have successors, we can only delete copies if we are able to
  // use liveness information from successors to confirm they are really dead.
  if (MBB.succ_empty() || TracksLiveness) {
    for (MachineInstr *MaybeDead : MaybeDeadCopies) {
      LLVM_DEBUG(dbgs() << "MCP: Removing copy due to no live-out succ: ";
                 MaybeDead->dump());

      std::optional<DestSourcePair> CopyOperands =
          isCopyInstr(*MaybeDead, *TII, UseCopyInstr);
      assert(CopyOperands);

      Register SrcReg = CopyOperands->Source->getReg();
      Register DestReg = CopyOperands->Destination->getReg();
      assert(!MRI->isReserved(DestReg));

      // Update matching debug values, if any.
      const auto &DbgUsers = CopyDbgUsers[MaybeDead];
      SmallVector<MachineInstr *> MaybeDeadDbgUsers(DbgUsers.begin(),
                                                    DbgUsers.end());
      MRI->updateDbgUsersToReg(DestReg.asMCReg(), SrcReg.asMCReg(),
                               MaybeDeadDbgUsers);

      MaybeDead->eraseFromParent();
      Changed = true;
      ++NumDeletes;
    }
  }

  // Per-block state must not leak into the next block.
  MaybeDeadCopies.clear();
  CopyDbgUsers.clear();
  Tracker.clear();
}
1078 
isBackwardPropagatableCopy(const DestSourcePair & CopyOperands,const MachineRegisterInfo & MRI)1079 static bool isBackwardPropagatableCopy(const DestSourcePair &CopyOperands,
1080                                        const MachineRegisterInfo &MRI) {
1081   Register Def = CopyOperands.Destination->getReg();
1082   Register Src = CopyOperands.Source->getReg();
1083 
1084   if (!Def || !Src)
1085     return false;
1086 
1087   if (MRI.isReserved(Def) || MRI.isReserved(Src))
1088     return false;
1089 
1090   return CopyOperands.Source->isRenamable() && CopyOperands.Source->isKill();
1091 }
1092 
/// Backward propagation step for \p MI: if MI defines the source of a tracked
/// copy (r0 = COPY r1 where MI defines r1 and r1 is killed by the copy),
/// rewrite MI to define the copy's destination directly, mark the copy as a
/// deletion candidate, and update intervening users of the source register.
void MachineCopyPropagation::propagateDefs(MachineInstr &MI) {
  if (!Tracker.hasAnyCopies())
    return;

  for (unsigned OpIdx = 0, OpEnd = MI.getNumOperands(); OpIdx != OpEnd;
       ++OpIdx) {
    MachineOperand &MODef = MI.getOperand(OpIdx);

    // Only explicit register defs are candidates for renaming.
    if (!MODef.isReg() || MODef.isUse())
      continue;

    // Ignore non-trivial cases.
    if (MODef.isTied() || MODef.isUndef() || MODef.isImplicit())
      continue;

    if (!MODef.getReg())
      continue;

    // We only handle if the register comes from a vreg.
    if (!MODef.isRenamable())
      continue;

    // Find a copy (seen later in the block, since we scan backwards) that
    // reads this def and is still available.
    MachineInstr *Copy = Tracker.findAvailBackwardCopy(
        MI, MODef.getReg().asMCReg(), *TRI, *TII, UseCopyInstr);
    if (!Copy)
      continue;

    std::optional<DestSourcePair> CopyOperands =
        isCopyInstr(*Copy, *TII, UseCopyInstr);
    Register Def = CopyOperands->Destination->getReg();
    Register Src = CopyOperands->Source->getReg();

    // Require an exact register match (not just an overlap) with the copy
    // source.
    if (MODef.getReg() != Src)
      continue;

    if (!isBackwardPropagatableRegClassCopy(*Copy, MI, OpIdx))
      continue;

    // Implicit operands aliasing the def may be tied elsewhere; bail out.
    if (hasImplicitOverlap(MI, MODef))
      continue;

    // Renaming must not make two defs of MI overlap (e.g. ARM umull requires
    // distinct RdHi/RdLo).
    if (hasOverlappingMultipleDef(MI, MODef, Def))
      continue;

    // All users of Src between MI and the copy must be safely renamable too.
    if (!canUpdateSrcUsers(*Copy, *CopyOperands->Source))
      continue;

    LLVM_DEBUG(dbgs() << "MCP: Replacing " << printReg(MODef.getReg(), TRI)
                      << "\n     with " << printReg(Def, TRI) << "\n     in "
                      << MI << "     from " << *Copy);

    MODef.setReg(Def);
    MODef.setIsRenamable(CopyOperands->Destination->isRenamable());

    // Rewrite every tracked intermediate use of Src to the new register.
    for (auto *SrcUser : Tracker.getSrcUsers(Src, *TRI)) {
      for (MachineOperand &MO : SrcUser->uses()) {
        if (!MO.isReg() || !MO.isUse() || MO.getReg() != Src)
          continue;
        MO.setReg(Def);
        MO.setIsRenamable(CopyOperands->Destination->isRenamable());
      }
    }

    LLVM_DEBUG(dbgs() << "MCP: After replacement: " << MI << "\n");
    // The copy is now redundant; queue it for removal.
    MaybeDeadCopies.insert(Copy);
    Changed = true;
    ++NumCopyBackwardPropagated;
  }
}
1162 
/// Backward-propagate copies within \p MBB by walking the block in reverse,
/// tracking candidate copies and rewriting earlier defs via propagateDefs,
/// then deleting copies made redundant.
void MachineCopyPropagation::BackwardCopyPropagateBlock(
    MachineBasicBlock &MBB) {
  LLVM_DEBUG(dbgs() << "MCP: BackwardCopyPropagateBlock " << MBB.getName()
                    << "\n");

  for (MachineInstr &MI : llvm::make_early_inc_range(llvm::reverse(MBB))) {
    // Ignore non-trivial COPYs.
    std::optional<DestSourcePair> CopyOperands =
        isCopyInstr(MI, *TII, UseCopyInstr);
    if (CopyOperands && MI.getNumImplicitOperands() == 0) {
      Register DefReg = CopyOperands->Destination->getReg();
      Register SrcReg = CopyOperands->Source->getReg();

      if (!TRI->regsOverlap(DefReg, SrcReg)) {
        // Unlike forward cp, we don't invoke propagateDefs here,
        // just let forward cp do COPY-to-COPY propagation.
        if (isBackwardPropagatableCopy(*CopyOperands, *MRI)) {
          // Invalidate stale entries for both registers before tracking this
          // copy as the new candidate.
          Tracker.invalidateRegister(SrcReg.asMCReg(), *TRI, *TII,
                                     UseCopyInstr);
          Tracker.invalidateRegister(DefReg.asMCReg(), *TRI, *TII,
                                     UseCopyInstr);
          Tracker.trackCopy(&MI, *TRI, *TII, UseCopyInstr);
          continue;
        }
      }
    }

    // Invalidate any earlyclobber regs first.
    for (const MachineOperand &MO : MI.operands())
      if (MO.isReg() && MO.isEarlyClobber()) {
        MCRegister Reg = MO.getReg().asMCReg();
        if (!Reg)
          continue;
        Tracker.invalidateRegister(Reg, *TRI, *TII, UseCopyInstr);
      }

    propagateDefs(MI);
    // Update tracker state from MI's operands (after any rewriting above).
    for (const MachineOperand &MO : MI.operands()) {
      if (!MO.isReg())
        continue;

      if (!MO.getReg())
        continue;

      // A def of a register ends the backward availability of copies
      // involving it.
      if (MO.isDef())
        Tracker.invalidateRegister(MO.getReg().asMCReg(), *TRI, *TII,
                                   UseCopyInstr);

      if (MO.readsReg()) {
        if (MO.isDebug()) {
          //  Check if the register in the debug instruction is utilized
          // in a copy instruction, so we can update the debug info if the
          // register is changed.
          for (MCRegUnit Unit : TRI->regunits(MO.getReg().asMCReg())) {
            if (auto *Copy = Tracker.findCopyDefViaUnit(Unit, *TRI)) {
              CopyDbgUsers[Copy].insert(&MI);
            }
          }
        } else if (!Tracker.trackSrcUsers(MO.getReg().asMCReg(), MI, *TRI, *TII,
                                          UseCopyInstr)) {
          // If we can't track the source users, invalidate the register.
          Tracker.invalidateRegister(MO.getReg().asMCReg(), *TRI, *TII,
                                     UseCopyInstr);
        }
      }
    }
  }

  // Delete the copies made redundant by propagateDefs, retargeting any debug
  // users of the old register first.
  for (auto *Copy : MaybeDeadCopies) {
    std::optional<DestSourcePair> CopyOperands =
        isCopyInstr(*Copy, *TII, UseCopyInstr);
    Register Src = CopyOperands->Source->getReg();
    Register Def = CopyOperands->Destination->getReg();
    const auto &DbgUsers = CopyDbgUsers[Copy];
    SmallVector<MachineInstr *> MaybeDeadDbgUsers(DbgUsers.begin(),
                                                  DbgUsers.end());

    MRI->updateDbgUsersToReg(Src.asMCReg(), Def.asMCReg(), MaybeDeadDbgUsers);
    Copy->eraseFromParent();
    ++NumDeletes;
  }

  // Per-block state must not leak into the next block.
  MaybeDeadCopies.clear();
  CopyDbgUsers.clear();
  Tracker.clear();
}
1249 
printSpillReloadChain(DenseMap<MachineInstr *,SmallVector<MachineInstr * >> & SpillChain,DenseMap<MachineInstr *,SmallVector<MachineInstr * >> & ReloadChain,MachineInstr * Leader)1250 static void LLVM_ATTRIBUTE_UNUSED printSpillReloadChain(
1251     DenseMap<MachineInstr *, SmallVector<MachineInstr *>> &SpillChain,
1252     DenseMap<MachineInstr *, SmallVector<MachineInstr *>> &ReloadChain,
1253     MachineInstr *Leader) {
1254   auto &SC = SpillChain[Leader];
1255   auto &RC = ReloadChain[Leader];
1256   for (auto I = SC.rbegin(), E = SC.rend(); I != E; ++I)
1257     (*I)->dump();
1258   for (MachineInstr *MI : RC)
1259     MI->dump();
1260 }
1261 
1262 // Remove spill-reload like copy chains. For example
1263 // r0 = COPY r1
1264 // r1 = COPY r2
1265 // r2 = COPY r3
1266 // r3 = COPY r4
1267 // <def-use r4>
1268 // r4 = COPY r3
1269 // r3 = COPY r2
1270 // r2 = COPY r1
1271 // r1 = COPY r0
1272 // will be folded into
1273 // r0 = COPY r1
1274 // r1 = COPY r4
1275 // <def-use r4>
1276 // r4 = COPY r1
1277 // r1 = COPY r0
1278 // TODO: Currently we don't track usage of r0 outside the chain, so we
1279 // conservatively keep its value as it was before the rewrite.
1280 //
1281 // The algorithm is trying to keep
1282 // property#1: No Def of spill COPY in the chain is used or defined until the
1283 // paired reload COPY in the chain uses the Def.
1284 //
1285 // property#2: NO Source of COPY in the chain is used or defined until the next
1286 // COPY in the chain defines the Source, except the innermost spill-reload
1287 // pair.
1288 //
1289 // The algorithm is conducted by checking every COPY inside the MBB, assuming
1290 // the COPY is a reload COPY, then try to find paired spill COPY by searching
1291 // the COPY defines the Src of the reload COPY backward. If such pair is found,
// it either belongs to an existing chain or starts a new chain, depending on
// whether the last available COPY uses the Def of the reload COPY.
1294 // Implementation notes, we use CopyTracker::findLastDefCopy(Reg, ...) to find
1295 // out last COPY that defines Reg; we use CopyTracker::findLastUseCopy(Reg, ...)
1296 // to find out last COPY that uses Reg. When we are encountered with a Non-COPY
1297 // instruction, we check registers in the operands of this instruction. If this
1298 // Reg is defined by a COPY, we untrack this Reg via
1299 // CopyTracker::clobberRegister(Reg, ...).
EliminateSpillageCopies(MachineBasicBlock & MBB)1300 void MachineCopyPropagation::EliminateSpillageCopies(MachineBasicBlock &MBB) {
1301   // ChainLeader maps MI inside a spill-reload chain to its innermost reload COPY.
1302   // Thus we can track if a MI belongs to an existing spill-reload chain.
1303   DenseMap<MachineInstr *, MachineInstr *> ChainLeader;
1304   // SpillChain maps innermost reload COPY of a spill-reload chain to a sequence
1305   // of COPYs that forms spills of a spill-reload chain.
1306   // ReloadChain maps innermost reload COPY of a spill-reload chain to a
1307   // sequence of COPYs that forms reloads of a spill-reload chain.
1308   DenseMap<MachineInstr *, SmallVector<MachineInstr *>> SpillChain, ReloadChain;
1309   // If a COPY's Source has use or def until next COPY defines the Source,
1310   // we put the COPY in this set to keep property#2.
1311   DenseSet<const MachineInstr *> CopySourceInvalid;
1312 
1313   auto TryFoldSpillageCopies =
1314       [&, this](const SmallVectorImpl<MachineInstr *> &SC,
1315                 const SmallVectorImpl<MachineInstr *> &RC) {
1316         assert(SC.size() == RC.size() && "Spill-reload should be paired");
1317 
1318         // We need at least 3 pairs of copies for the transformation to apply,
1319         // because the first outermost pair cannot be removed since we don't
1320         // recolor outside of the chain and that we need at least one temporary
1321         // spill slot to shorten the chain. If we only have a chain of two
1322         // pairs, we already have the shortest sequence this code can handle:
1323         // the outermost pair for the temporary spill slot, and the pair that
1324         // use that temporary spill slot for the other end of the chain.
        // TODO: We might be able to simplify to one spill-reload pair if we
        // collect more information about the outermost COPY.
1327         if (SC.size() <= 2)
1328           return;
1329 
1330         // If violate property#2, we don't fold the chain.
1331         for (const MachineInstr *Spill : drop_begin(SC))
1332           if (CopySourceInvalid.count(Spill))
1333             return;
1334 
1335         for (const MachineInstr *Reload : drop_end(RC))
1336           if (CopySourceInvalid.count(Reload))
1337             return;
1338 
1339         auto CheckCopyConstraint = [this](Register Def, Register Src) {
1340           for (const TargetRegisterClass *RC : TRI->regclasses()) {
1341             if (RC->contains(Def) && RC->contains(Src))
1342               return true;
1343           }
1344           return false;
1345         };
1346 
1347         auto UpdateReg = [](MachineInstr *MI, const MachineOperand *Old,
1348                             const MachineOperand *New) {
1349           for (MachineOperand &MO : MI->operands()) {
1350             if (&MO == Old)
1351               MO.setReg(New->getReg());
1352           }
1353         };
1354 
1355         std::optional<DestSourcePair> InnerMostSpillCopy =
1356             isCopyInstr(*SC[0], *TII, UseCopyInstr);
1357         std::optional<DestSourcePair> OuterMostSpillCopy =
1358             isCopyInstr(*SC.back(), *TII, UseCopyInstr);
1359         std::optional<DestSourcePair> InnerMostReloadCopy =
1360             isCopyInstr(*RC[0], *TII, UseCopyInstr);
1361         std::optional<DestSourcePair> OuterMostReloadCopy =
1362             isCopyInstr(*RC.back(), *TII, UseCopyInstr);
1363         if (!CheckCopyConstraint(OuterMostSpillCopy->Source->getReg(),
1364                                  InnerMostSpillCopy->Source->getReg()) ||
1365             !CheckCopyConstraint(InnerMostReloadCopy->Destination->getReg(),
1366                                  OuterMostReloadCopy->Destination->getReg()))
1367           return;
1368 
1369         SpillageChainsLength += SC.size() + RC.size();
1370         NumSpillageChains += 1;
1371         UpdateReg(SC[0], InnerMostSpillCopy->Destination,
1372                   OuterMostSpillCopy->Source);
1373         UpdateReg(RC[0], InnerMostReloadCopy->Source,
1374                   OuterMostReloadCopy->Destination);
1375 
1376         for (size_t I = 1; I < SC.size() - 1; ++I) {
1377           SC[I]->eraseFromParent();
1378           RC[I]->eraseFromParent();
1379           NumDeletes += 2;
1380         }
1381       };
1382 
1383   auto IsFoldableCopy = [this](const MachineInstr &MaybeCopy) {
1384     if (MaybeCopy.getNumImplicitOperands() > 0)
1385       return false;
1386     std::optional<DestSourcePair> CopyOperands =
1387         isCopyInstr(MaybeCopy, *TII, UseCopyInstr);
1388     if (!CopyOperands)
1389       return false;
1390     Register Src = CopyOperands->Source->getReg();
1391     Register Def = CopyOperands->Destination->getReg();
1392     return Src && Def && !TRI->regsOverlap(Src, Def) &&
1393            CopyOperands->Source->isRenamable() &&
1394            CopyOperands->Destination->isRenamable();
1395   };
1396 
1397   auto IsSpillReloadPair = [&, this](const MachineInstr &Spill,
1398                                      const MachineInstr &Reload) {
1399     if (!IsFoldableCopy(Spill) || !IsFoldableCopy(Reload))
1400       return false;
1401     std::optional<DestSourcePair> SpillCopy =
1402         isCopyInstr(Spill, *TII, UseCopyInstr);
1403     std::optional<DestSourcePair> ReloadCopy =
1404         isCopyInstr(Reload, *TII, UseCopyInstr);
1405     if (!SpillCopy || !ReloadCopy)
1406       return false;
1407     return SpillCopy->Source->getReg() == ReloadCopy->Destination->getReg() &&
1408            SpillCopy->Destination->getReg() == ReloadCopy->Source->getReg();
1409   };
1410 
1411   auto IsChainedCopy = [&, this](const MachineInstr &Prev,
1412                                  const MachineInstr &Current) {
1413     if (!IsFoldableCopy(Prev) || !IsFoldableCopy(Current))
1414       return false;
1415     std::optional<DestSourcePair> PrevCopy =
1416         isCopyInstr(Prev, *TII, UseCopyInstr);
1417     std::optional<DestSourcePair> CurrentCopy =
1418         isCopyInstr(Current, *TII, UseCopyInstr);
1419     if (!PrevCopy || !CurrentCopy)
1420       return false;
1421     return PrevCopy->Source->getReg() == CurrentCopy->Destination->getReg();
1422   };
1423 
1424   for (MachineInstr &MI : llvm::make_early_inc_range(MBB)) {
1425     std::optional<DestSourcePair> CopyOperands =
1426         isCopyInstr(MI, *TII, UseCopyInstr);
1427 
1428     // Update track information via non-copy instruction.
1429     SmallSet<Register, 8> RegsToClobber;
1430     if (!CopyOperands) {
1431       for (const MachineOperand &MO : MI.operands()) {
1432         if (!MO.isReg())
1433           continue;
1434         Register Reg = MO.getReg();
1435         if (!Reg)
1436           continue;
1437         MachineInstr *LastUseCopy =
1438             Tracker.findLastSeenUseInCopy(Reg.asMCReg(), *TRI);
1439         if (LastUseCopy) {
1440           LLVM_DEBUG(dbgs() << "MCP: Copy source of\n");
1441           LLVM_DEBUG(LastUseCopy->dump());
1442           LLVM_DEBUG(dbgs() << "might be invalidated by\n");
1443           LLVM_DEBUG(MI.dump());
1444           CopySourceInvalid.insert(LastUseCopy);
1445         }
1446         // Must be noted Tracker.clobberRegister(Reg, ...) removes tracking of
1447         // Reg, i.e, COPY that defines Reg is removed from the mapping as well
1448         // as marking COPYs that uses Reg unavailable.
1449         // We don't invoke CopyTracker::clobberRegister(Reg, ...) if Reg is not
1450         // defined by a previous COPY, since we don't want to make COPYs uses
1451         // Reg unavailable.
1452         if (Tracker.findLastSeenDefInCopy(MI, Reg.asMCReg(), *TRI, *TII,
1453                                     UseCopyInstr))
1454           // Thus we can keep the property#1.
1455           RegsToClobber.insert(Reg);
1456       }
1457       for (Register Reg : RegsToClobber) {
1458         Tracker.clobberRegister(Reg, *TRI, *TII, UseCopyInstr);
1459         LLVM_DEBUG(dbgs() << "MCP: Removed tracking of " << printReg(Reg, TRI)
1460                           << "\n");
1461       }
1462       continue;
1463     }
1464 
1465     Register Src = CopyOperands->Source->getReg();
1466     Register Def = CopyOperands->Destination->getReg();
1467     // Check if we can find a pair spill-reload copy.
1468     LLVM_DEBUG(dbgs() << "MCP: Searching paired spill for reload: ");
1469     LLVM_DEBUG(MI.dump());
1470     MachineInstr *MaybeSpill =
1471         Tracker.findLastSeenDefInCopy(MI, Src.asMCReg(), *TRI, *TII, UseCopyInstr);
1472     bool MaybeSpillIsChained = ChainLeader.count(MaybeSpill);
1473     if (!MaybeSpillIsChained && MaybeSpill &&
1474         IsSpillReloadPair(*MaybeSpill, MI)) {
1475       // Check if we already have an existing chain. Now we have a
1476       // spill-reload pair.
1477       // L2: r2 = COPY r3
1478       // L5: r3 = COPY r2
1479       // Looking for a valid COPY before L5 which uses r3.
      // There can be several cases.
1481       // Case #1:
1482       // No COPY is found, which can be r3 is def-use between (L2, L5), we
1483       // create a new chain for L2 and L5.
1484       // Case #2:
1485       // L2: r2 = COPY r3
1486       // L5: r3 = COPY r2
1487       // Such COPY is found and is L2, we create a new chain for L2 and L5.
1488       // Case #3:
1489       // L2: r2 = COPY r3
1490       // L3: r1 = COPY r3
1491       // L5: r3 = COPY r2
1492       // we create a new chain for L2 and L5.
1493       // Case #4:
1494       // L2: r2 = COPY r3
1495       // L3: r1 = COPY r3
1496       // L4: r3 = COPY r1
1497       // L5: r3 = COPY r2
1498       // Such COPY won't be found since L4 defines r3. we create a new chain
1499       // for L2 and L5.
1500       // Case #5:
1501       // L2: r2 = COPY r3
1502       // L3: r3 = COPY r1
1503       // L4: r1 = COPY r3
1504       // L5: r3 = COPY r2
1505       // COPY is found and is L4 which belongs to an existing chain, we add
1506       // L2 and L5 to this chain.
1507       LLVM_DEBUG(dbgs() << "MCP: Found spill: ");
1508       LLVM_DEBUG(MaybeSpill->dump());
1509       MachineInstr *MaybePrevReload =
1510           Tracker.findLastSeenUseInCopy(Def.asMCReg(), *TRI);
1511       auto Leader = ChainLeader.find(MaybePrevReload);
1512       MachineInstr *L = nullptr;
1513       if (Leader == ChainLeader.end() ||
1514           (MaybePrevReload && !IsChainedCopy(*MaybePrevReload, MI))) {
1515         L = &MI;
1516         assert(!SpillChain.count(L) &&
1517                "SpillChain should not have contained newly found chain");
1518       } else {
1519         assert(MaybePrevReload &&
1520                "Found a valid leader through nullptr should not happend");
1521         L = Leader->second;
1522         assert(SpillChain[L].size() > 0 &&
1523                "Existing chain's length should be larger than zero");
1524       }
1525       assert(!ChainLeader.count(&MI) && !ChainLeader.count(MaybeSpill) &&
1526              "Newly found paired spill-reload should not belong to any chain "
1527              "at this point");
1528       ChainLeader.insert({MaybeSpill, L});
1529       ChainLeader.insert({&MI, L});
1530       SpillChain[L].push_back(MaybeSpill);
1531       ReloadChain[L].push_back(&MI);
1532       LLVM_DEBUG(dbgs() << "MCP: Chain " << L << " now is:\n");
1533       LLVM_DEBUG(printSpillReloadChain(SpillChain, ReloadChain, L));
1534     } else if (MaybeSpill && !MaybeSpillIsChained) {
      // MaybeSpill is unable to pair with MI; that is to say, adding MI makes
      // the chain invalid.
      // The COPY that defines Src is no longer considered a candidate for a
      // valid chain, since we expect the Def of a spill copy not to be used by
      // any COPY instruction until a reload copy. For example:
1540       // L1: r1 = COPY r2
1541       // L2: r3 = COPY r1
1542       // If we later have
1543       // L1: r1 = COPY r2
1544       // L2: r3 = COPY r1
1545       // L3: r2 = COPY r1
1546       // L1 and L3 can't be a valid spill-reload pair.
1547       // Thus we keep the property#1.
1548       LLVM_DEBUG(dbgs() << "MCP: Not paired spill-reload:\n");
1549       LLVM_DEBUG(MaybeSpill->dump());
1550       LLVM_DEBUG(MI.dump());
1551       Tracker.clobberRegister(Src.asMCReg(), *TRI, *TII, UseCopyInstr);
1552       LLVM_DEBUG(dbgs() << "MCP: Removed tracking of " << printReg(Src, TRI)
1553                         << "\n");
1554     }
1555     Tracker.trackCopy(&MI, *TRI, *TII, UseCopyInstr);
1556   }
1557 
1558   for (auto I = SpillChain.begin(), E = SpillChain.end(); I != E; ++I) {
1559     auto &SC = I->second;
1560     assert(ReloadChain.count(I->first) &&
1561            "Reload chain of the same leader should exist");
1562     auto &RC = ReloadChain[I->first];
1563     TryFoldSpillageCopies(SC, RC);
1564   }
1565 
1566   MaybeDeadCopies.clear();
1567   CopyDbgUsers.clear();
1568   Tracker.clear();
1569 }
1570 
runOnMachineFunction(MachineFunction & MF)1571 bool MachineCopyPropagationLegacy::runOnMachineFunction(MachineFunction &MF) {
1572   if (skipFunction(MF.getFunction()))
1573     return false;
1574 
1575   return MachineCopyPropagation(UseCopyInstr).run(MF);
1576 }
1577 
1578 PreservedAnalyses
run(MachineFunction & MF,MachineFunctionAnalysisManager &)1579 MachineCopyPropagationPass::run(MachineFunction &MF,
1580                                 MachineFunctionAnalysisManager &) {
1581   MFPropsModifier _(*this, MF);
1582   if (!MachineCopyPropagation(UseCopyInstr).run(MF))
1583     return PreservedAnalyses::all();
1584   auto PA = getMachineFunctionPassPreservedAnalyses();
1585   PA.preserveSet<CFGAnalyses>();
1586   return PA;
1587 }
1588 
run(MachineFunction & MF)1589 bool MachineCopyPropagation::run(MachineFunction &MF) {
1590   bool isSpillageCopyElimEnabled = false;
1591   switch (EnableSpillageCopyElimination) {
1592   case cl::BOU_UNSET:
1593     isSpillageCopyElimEnabled =
1594         MF.getSubtarget().enableSpillageCopyElimination();
1595     break;
1596   case cl::BOU_TRUE:
1597     isSpillageCopyElimEnabled = true;
1598     break;
1599   case cl::BOU_FALSE:
1600     isSpillageCopyElimEnabled = false;
1601     break;
1602   }
1603 
1604   Changed = false;
1605 
1606   TRI = MF.getSubtarget().getRegisterInfo();
1607   TII = MF.getSubtarget().getInstrInfo();
1608   MRI = &MF.getRegInfo();
1609 
1610   for (MachineBasicBlock &MBB : MF) {
1611     if (isSpillageCopyElimEnabled)
1612       EliminateSpillageCopies(MBB);
1613     BackwardCopyPropagateBlock(MBB);
1614     ForwardCopyPropagateBlock(MBB);
1615   }
1616 
1617   return Changed;
1618 }
1619 
1620 MachineFunctionPass *
createMachineCopyPropagationPass(bool UseCopyInstr=false)1621 llvm::createMachineCopyPropagationPass(bool UseCopyInstr = false) {
1622   return new MachineCopyPropagationLegacy(UseCopyInstr);
1623 }
1624