1  //===- MachineCopyPropagation.cpp - Machine Copy Propagation Pass ---------===//
2  //
3  // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4  // See https://llvm.org/LICENSE.txt for license information.
5  // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6  //
7  //===----------------------------------------------------------------------===//
8  //
9  // This is an extremely simple MachineInstr-level copy propagation pass.
10  //
11  // This pass forwards the source of COPYs to the users of their destinations
12  // when doing so is legal.  For example:
13  //
14  //   %reg1 = COPY %reg0
15  //   ...
16  //   ... = OP %reg1
17  //
18  // If
19  //   - %reg0 has not been clobbered by the time of the use of %reg1
20  //   - the register class constraints are satisfied
21  //   - the COPY def is the only value that reaches OP
22  // then this pass replaces the above with:
23  //
24  //   %reg1 = COPY %reg0
25  //   ...
26  //   ... = OP %reg0
27  //
28  // This pass also removes some redundant COPYs.  For example:
29  //
30  //    %R1 = COPY %R0
31  //    ... // No clobber of %R1
32  //    %R0 = COPY %R1 <<< Removed
33  //
34  // or
35  //
36  //    %R1 = COPY %R0
37  //    ... // No clobber of %R0
38  //    %R1 = COPY %R0 <<< Removed
39  //
40  // or
41  //
42  //    $R0 = OP ...
43  //    ... // No read/clobber of $R0 and $R1
44  //    $R1 = COPY $R0 // $R0 is killed
45  // Replace $R0 with $R1 and remove the COPY
46  //    $R1 = OP ...
47  //    ...
48  //
49  //===----------------------------------------------------------------------===//
50  
51  #include "llvm/ADT/DenseMap.h"
52  #include "llvm/ADT/STLExtras.h"
53  #include "llvm/ADT/SetVector.h"
54  #include "llvm/ADT/SmallSet.h"
55  #include "llvm/ADT/SmallVector.h"
56  #include "llvm/ADT/Statistic.h"
57  #include "llvm/ADT/iterator_range.h"
58  #include "llvm/CodeGen/MachineBasicBlock.h"
59  #include "llvm/CodeGen/MachineFunction.h"
60  #include "llvm/CodeGen/MachineFunctionPass.h"
61  #include "llvm/CodeGen/MachineInstr.h"
62  #include "llvm/CodeGen/MachineOperand.h"
63  #include "llvm/CodeGen/MachineRegisterInfo.h"
64  #include "llvm/CodeGen/TargetInstrInfo.h"
65  #include "llvm/CodeGen/TargetRegisterInfo.h"
66  #include "llvm/CodeGen/TargetSubtargetInfo.h"
67  #include "llvm/InitializePasses.h"
68  #include "llvm/MC/MCRegisterInfo.h"
69  #include "llvm/Pass.h"
70  #include "llvm/Support/Debug.h"
71  #include "llvm/Support/DebugCounter.h"
72  #include "llvm/Support/raw_ostream.h"
73  #include <cassert>
74  #include <iterator>
75  
76  using namespace llvm;
77  
78  #define DEBUG_TYPE "machine-cp"
79  
80  STATISTIC(NumDeletes, "Number of dead copies deleted");
81  STATISTIC(NumCopyForwards, "Number of copy uses forwarded");
82  STATISTIC(NumCopyBackwardPropagated, "Number of copy defs backward propagated");
83  STATISTIC(SpillageChainsLength, "Length of spillage chains");
84  STATISTIC(NumSpillageChains, "Number of spillage chains");
85  DEBUG_COUNTER(FwdCounter, "machine-cp-fwd",
86                "Controls which register COPYs are forwarded");
87  
88  static cl::opt<bool> MCPUseCopyInstr("mcp-use-is-copy-instr", cl::init(false),
89                                       cl::Hidden);
90  static cl::opt<cl::boolOrDefault>
91      EnableSpillageCopyElimination("enable-spill-copy-elim", cl::Hidden);
92  
93  namespace {
94  
95  static std::optional<DestSourcePair> isCopyInstr(const MachineInstr &MI,
96                                                   const TargetInstrInfo &TII,
97                                                   bool UseCopyInstr) {
98    if (UseCopyInstr)
99      return TII.isCopyInstr(MI);
100  
101    if (MI.isCopy())
102      return std::optional<DestSourcePair>(
103          DestSourcePair{MI.getOperand(0), MI.getOperand(1)});
104  
105    return std::nullopt;
106  }
107  
108  class CopyTracker {
109    struct CopyInfo {
110      MachineInstr *MI, *LastSeenUseInCopy;
111      SmallVector<MCRegister, 4> DefRegs;
112      bool Avail;
113    };
114  
115    DenseMap<MCRegister, CopyInfo> Copies;
116  
117  public:
118    /// Mark all of the given registers and their subregisters as unavailable for
119    /// copying.
120    void markRegsUnavailable(ArrayRef<MCRegister> Regs,
121                             const TargetRegisterInfo &TRI) {
122      for (MCRegister Reg : Regs) {
123        // Source of copy is no longer available for propagation.
124        for (MCRegUnit Unit : TRI.regunits(Reg)) {
125          auto CI = Copies.find(Unit);
126          if (CI != Copies.end())
127            CI->second.Avail = false;
128        }
129      }
130    }
131  
132    /// Remove register from copy maps.
133    void invalidateRegister(MCRegister Reg, const TargetRegisterInfo &TRI,
134                            const TargetInstrInfo &TII, bool UseCopyInstr) {
135      // Since Reg might be a subreg of some registers, invalidating only Reg is
136      // not enough. We have to find the COPY that defines Reg as well as the
137      // registers defined by Reg, and invalidate all of them.
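    // Illustrative example (register names are only for illustration): if the
    // tracker holds "$eax = COPY $ebx" and Reg is $ax, erasing only $ax's
    // units is not enough; the tracked COPY is consulted and both $eax and
    // $ebx are invalidated as well.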
138      SmallSet<MCRegister, 8> RegsToInvalidate;
139      RegsToInvalidate.insert(Reg);
140      for (MCRegUnit Unit : TRI.regunits(Reg)) {
141        auto I = Copies.find(Unit);
142        if (I != Copies.end()) {
143          if (MachineInstr *MI = I->second.MI) {
144            std::optional<DestSourcePair> CopyOperands =
145                isCopyInstr(*MI, TII, UseCopyInstr);
146            assert(CopyOperands && "Expect copy");
147  
148            RegsToInvalidate.insert(
149                CopyOperands->Destination->getReg().asMCReg());
150            RegsToInvalidate.insert(CopyOperands->Source->getReg().asMCReg());
151          }
152          RegsToInvalidate.insert(I->second.DefRegs.begin(),
153                                  I->second.DefRegs.end());
154        }
155      }
156      for (MCRegister InvalidReg : RegsToInvalidate)
157        for (MCRegUnit Unit : TRI.regunits(InvalidReg))
158          Copies.erase(Unit);
159    }
160  
161    /// Clobber a single register, removing it from the tracker's copy maps.
162    void clobberRegister(MCRegister Reg, const TargetRegisterInfo &TRI,
163                         const TargetInstrInfo &TII, bool UseCopyInstr) {
164      for (MCRegUnit Unit : TRI.regunits(Reg)) {
165        auto I = Copies.find(Unit);
166        if (I != Copies.end()) {
167          // When we clobber the source of a copy, we need to clobber everything
168          // it defined.
169          markRegsUnavailable(I->second.DefRegs, TRI);
170          // When we clobber the destination of a copy, we need to clobber the
171          // whole register it defined.
172          if (MachineInstr *MI = I->second.MI) {
173            std::optional<DestSourcePair> CopyOperands =
174                isCopyInstr(*MI, TII, UseCopyInstr);
175            markRegsUnavailable({CopyOperands->Destination->getReg().asMCReg()},
176                                TRI);
177          }
178          // Now we can erase the copy.
179          Copies.erase(I);
180        }
181      }
182    }
183  
184    /// Add this copy's registers into the tracker's copy maps.
185    void trackCopy(MachineInstr *MI, const TargetRegisterInfo &TRI,
186                   const TargetInstrInfo &TII, bool UseCopyInstr) {
187      std::optional<DestSourcePair> CopyOperands =
188          isCopyInstr(*MI, TII, UseCopyInstr);
189      assert(CopyOperands && "Tracking non-copy?");
190  
191      MCRegister Src = CopyOperands->Source->getReg().asMCReg();
192      MCRegister Def = CopyOperands->Destination->getReg().asMCReg();
193  
194      // Remember Def is defined by the copy.
195      for (MCRegUnit Unit : TRI.regunits(Def))
196        Copies[Unit] = {MI, nullptr, {}, true};
197  
198      // Remember source that's copied to Def. Once it's clobbered, then
199      // it's no longer available for copy propagation.
200      for (MCRegUnit Unit : TRI.regunits(Src)) {
201        auto I = Copies.insert({Unit, {nullptr, nullptr, {}, false}});
202        auto &Copy = I.first->second;
203        if (!is_contained(Copy.DefRegs, Def))
204          Copy.DefRegs.push_back(Def);
205        Copy.LastSeenUseInCopy = MI;
206      }
207    }
208  
209    bool hasAnyCopies() {
210      return !Copies.empty();
211    }
212  
213    MachineInstr *findCopyForUnit(MCRegister RegUnit,
214                                  const TargetRegisterInfo &TRI,
215                                  bool MustBeAvailable = false) {
216      auto CI = Copies.find(RegUnit);
217      if (CI == Copies.end())
218        return nullptr;
219      if (MustBeAvailable && !CI->second.Avail)
220        return nullptr;
221      return CI->second.MI;
222    }
223  
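  // If the register containing RegUnit has been copied into exactly one
  // register, return the still-available COPY that defines that register;
  // otherwise return nullptr.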
224    MachineInstr *findCopyDefViaUnit(MCRegister RegUnit,
225                                     const TargetRegisterInfo &TRI) {
226      auto CI = Copies.find(RegUnit);
227      if (CI == Copies.end())
228        return nullptr;
229      if (CI->second.DefRegs.size() != 1)
230        return nullptr;
231      MCRegUnit RU = *TRI.regunits(CI->second.DefRegs[0]).begin();
232      return findCopyForUnit(RU, TRI, true);
233    }
234  
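  // Find a tracked COPY whose source register covers Reg (during the backward
  // pass such a COPY appears after I in program order) and that is still
  // usable at I: no regmask operand between I and the COPY may clobber the
  // COPY's source or destination.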
235    MachineInstr *findAvailBackwardCopy(MachineInstr &I, MCRegister Reg,
236                                        const TargetRegisterInfo &TRI,
237                                        const TargetInstrInfo &TII,
238                                        bool UseCopyInstr) {
239      MCRegUnit RU = *TRI.regunits(Reg).begin();
240      MachineInstr *AvailCopy = findCopyDefViaUnit(RU, TRI);
241  
242      if (!AvailCopy)
243        return nullptr;
244  
245      std::optional<DestSourcePair> CopyOperands =
246          isCopyInstr(*AvailCopy, TII, UseCopyInstr);
247      Register AvailSrc = CopyOperands->Source->getReg();
248      Register AvailDef = CopyOperands->Destination->getReg();
249      if (!TRI.isSubRegisterEq(AvailSrc, Reg))
250        return nullptr;
251  
252      for (const MachineInstr &MI :
253           make_range(AvailCopy->getReverseIterator(), I.getReverseIterator()))
254        for (const MachineOperand &MO : MI.operands())
255          if (MO.isRegMask())
256            // FIXME: Shall we simultaneously invalidate AvailSrc or AvailDef?
257            if (MO.clobbersPhysReg(AvailSrc) || MO.clobbersPhysReg(AvailDef))
258              return nullptr;
259  
260      return AvailCopy;
261    }
262  
263    MachineInstr *findAvailCopy(MachineInstr &DestCopy, MCRegister Reg,
264                                const TargetRegisterInfo &TRI,
265                                const TargetInstrInfo &TII, bool UseCopyInstr) {
266      // We check the first RegUnit here, since we'll only be interested in the
267      // copy if it copies the entire register anyway.
268      MCRegUnit RU = *TRI.regunits(Reg).begin();
269      MachineInstr *AvailCopy =
270          findCopyForUnit(RU, TRI, /*MustBeAvailable=*/true);
271  
272      if (!AvailCopy)
273        return nullptr;
274  
275      std::optional<DestSourcePair> CopyOperands =
276          isCopyInstr(*AvailCopy, TII, UseCopyInstr);
277      Register AvailSrc = CopyOperands->Source->getReg();
278      Register AvailDef = CopyOperands->Destination->getReg();
279      if (!TRI.isSubRegisterEq(AvailDef, Reg))
280        return nullptr;
281  
282      // Check that the available copy isn't clobbered by any regmasks between
283      // itself and the destination.
284      for (const MachineInstr &MI :
285           make_range(AvailCopy->getIterator(), DestCopy.getIterator()))
286        for (const MachineOperand &MO : MI.operands())
287          if (MO.isRegMask())
288            if (MO.clobbersPhysReg(AvailSrc) || MO.clobbersPhysReg(AvailDef))
289              return nullptr;
290  
291      return AvailCopy;
292    }
293  
294    // Find last COPY that defines Reg before Current MachineInstr.
295    MachineInstr *findLastSeenDefInCopy(const MachineInstr &Current,
296                                        MCRegister Reg,
297                                        const TargetRegisterInfo &TRI,
298                                        const TargetInstrInfo &TII,
299                                        bool UseCopyInstr) {
300      MCRegUnit RU = *TRI.regunits(Reg).begin();
301      auto CI = Copies.find(RU);
302      if (CI == Copies.end() || !CI->second.Avail)
303        return nullptr;
304  
305      MachineInstr *DefCopy = CI->second.MI;
306      std::optional<DestSourcePair> CopyOperands =
307          isCopyInstr(*DefCopy, TII, UseCopyInstr);
308      Register Def = CopyOperands->Destination->getReg();
309      if (!TRI.isSubRegisterEq(Def, Reg))
310        return nullptr;
311  
312      for (const MachineInstr &MI :
313           make_range(static_cast<const MachineInstr *>(DefCopy)->getIterator(),
314                      Current.getIterator()))
315        for (const MachineOperand &MO : MI.operands())
316          if (MO.isRegMask())
317            if (MO.clobbersPhysReg(Def)) {
318              LLVM_DEBUG(dbgs() << "MCP: Removed tracking of "
319                                << printReg(Def, &TRI) << "\n");
320              return nullptr;
321            }
322  
323      return DefCopy;
324    }
325  
326    // Find last COPY that uses Reg.
327    MachineInstr *findLastSeenUseInCopy(MCRegister Reg,
328                                        const TargetRegisterInfo &TRI) {
329      MCRegUnit RU = *TRI.regunits(Reg).begin();
330      auto CI = Copies.find(RU);
331      if (CI == Copies.end())
332        return nullptr;
333      return CI->second.LastSeenUseInCopy;
334    }
335  
336    void clear() {
337      Copies.clear();
338    }
339  };
340  
341  class MachineCopyPropagation : public MachineFunctionPass {
342    const TargetRegisterInfo *TRI = nullptr;
343    const TargetInstrInfo *TII = nullptr;
344    const MachineRegisterInfo *MRI = nullptr;
345  
346    // Use TII.isCopyInstr instead of MachineInstr::isCopy to recognize copies.
347    bool UseCopyInstr;
348  
349  public:
350    static char ID; // Pass identification, replacement for typeid
351  
352    MachineCopyPropagation(bool CopyInstr = false)
353        : MachineFunctionPass(ID), UseCopyInstr(CopyInstr || MCPUseCopyInstr) {
354      initializeMachineCopyPropagationPass(*PassRegistry::getPassRegistry());
355    }
356  
357    void getAnalysisUsage(AnalysisUsage &AU) const override {
358      AU.setPreservesCFG();
359      MachineFunctionPass::getAnalysisUsage(AU);
360    }
361  
362    bool runOnMachineFunction(MachineFunction &MF) override;
363  
364    MachineFunctionProperties getRequiredProperties() const override {
365      return MachineFunctionProperties().set(
366          MachineFunctionProperties::Property::NoVRegs);
367    }
368  
369  private:
370    typedef enum { DebugUse = false, RegularUse = true } DebugType;
371  
372    void ReadRegister(MCRegister Reg, MachineInstr &Reader, DebugType DT);
373    void ForwardCopyPropagateBlock(MachineBasicBlock &MBB);
374    void BackwardCopyPropagateBlock(MachineBasicBlock &MBB);
375    void EliminateSpillageCopies(MachineBasicBlock &MBB);
376    bool eraseIfRedundant(MachineInstr &Copy, MCRegister Src, MCRegister Def);
377    void forwardUses(MachineInstr &MI);
378    void propagateDefs(MachineInstr &MI);
379    bool isForwardableRegClassCopy(const MachineInstr &Copy,
380                                   const MachineInstr &UseI, unsigned UseIdx);
381    bool isBackwardPropagatableRegClassCopy(const MachineInstr &Copy,
382                                            const MachineInstr &UseI,
383                                            unsigned UseIdx);
384    bool hasImplicitOverlap(const MachineInstr &MI, const MachineOperand &Use);
385    bool hasOverlappingMultipleDef(const MachineInstr &MI,
386                                   const MachineOperand &MODef, Register Def);
387  
388    /// Candidates for deletion.
389    SmallSetVector<MachineInstr *, 8> MaybeDeadCopies;
390  
391    /// Multimap tracking debug users in current BB
392    DenseMap<MachineInstr *, SmallSet<MachineInstr *, 2>> CopyDbgUsers;
393  
394    CopyTracker Tracker;
395  
396    bool Changed = false;
397  };
398  
399  } // end anonymous namespace
400  
401  char MachineCopyPropagation::ID = 0;
402  
403  char &llvm::MachineCopyPropagationID = MachineCopyPropagation::ID;
404  
405  INITIALIZE_PASS(MachineCopyPropagation, DEBUG_TYPE,
406                  "Machine Copy Propagation Pass", false, false)
407  
408  void MachineCopyPropagation::ReadRegister(MCRegister Reg, MachineInstr &Reader,
409                                            DebugType DT) {
410    // If 'Reg' is defined by a copy, the copy is no longer a candidate
411    // for elimination. If a copy is "read" by a debug user, record the user
412    // for propagation.
413    for (MCRegUnit Unit : TRI->regunits(Reg)) {
414      if (MachineInstr *Copy = Tracker.findCopyForUnit(Unit, *TRI)) {
415        if (DT == RegularUse) {
416          LLVM_DEBUG(dbgs() << "MCP: Copy is used - not dead: "; Copy->dump());
417          MaybeDeadCopies.remove(Copy);
418        } else {
419          CopyDbgUsers[Copy].insert(&Reader);
420        }
421      }
422    }
423  }
424  
425  /// Return true if \p PreviousCopy did copy register \p Src to register \p Def.
426  /// This fact may have been obscured by sub register usage or may not be true at
427  /// all even though Src and Def are subregisters of the registers used in
428  /// PreviousCopy. e.g.
429  /// isNopCopy("ecx = COPY eax", AX, CX) == true
430  /// isNopCopy("ecx = COPY eax", AH, CL) == false
431  static bool isNopCopy(const MachineInstr &PreviousCopy, MCRegister Src,
432                        MCRegister Def, const TargetRegisterInfo *TRI,
433                        const TargetInstrInfo *TII, bool UseCopyInstr) {
434  
435    std::optional<DestSourcePair> CopyOperands =
436        isCopyInstr(PreviousCopy, *TII, UseCopyInstr);
437    MCRegister PreviousSrc = CopyOperands->Source->getReg().asMCReg();
438    MCRegister PreviousDef = CopyOperands->Destination->getReg().asMCReg();
439    if (Src == PreviousSrc && Def == PreviousDef)
440      return true;
441    if (!TRI->isSubRegister(PreviousSrc, Src))
442      return false;
443    unsigned SubIdx = TRI->getSubRegIndex(PreviousSrc, Src);
444    return SubIdx == TRI->getSubRegIndex(PreviousDef, Def);
445  }
446  
447  /// Remove instruction \p Copy if there exists a previous copy that copies the
448  /// register \p Src to the register \p Def. This may happen indirectly by
449  /// copying the super registers.
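/// For example (purely illustrative, using x86-style registers):
///
///   $ecx = COPY $eax   <-- earlier copy found by the tracker
///   ...                <-- no clobber of $eax or $ecx
///   $cl  = COPY $al    <-- redundant (Src = $al, Def = $cl), can be erased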
450  bool MachineCopyPropagation::eraseIfRedundant(MachineInstr &Copy,
451                                                MCRegister Src, MCRegister Def) {
452    // Avoid eliminating a copy from/to a reserved register as we cannot predict
453    // the value (Example: The sparc zero register is writable but stays zero).
454    if (MRI->isReserved(Src) || MRI->isReserved(Def))
455      return false;
456  
457    // Search for an existing copy.
458    MachineInstr *PrevCopy =
459        Tracker.findAvailCopy(Copy, Def, *TRI, *TII, UseCopyInstr);
460    if (!PrevCopy)
461      return false;
462  
463    auto PrevCopyOperands = isCopyInstr(*PrevCopy, *TII, UseCopyInstr);
464    // Check that the existing copy uses the correct sub registers.
465    if (PrevCopyOperands->Destination->isDead())
466      return false;
467    if (!isNopCopy(*PrevCopy, Src, Def, TRI, TII, UseCopyInstr))
468      return false;
469  
470    LLVM_DEBUG(dbgs() << "MCP: copy is a NOP, removing: "; Copy.dump());
471  
472    // Copy was redundantly redefining either Src or Def. Remove earlier kill
473    // flags between Copy and PrevCopy because the value will be reused now.
474    std::optional<DestSourcePair> CopyOperands =
475        isCopyInstr(Copy, *TII, UseCopyInstr);
476    assert(CopyOperands);
477  
478    Register CopyDef = CopyOperands->Destination->getReg();
479    assert(CopyDef == Src || CopyDef == Def);
480    for (MachineInstr &MI :
481         make_range(PrevCopy->getIterator(), Copy.getIterator()))
482      MI.clearRegisterKills(CopyDef, TRI);
483  
484    // Clear undef flag from remaining copy if needed.
485    if (!CopyOperands->Source->isUndef()) {
486      PrevCopy->getOperand(PrevCopyOperands->Source->getOperandNo())
487          .setIsUndef(false);
488    }
489  
490    Copy.eraseFromParent();
491    Changed = true;
492    ++NumDeletes;
493    return true;
494  }
495  
496  bool MachineCopyPropagation::isBackwardPropagatableRegClassCopy(
497      const MachineInstr &Copy, const MachineInstr &UseI, unsigned UseIdx) {
498    std::optional<DestSourcePair> CopyOperands =
499        isCopyInstr(Copy, *TII, UseCopyInstr);
500    Register Def = CopyOperands->Destination->getReg();
501  
502    if (const TargetRegisterClass *URC =
503            UseI.getRegClassConstraint(UseIdx, TII, TRI))
504      return URC->contains(Def);
505  
506    // We don't process further if UseI is a COPY, since forward copy propagation
507    // should handle that.
508    return false;
509  }
510  
511  /// Decide whether we should forward the source of \param Copy to its use in
512  /// \param UseI based on the physical register class constraints of the opcode
513  /// and avoiding introducing more cross-class COPYs.
514  bool MachineCopyPropagation::isForwardableRegClassCopy(const MachineInstr &Copy,
515                                                         const MachineInstr &UseI,
516                                                         unsigned UseIdx) {
517    std::optional<DestSourcePair> CopyOperands =
518        isCopyInstr(Copy, *TII, UseCopyInstr);
519    Register CopySrcReg = CopyOperands->Source->getReg();
520  
521    // If the new register meets the opcode register constraints, then allow
522    // forwarding.
523    if (const TargetRegisterClass *URC =
524            UseI.getRegClassConstraint(UseIdx, TII, TRI))
525      return URC->contains(CopySrcReg);
526  
527    auto UseICopyOperands = isCopyInstr(UseI, *TII, UseCopyInstr);
528    if (!UseICopyOperands)
529      return false;
530  
531    /// COPYs don't have register class constraints, so if the user instruction
532    /// is a COPY, we just try to avoid introducing additional cross-class
533    /// COPYs.  For example:
534    ///
535    ///   RegClassA = COPY RegClassB  // Copy parameter
536    ///   ...
537    ///   RegClassB = COPY RegClassA  // UseI parameter
538    ///
539    /// which after forwarding becomes
540    ///
541    ///   RegClassA = COPY RegClassB
542    ///   ...
543    ///   RegClassB = COPY RegClassB
544    ///
545    /// so we have reduced the number of cross-class COPYs and potentially
546    /// introduced a nop COPY that can be removed.
547  
548    // Allow forwarding if src and dst belong to any common class, so long as they
549    // don't belong to any (possibly smaller) common class that requires copies to
550    // go via a different class.
551    Register UseDstReg = UseICopyOperands->Destination->getReg();
552    bool Found = false;
553    bool IsCrossClass = false;
554    for (const TargetRegisterClass *RC : TRI->regclasses()) {
555      if (RC->contains(CopySrcReg) && RC->contains(UseDstReg)) {
556        Found = true;
557        if (TRI->getCrossCopyRegClass(RC) != RC) {
558          IsCrossClass = true;
559          break;
560        }
561      }
562    }
563    if (!Found)
564      return false;
565    if (!IsCrossClass)
566      return true;
567    // The forwarded copy would be cross-class. Only do this if the original copy
568    // was also cross-class.
569    Register CopyDstReg = CopyOperands->Destination->getReg();
570    for (const TargetRegisterClass *RC : TRI->regclasses()) {
571      if (RC->contains(CopySrcReg) && RC->contains(CopyDstReg) &&
572          TRI->getCrossCopyRegClass(RC) != RC)
573        return true;
574    }
575    return false;
576  }
577  
578  /// Check that \p MI does not have implicit uses that overlap with its \p Use
579  /// operand (the register being replaced), since these can sometimes be
580  /// implicitly tied to other operands.  For example, on AMDGPU:
581  ///
582  /// V_MOVRELS_B32_e32 %VGPR2, %M0<imp-use>, %EXEC<imp-use>, %VGPR2_VGPR3_VGPR4_VGPR5<imp-use>
583  ///
584  /// the %VGPR2 is implicitly tied to the larger reg operand, but we have no
585  /// way of knowing we need to update the latter when updating the former.
586  bool MachineCopyPropagation::hasImplicitOverlap(const MachineInstr &MI,
587                                                  const MachineOperand &Use) {
588    for (const MachineOperand &MIUse : MI.uses())
589      if (&MIUse != &Use && MIUse.isReg() && MIUse.isImplicit() &&
590          MIUse.isUse() && TRI->regsOverlap(Use.getReg(), MIUse.getReg()))
591        return true;
592  
593    return false;
594  }
595  
596  /// For an MI that has multiple definitions, check whether \p MI has
597  /// a definition that overlaps with another of its definitions.
598  /// For example, on ARM: umull   r9, r9, lr, r0
599  /// The umull instruction is unpredictable unless RdHi and RdLo are different.
600  bool MachineCopyPropagation::hasOverlappingMultipleDef(
601      const MachineInstr &MI, const MachineOperand &MODef, Register Def) {
602    for (const MachineOperand &MIDef : MI.defs()) {
603      if ((&MIDef != &MODef) && MIDef.isReg() &&
604          TRI->regsOverlap(Def, MIDef.getReg()))
605        return true;
606    }
607  
608    return false;
609  }
610  
611  /// Look for available copies whose destination register is used by \p MI and
612  /// replace the use in \p MI with the copy's source register.
613  void MachineCopyPropagation::forwardUses(MachineInstr &MI) {
614    if (!Tracker.hasAnyCopies())
615      return;
616  
617    // Look for non-tied explicit vreg uses that have an active COPY
618    // instruction that defines the physical register allocated to them.
619    // Replace the vreg with the source of the active COPY.
620    for (unsigned OpIdx = 0, OpEnd = MI.getNumOperands(); OpIdx < OpEnd;
621         ++OpIdx) {
622      MachineOperand &MOUse = MI.getOperand(OpIdx);
623      // Don't forward into undef use operands, since doing so can cause problems
624      // with the machine verifier: it doesn't treat undef reads as reads, so we
625      // can end up with a live range that ends on an undef read, which the
626      // verifier reports as a live range that doesn't end on a read of the
627      // live-range register.
628      if (!MOUse.isReg() || MOUse.isTied() || MOUse.isUndef() || MOUse.isDef() ||
629          MOUse.isImplicit())
630        continue;
631  
632      if (!MOUse.getReg())
633        continue;
634  
635      // Check that the register is marked 'renamable' so we know it is safe to
636      // rename it without violating any constraints that aren't expressed in the
637      // IR (e.g. ABI or opcode requirements).
638      if (!MOUse.isRenamable())
639        continue;
640  
641      MachineInstr *Copy = Tracker.findAvailCopy(MI, MOUse.getReg().asMCReg(),
642                                                 *TRI, *TII, UseCopyInstr);
643      if (!Copy)
644        continue;
645  
646      std::optional<DestSourcePair> CopyOperands =
647          isCopyInstr(*Copy, *TII, UseCopyInstr);
648      Register CopyDstReg = CopyOperands->Destination->getReg();
649      const MachineOperand &CopySrc = *CopyOperands->Source;
650      Register CopySrcReg = CopySrc.getReg();
651  
652      Register ForwardedReg = CopySrcReg;
653      // MI might use a sub-register of the Copy destination, in which case the
654      // forwarded register is the matching sub-register of the Copy source.
655      if (MOUse.getReg() != CopyDstReg) {
656        unsigned SubRegIdx = TRI->getSubRegIndex(CopyDstReg, MOUse.getReg());
657        assert(SubRegIdx &&
658               "MI source is not a sub-register of Copy destination");
659        ForwardedReg = TRI->getSubReg(CopySrcReg, SubRegIdx);
660        if (!ForwardedReg) {
661          LLVM_DEBUG(dbgs() << "MCP: Copy source does not have sub-register "
662                            << TRI->getSubRegIndexName(SubRegIdx) << '\n');
663          continue;
664        }
665      }
666  
667      // Don't forward COPYs of reserved regs unless they are constant.
668      if (MRI->isReserved(CopySrcReg) && !MRI->isConstantPhysReg(CopySrcReg))
669        continue;
670  
671      if (!isForwardableRegClassCopy(*Copy, MI, OpIdx))
672        continue;
673  
674      if (hasImplicitOverlap(MI, MOUse))
675        continue;
676  
677      // Check that the instruction is not a copy that partially overwrites the
678      // original copy source that we are about to use. The tracker mechanism
679      // cannot cope with that.
680      if (isCopyInstr(MI, *TII, UseCopyInstr) &&
681          MI.modifiesRegister(CopySrcReg, TRI) &&
682          !MI.definesRegister(CopySrcReg)) {
683        LLVM_DEBUG(dbgs() << "MCP: Copy source overlap with dest in " << MI);
684        continue;
685      }
686  
687      if (!DebugCounter::shouldExecute(FwdCounter)) {
688        LLVM_DEBUG(dbgs() << "MCP: Skipping forwarding due to debug counter:\n  "
689                          << MI);
690        continue;
691      }
692  
693      LLVM_DEBUG(dbgs() << "MCP: Replacing " << printReg(MOUse.getReg(), TRI)
694                        << "\n     with " << printReg(ForwardedReg, TRI)
695                        << "\n     in " << MI << "     from " << *Copy);
696  
697      MOUse.setReg(ForwardedReg);
698  
699      if (!CopySrc.isRenamable())
700        MOUse.setIsRenamable(false);
701      MOUse.setIsUndef(CopySrc.isUndef());
702  
703      LLVM_DEBUG(dbgs() << "MCP: After replacement: " << MI << "\n");
704  
705      // Clear kill markers that may have been invalidated.
706      for (MachineInstr &KMI :
707           make_range(Copy->getIterator(), std::next(MI.getIterator())))
708        KMI.clearRegisterKills(CopySrcReg, TRI);
709  
710      ++NumCopyForwards;
711      Changed = true;
712    }
713  }
714  
715  void MachineCopyPropagation::ForwardCopyPropagateBlock(MachineBasicBlock &MBB) {
716    LLVM_DEBUG(dbgs() << "MCP: ForwardCopyPropagateBlock " << MBB.getName()
717                      << "\n");
718  
719    for (MachineInstr &MI : llvm::make_early_inc_range(MBB)) {
720      // Analyze copies (which don't overlap themselves).
721      std::optional<DestSourcePair> CopyOperands =
722          isCopyInstr(MI, *TII, UseCopyInstr);
723      if (CopyOperands) {
724  
725        Register RegSrc = CopyOperands->Source->getReg();
726        Register RegDef = CopyOperands->Destination->getReg();
727  
728        if (!TRI->regsOverlap(RegDef, RegSrc)) {
729          assert(RegDef.isPhysical() && RegSrc.isPhysical() &&
730                "MachineCopyPropagation should be run after register allocation!");
731  
732          MCRegister Def = RegDef.asMCReg();
733          MCRegister Src = RegSrc.asMCReg();
734  
735          // If the two copies cancel out and the source of the first copy
736          // hasn't been overridden, eliminate the second one. e.g.
737          //  %ecx = COPY %eax
738          //  ... nothing clobbered eax.
739          //  %eax = COPY %ecx
740          // =>
741          //  %ecx = COPY %eax
742          //
743          // or
744          //
745          //  %ecx = COPY %eax
746          //  ... nothing clobbered eax.
747          //  %ecx = COPY %eax
748          // =>
749          //  %ecx = COPY %eax
750          if (eraseIfRedundant(MI, Def, Src) || eraseIfRedundant(MI, Src, Def))
751            continue;
752  
753          forwardUses(MI);
754  
755          // Src may have been changed by forwardUses()
756          CopyOperands = isCopyInstr(MI, *TII, UseCopyInstr);
757          Src = CopyOperands->Source->getReg().asMCReg();
758  
759          // If Src is defined by a previous copy, the previous copy cannot be
760          // eliminated.
761          ReadRegister(Src, MI, RegularUse);
762          for (const MachineOperand &MO : MI.implicit_operands()) {
763            if (!MO.isReg() || !MO.readsReg())
764              continue;
765            MCRegister Reg = MO.getReg().asMCReg();
766            if (!Reg)
767              continue;
768            ReadRegister(Reg, MI, RegularUse);
769          }
770  
771          LLVM_DEBUG(dbgs() << "MCP: Copy is a deletion candidate: "; MI.dump());
772  
773          // Copy is now a candidate for deletion.
774          if (!MRI->isReserved(Def))
775            MaybeDeadCopies.insert(&MI);
776  
777          // If 'Def' was previously the source of another copy, then that earlier copy's
778          // source is no longer available. e.g.
779          // %xmm9 = copy %xmm2
780          // ...
781          // %xmm2 = copy %xmm0
782          // ...
783          // %xmm2 = copy %xmm9
784          Tracker.clobberRegister(Def, *TRI, *TII, UseCopyInstr);
785          for (const MachineOperand &MO : MI.implicit_operands()) {
786            if (!MO.isReg() || !MO.isDef())
787              continue;
788            MCRegister Reg = MO.getReg().asMCReg();
789            if (!Reg)
790              continue;
791            Tracker.clobberRegister(Reg, *TRI, *TII, UseCopyInstr);
792          }
793  
794          Tracker.trackCopy(&MI, *TRI, *TII, UseCopyInstr);
795  
796          continue;
797        }
798      }
799  
800      // Clobber any earlyclobber regs first.
801      for (const MachineOperand &MO : MI.operands())
802        if (MO.isReg() && MO.isEarlyClobber()) {
803          MCRegister Reg = MO.getReg().asMCReg();
804          // If we have a tied earlyclobber, that means it is also read by this
805          // instruction, so we need to make sure we don't remove it as dead
806          // later.
807          if (MO.isTied())
808            ReadRegister(Reg, MI, RegularUse);
809          Tracker.clobberRegister(Reg, *TRI, *TII, UseCopyInstr);
810        }
811  
812      forwardUses(MI);
813  
814      // Not a copy.
815      SmallVector<Register, 2> Defs;
816      const MachineOperand *RegMask = nullptr;
817      for (const MachineOperand &MO : MI.operands()) {
818        if (MO.isRegMask())
819          RegMask = &MO;
820        if (!MO.isReg())
821          continue;
822        Register Reg = MO.getReg();
823        if (!Reg)
824          continue;
825  
826        assert(!Reg.isVirtual() &&
827               "MachineCopyPropagation should be run after register allocation!");
828  
829        if (MO.isDef() && !MO.isEarlyClobber()) {
830          Defs.push_back(Reg.asMCReg());
831          continue;
832        } else if (MO.readsReg())
833          ReadRegister(Reg.asMCReg(), MI, MO.isDebug() ? DebugUse : RegularUse);
834      }
835  
836      // The instruction has a register mask operand which means that it clobbers
837      // a large set of registers.  Treat clobbered registers the same way as
838      // defined registers.
839      if (RegMask) {
840        // Erase any MaybeDeadCopies whose destination register is clobbered.
841        for (SmallSetVector<MachineInstr *, 8>::iterator DI =
842                 MaybeDeadCopies.begin();
843             DI != MaybeDeadCopies.end();) {
844          MachineInstr *MaybeDead = *DI;
845          std::optional<DestSourcePair> CopyOperands =
846              isCopyInstr(*MaybeDead, *TII, UseCopyInstr);
847          MCRegister Reg = CopyOperands->Destination->getReg().asMCReg();
848          assert(!MRI->isReserved(Reg));
849  
850          if (!RegMask->clobbersPhysReg(Reg)) {
851            ++DI;
852            continue;
853          }
854  
855          LLVM_DEBUG(dbgs() << "MCP: Removing copy due to regmask clobbering: ";
856                     MaybeDead->dump());
857  
858          // Make sure we invalidate any entries in the copy maps before erasing
859          // the instruction.
860          Tracker.clobberRegister(Reg, *TRI, *TII, UseCopyInstr);
861  
862          // erase() will return the next valid iterator pointing to the next
863          // element after the erased one.
864          DI = MaybeDeadCopies.erase(DI);
865          MaybeDead->eraseFromParent();
866          Changed = true;
867          ++NumDeletes;
868        }
869      }
870  
871    // Any previous copy that defined or read the Defs is no longer available.
872      for (MCRegister Reg : Defs)
873        Tracker.clobberRegister(Reg, *TRI, *TII, UseCopyInstr);
874    }
875  
876    // If MBB doesn't have successors, delete the copies whose defs are not used.
877  // If MBB does have successors, then conservatively assume the defs are live-out
878    // since we don't want to trust live-in lists.
879    if (MBB.succ_empty()) {
880      for (MachineInstr *MaybeDead : MaybeDeadCopies) {
881        LLVM_DEBUG(dbgs() << "MCP: Removing copy due to no live-out succ: ";
882                   MaybeDead->dump());
883  
884        std::optional<DestSourcePair> CopyOperands =
885            isCopyInstr(*MaybeDead, *TII, UseCopyInstr);
886        assert(CopyOperands);
887  
888        Register SrcReg = CopyOperands->Source->getReg();
889        Register DestReg = CopyOperands->Destination->getReg();
890        assert(!MRI->isReserved(DestReg));
891  
892        // Update matching debug values, if any.
893        SmallVector<MachineInstr *> MaybeDeadDbgUsers(
894            CopyDbgUsers[MaybeDead].begin(), CopyDbgUsers[MaybeDead].end());
895        MRI->updateDbgUsersToReg(DestReg.asMCReg(), SrcReg.asMCReg(),
896                                 MaybeDeadDbgUsers);
897  
898        MaybeDead->eraseFromParent();
899        Changed = true;
900        ++NumDeletes;
901      }
902    }
903  
904    MaybeDeadCopies.clear();
905    CopyDbgUsers.clear();
906    Tracker.clear();
907  }
908  
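// Return true if the copy described by CopyOperands is a candidate for
// backward propagation: both registers are non-null and non-reserved, and the
// source operand is renamable and killed at the copy.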
909  static bool isBackwardPropagatableCopy(const DestSourcePair &CopyOperands,
910                                         const MachineRegisterInfo &MRI,
911                                         const TargetInstrInfo &TII) {
912    Register Def = CopyOperands.Destination->getReg();
913    Register Src = CopyOperands.Source->getReg();
914  
915    if (!Def || !Src)
916      return false;
917  
918    if (MRI.isReserved(Def) || MRI.isReserved(Src))
919      return false;
920  
921    return CopyOperands.Source->isRenamable() && CopyOperands.Source->isKill();
922  }
923  
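/// Look for a tracked COPY whose source register is defined by \p MI and
/// rewrite that def in \p MI to write the COPY's destination directly, which
/// turns the COPY itself into a deletion candidate.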
924  void MachineCopyPropagation::propagateDefs(MachineInstr &MI) {
925    if (!Tracker.hasAnyCopies())
926      return;
927  
928    for (unsigned OpIdx = 0, OpEnd = MI.getNumOperands(); OpIdx != OpEnd;
929         ++OpIdx) {
930      MachineOperand &MODef = MI.getOperand(OpIdx);
931  
932      if (!MODef.isReg() || MODef.isUse())
933        continue;
934  
935      // Ignore non-trivial cases.
936      if (MODef.isTied() || MODef.isUndef() || MODef.isImplicit())
937        continue;
938  
939      if (!MODef.getReg())
940        continue;
941  
942      // We only handle renamable registers, i.e. those that came from a vreg.
943      if (!MODef.isRenamable())
944        continue;
945  
946      MachineInstr *Copy = Tracker.findAvailBackwardCopy(
947          MI, MODef.getReg().asMCReg(), *TRI, *TII, UseCopyInstr);
948      if (!Copy)
949        continue;
950  
951      std::optional<DestSourcePair> CopyOperands =
952          isCopyInstr(*Copy, *TII, UseCopyInstr);
953      Register Def = CopyOperands->Destination->getReg();
954      Register Src = CopyOperands->Source->getReg();
955  
956      if (MODef.getReg() != Src)
957        continue;
958  
959      if (!isBackwardPropagatableRegClassCopy(*Copy, MI, OpIdx))
960        continue;
961  
962      if (hasImplicitOverlap(MI, MODef))
963        continue;
964  
965      if (hasOverlappingMultipleDef(MI, MODef, Def))
966        continue;
967  
968      LLVM_DEBUG(dbgs() << "MCP: Replacing " << printReg(MODef.getReg(), TRI)
969                        << "\n     with " << printReg(Def, TRI) << "\n     in "
970                        << MI << "     from " << *Copy);
971  
972      MODef.setReg(Def);
973      MODef.setIsRenamable(CopyOperands->Destination->isRenamable());
974  
975      LLVM_DEBUG(dbgs() << "MCP: After replacement: " << MI << "\n");
976      MaybeDeadCopies.insert(Copy);
977      Changed = true;
978      ++NumCopyBackwardPropagated;
979    }
980  }
981  
982  void MachineCopyPropagation::BackwardCopyPropagateBlock(
983      MachineBasicBlock &MBB) {
984    LLVM_DEBUG(dbgs() << "MCP: BackwardCopyPropagateBlock " << MBB.getName()
985                      << "\n");
986  
987    for (MachineInstr &MI : llvm::make_early_inc_range(llvm::reverse(MBB))) {
988      // Ignore non-trivial COPYs.
989      std::optional<DestSourcePair> CopyOperands =
990          isCopyInstr(MI, *TII, UseCopyInstr);
991      if (CopyOperands && MI.getNumOperands() == 2) {
992        Register DefReg = CopyOperands->Destination->getReg();
993        Register SrcReg = CopyOperands->Source->getReg();
994  
995        if (!TRI->regsOverlap(DefReg, SrcReg)) {
996          // Unlike forward cp, we don't invoke propagateDefs here,
997          // just let forward cp do COPY-to-COPY propagation.
998          if (isBackwardPropagatableCopy(*CopyOperands, *MRI, *TII)) {
999            Tracker.invalidateRegister(SrcReg.asMCReg(), *TRI, *TII,
1000                                       UseCopyInstr);
1001            Tracker.invalidateRegister(DefReg.asMCReg(), *TRI, *TII,
1002                                       UseCopyInstr);
1003            Tracker.trackCopy(&MI, *TRI, *TII, UseCopyInstr);
1004            continue;
1005          }
1006        }
1007      }
1008  
1009      // Invalidate any earlyclobber regs first.
1010      for (const MachineOperand &MO : MI.operands())
1011        if (MO.isReg() && MO.isEarlyClobber()) {
1012          MCRegister Reg = MO.getReg().asMCReg();
1013          if (!Reg)
1014            continue;
1015          Tracker.invalidateRegister(Reg, *TRI, *TII, UseCopyInstr);
1016        }
1017  
1018      propagateDefs(MI);
1019      for (const MachineOperand &MO : MI.operands()) {
1020        if (!MO.isReg())
1021          continue;
1022  
1023        if (!MO.getReg())
1024          continue;
1025  
1026        if (MO.isDef())
1027          Tracker.invalidateRegister(MO.getReg().asMCReg(), *TRI, *TII,
1028                                     UseCopyInstr);
1029  
1030        if (MO.readsReg()) {
1031          if (MO.isDebug()) {
1032            // Check if the register in the debug instruction is used by a
1033            // copy instruction, so we can update the debug info if the
1034            // register is changed.
1035            for (MCRegUnit Unit : TRI->regunits(MO.getReg().asMCReg())) {
1036              if (auto *Copy = Tracker.findCopyDefViaUnit(Unit, *TRI)) {
1037                CopyDbgUsers[Copy].insert(&MI);
1038              }
1039            }
1040          } else {
1041            Tracker.invalidateRegister(MO.getReg().asMCReg(), *TRI, *TII,
1042                                       UseCopyInstr);
1043          }
1044        }
1045      }
1046    }
1047  
1048    for (auto *Copy : MaybeDeadCopies) {
1049      std::optional<DestSourcePair> CopyOperands =
1050          isCopyInstr(*Copy, *TII, UseCopyInstr);
1051      Register Src = CopyOperands->Source->getReg();
1052      Register Def = CopyOperands->Destination->getReg();
1053      SmallVector<MachineInstr *> MaybeDeadDbgUsers(CopyDbgUsers[Copy].begin(),
1054                                                    CopyDbgUsers[Copy].end());
1055  
1056      MRI->updateDbgUsersToReg(Src.asMCReg(), Def.asMCReg(), MaybeDeadDbgUsers);
1057      Copy->eraseFromParent();
1058      ++NumDeletes;
1059    }
1060  
1061    MaybeDeadCopies.clear();
1062    CopyDbgUsers.clear();
1063    Tracker.clear();
1064  }
1065  
1066  static void LLVM_ATTRIBUTE_UNUSED printSpillReloadChain(
1067      DenseMap<MachineInstr *, SmallVector<MachineInstr *>> &SpillChain,
1068      DenseMap<MachineInstr *, SmallVector<MachineInstr *>> &ReloadChain,
1069      MachineInstr *Leader) {
1070    auto &SC = SpillChain[Leader];
1071    auto &RC = ReloadChain[Leader];
1072    for (auto I = SC.rbegin(), E = SC.rend(); I != E; ++I)
1073      (*I)->dump();
1074    for (MachineInstr *MI : RC)
1075      MI->dump();
1076  }
1077  
1078  // Remove spill-reload like copy chains. For example
1079  // r0 = COPY r1
1080  // r1 = COPY r2
1081  // r2 = COPY r3
1082  // r3 = COPY r4
1083  // <def-use r4>
1084  // r4 = COPY r3
1085  // r3 = COPY r2
1086  // r2 = COPY r1
1087  // r1 = COPY r0
1088  // will be folded into
1089  // r0 = COPY r1
1090  // r1 = COPY r4
1091  // <def-use r4>
1092  // r4 = COPY r1
1093  // r1 = COPY r0
1094  // TODO: Currently we don't track usage of r0 outside the chain, so we
1095  // conservatively keep its value as it was before the rewrite.
1096  //
1097  // The algorithm tries to maintain
1098  // property#1: No Def of a spill COPY in the chain is used or defined until
1099  // the paired reload COPY in the chain uses the Def.
1100  //
1101  // property#2: No Source of a COPY in the chain is used or defined until the
1102  // next COPY in the chain defines the Source, except for the innermost
1103  // spill-reload pair.
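// For example (illustrative), property#2 would be violated by:
//   r0 = COPY r1
//   <use of r1>    <-- r1, the Source of the previous COPY, is read before the
//                      next COPY in the chain redefines it
//   r1 = COPY r2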
1104  //
1105  // The algorithm checks every COPY inside the MBB: assume the COPY is a reload
1106  // COPY, then search backward for the COPY that defines the Src of the reload
1107  // COPY, i.e. its paired spill COPY. If such a pair is found, it either joins
1108  // an existing chain or starts a new one, depending on whether the last
1109  // available COPY uses the Def of the reload COPY.
1110  // Implementation notes: we use CopyTracker::findLastSeenDefInCopy(Reg, ...)
1111  // to find the last COPY that defines Reg, and we use
1112  // CopyTracker::findLastSeenUseInCopy(Reg, ...) to find the last COPY that
1113  // uses Reg. When we encounter a non-COPY instruction, we check the registers
1114  // in its operands; if such a Reg is defined by a previously seen COPY, we
1115  // untrack it via CopyTracker::clobberRegister(Reg, ...).
1116  void MachineCopyPropagation::EliminateSpillageCopies(MachineBasicBlock &MBB) {
1117    // ChainLeader maps each MI inside a spill-reload chain to its innermost
1118    // reload COPY, so we can tell whether an MI already belongs to a chain.
1119    DenseMap<MachineInstr *, MachineInstr *> ChainLeader;
1120    // SpillChain maps innermost reload COPY of a spill-reload chain to a sequence
1121    // of COPYs that forms spills of a spill-reload chain.
1122    // ReloadChain maps innermost reload COPY of a spill-reload chain to a
1123    // sequence of COPYs that forms reloads of a spill-reload chain.
1124    DenseMap<MachineInstr *, SmallVector<MachineInstr *>> SpillChain, ReloadChain;
1125    // If a COPY's Source has a use or def before the next COPY defines that
1126    // Source, we put the COPY in this set to preserve property#2.
1127    DenseSet<const MachineInstr *> CopySourceInvalid;
1128  
1129    auto TryFoldSpillageCopies =
1130        [&, this](const SmallVectorImpl<MachineInstr *> &SC,
1131                  const SmallVectorImpl<MachineInstr *> &RC) {
1132          assert(SC.size() == RC.size() && "Spill-reload should be paired");
1133  
1134          // We need at least 3 pairs of copies for the transformation to apply,
1135          // because the first outermost pair cannot be removed since we don't
1136          // recolor outside of the chain, and because we need at least one
1137          // temporary spill slot to shorten the chain. If we only have a chain of
1138          // two pairs, we already have the shortest sequence this code can handle:
1139          // the outermost pair for the temporary spill slot, and the pair that
1140          // uses that temporary spill slot for the other end of the chain.
1141          // TODO: We might be able to simplify to one spill-reload pair if we
1142          // collect more information about the outermost COPY.
1143          if (SC.size() <= 2)
1144            return;
1145  
1146          // If property#2 is violated, we don't fold the chain.
1147          for (const MachineInstr *Spill : make_range(SC.begin() + 1, SC.end()))
1148            if (CopySourceInvalid.count(Spill))
1149              return;
1150  
1151          for (const MachineInstr *Reload : make_range(RC.begin(), RC.end() - 1))
1152            if (CopySourceInvalid.count(Reload))
1153              return;
1154  
1155          auto CheckCopyConstraint = [this](Register Def, Register Src) {
1156            for (const TargetRegisterClass *RC : TRI->regclasses()) {
1157              if (RC->contains(Def) && RC->contains(Src))
1158                return true;
1159            }
1160            return false;
1161          };
1162  
1163          auto UpdateReg = [](MachineInstr *MI, const MachineOperand *Old,
1164                              const MachineOperand *New) {
1165            for (MachineOperand &MO : MI->operands()) {
1166              if (&MO == Old)
1167                MO.setReg(New->getReg());
1168            }
1169          };
1170  
1171          std::optional<DestSourcePair> InnerMostSpillCopy =
1172              isCopyInstr(*SC[0], *TII, UseCopyInstr);
1173          std::optional<DestSourcePair> OuterMostSpillCopy =
1174              isCopyInstr(*SC.back(), *TII, UseCopyInstr);
1175          std::optional<DestSourcePair> InnerMostReloadCopy =
1176              isCopyInstr(*RC[0], *TII, UseCopyInstr);
1177          std::optional<DestSourcePair> OuterMostReloadCopy =
1178              isCopyInstr(*RC.back(), *TII, UseCopyInstr);
1179          if (!CheckCopyConstraint(OuterMostSpillCopy->Source->getReg(),
1180                                   InnerMostSpillCopy->Source->getReg()) ||
1181              !CheckCopyConstraint(InnerMostReloadCopy->Destination->getReg(),
1182                                   OuterMostReloadCopy->Destination->getReg()))
1183            return;
1184  
1185          SpillageChainsLength += SC.size() + RC.size();
1186          NumSpillageChains += 1;
1187          UpdateReg(SC[0], InnerMostSpillCopy->Destination,
1188                    OuterMostSpillCopy->Source);
1189          UpdateReg(RC[0], InnerMostReloadCopy->Source,
1190                    OuterMostReloadCopy->Destination);
1191  
1192          for (size_t I = 1; I < SC.size() - 1; ++I) {
1193            SC[I]->eraseFromParent();
1194            RC[I]->eraseFromParent();
1195            NumDeletes += 2;
1196          }
1197        };
1198  
1199    auto IsFoldableCopy = [this](const MachineInstr &MaybeCopy) {
1200      if (MaybeCopy.getNumImplicitOperands() > 0)
1201        return false;
1202      std::optional<DestSourcePair> CopyOperands =
1203          isCopyInstr(MaybeCopy, *TII, UseCopyInstr);
1204      if (!CopyOperands)
1205        return false;
1206      Register Src = CopyOperands->Source->getReg();
1207      Register Def = CopyOperands->Destination->getReg();
1208      return Src && Def && !TRI->regsOverlap(Src, Def) &&
1209             CopyOperands->Source->isRenamable() &&
1210             CopyOperands->Destination->isRenamable();
1211    };
1212  
1213    auto IsSpillReloadPair = [&, this](const MachineInstr &Spill,
1214                                       const MachineInstr &Reload) {
1215      if (!IsFoldableCopy(Spill) || !IsFoldableCopy(Reload))
1216        return false;
1217      std::optional<DestSourcePair> SpillCopy =
1218          isCopyInstr(Spill, *TII, UseCopyInstr);
1219      std::optional<DestSourcePair> ReloadCopy =
1220          isCopyInstr(Reload, *TII, UseCopyInstr);
1221      if (!SpillCopy || !ReloadCopy)
1222        return false;
1223      return SpillCopy->Source->getReg() == ReloadCopy->Destination->getReg() &&
1224             SpillCopy->Destination->getReg() == ReloadCopy->Source->getReg();
1225    };
1226  
1227    auto IsChainedCopy = [&, this](const MachineInstr &Prev,
1228                                   const MachineInstr &Current) {
1229      if (!IsFoldableCopy(Prev) || !IsFoldableCopy(Current))
1230        return false;
1231      std::optional<DestSourcePair> PrevCopy =
1232          isCopyInstr(Prev, *TII, UseCopyInstr);
1233      std::optional<DestSourcePair> CurrentCopy =
1234          isCopyInstr(Current, *TII, UseCopyInstr);
1235      if (!PrevCopy || !CurrentCopy)
1236        return false;
1237      return PrevCopy->Source->getReg() == CurrentCopy->Destination->getReg();
1238    };
1239  
1240    for (MachineInstr &MI : llvm::make_early_inc_range(MBB)) {
1241      std::optional<DestSourcePair> CopyOperands =
1242          isCopyInstr(MI, *TII, UseCopyInstr);
1243  
1244      // Update track information via non-copy instruction.
1245      SmallSet<Register, 8> RegsToClobber;
1246      if (!CopyOperands) {
1247        for (const MachineOperand &MO : MI.operands()) {
1248          if (!MO.isReg())
1249            continue;
1250          Register Reg = MO.getReg();
1251          if (!Reg)
1252            continue;
1253          MachineInstr *LastUseCopy =
1254              Tracker.findLastSeenUseInCopy(Reg.asMCReg(), *TRI);
1255          if (LastUseCopy) {
1256            LLVM_DEBUG(dbgs() << "MCP: Copy source of\n");
1257            LLVM_DEBUG(LastUseCopy->dump());
1258            LLVM_DEBUG(dbgs() << "might be invalidated by\n");
1259            LLVM_DEBUG(MI.dump());
1260            CopySourceInvalid.insert(LastUseCopy);
1261          }
1262          // Note that Tracker.clobberRegister(Reg, ...) removes the tracking of
1263          // Reg, i.e. the COPY that defines Reg is removed from the mapping, and
1264          // COPYs that use Reg are marked unavailable.
1265          // We don't invoke CopyTracker::clobberRegister(Reg, ...) if Reg is not
1266          // defined by a previous COPY, since we don't want to make COPYs that
1267          // use Reg unavailable.
1268          if (Tracker.findLastSeenDefInCopy(MI, Reg.asMCReg(), *TRI, *TII,
1269                                      UseCopyInstr))
1270            // Thus we can keep the property#1.
1271            RegsToClobber.insert(Reg);
1272        }
1273        for (Register Reg : RegsToClobber) {
1274          Tracker.clobberRegister(Reg, *TRI, *TII, UseCopyInstr);
1275          LLVM_DEBUG(dbgs() << "MCP: Removed tracking of " << printReg(Reg, TRI)
1276                            << "\n");
1277        }
1278        continue;
1279      }
1280  
1281      Register Src = CopyOperands->Source->getReg();
1282      Register Def = CopyOperands->Destination->getReg();
1283      // Check if we can find a pair spill-reload copy.
1284      // Check if we can find a paired spill-reload copy.
1285      LLVM_DEBUG(MI.dump());
1286      MachineInstr *MaybeSpill =
1287          Tracker.findLastSeenDefInCopy(MI, Src.asMCReg(), *TRI, *TII, UseCopyInstr);
1288      bool MaybeSpillIsChained = ChainLeader.count(MaybeSpill);
1289      if (!MaybeSpillIsChained && MaybeSpill &&
1290          IsSpillReloadPair(*MaybeSpill, MI)) {
1291        // Check if we already have an existing chain. Now we have a
1292        // spill-reload pair.
1293        // L2: r2 = COPY r3
1294        // L5: r3 = COPY r2
1295        // Looking for a valid COPY before L5 which uses r3.
1296        // There can be several cases.
1297        // Case #1:
1298        // No such COPY is found, which can happen when r3 has a def or use
1299        // between (L2, L5); we create a new chain for L2 and L5.
1300        // Case #2:
1301        // L2: r2 = COPY r3
1302        // L5: r3 = COPY r2
1303        // Such a COPY is found, and it is L2; we create a new chain for L2 and L5.
1304        // Case #3:
1305        // L2: r2 = COPY r3
1306        // L3: r1 = COPY r3
1307        // L5: r3 = COPY r2
1308        // We create a new chain for L2 and L5.
1309        // Case #4:
1310        // L2: r2 = COPY r3
1311        // L3: r1 = COPY r3
1312        // L4: r3 = COPY r1
1313        // L5: r3 = COPY r2
1314        // Such a COPY won't be found since L4 defines r3. We create a new chain
1315        // for L2 and L5.
1316        // Case #5:
1317        // L2: r2 = COPY r3
1318        // L3: r3 = COPY r1
1319        // L4: r1 = COPY r3
1320        // L5: r3 = COPY r2
1321        // Such a COPY is found, and it is L4, which belongs to an existing chain;
1322        // we add L2 and L5 to this chain.
1323        LLVM_DEBUG(dbgs() << "MCP: Found spill: ");
1324        LLVM_DEBUG(MaybeSpill->dump());
1325        MachineInstr *MaybePrevReload =
1326            Tracker.findLastSeenUseInCopy(Def.asMCReg(), *TRI);
1327        auto Leader = ChainLeader.find(MaybePrevReload);
1328        MachineInstr *L = nullptr;
1329        if (Leader == ChainLeader.end() ||
1330            (MaybePrevReload && !IsChainedCopy(*MaybePrevReload, MI))) {
1331          L = &MI;
1332          assert(!SpillChain.count(L) &&
1333                 "SpillChain should not have contained newly found chain");
1334        } else {
1335          assert(MaybePrevReload &&
1336               "Finding a valid leader through nullptr should not happen");
1337          L = Leader->second;
1338          assert(SpillChain[L].size() > 0 &&
1339                 "Existing chain's length should be larger than zero");
1340        }
1341        assert(!ChainLeader.count(&MI) && !ChainLeader.count(MaybeSpill) &&
1342               "Newly found paired spill-reload should not belong to any chain "
1343               "at this point");
1344        ChainLeader.insert({MaybeSpill, L});
1345        ChainLeader.insert({&MI, L});
1346        SpillChain[L].push_back(MaybeSpill);
1347        ReloadChain[L].push_back(&MI);
1348        LLVM_DEBUG(dbgs() << "MCP: Chain " << L << " now is:\n");
1349        LLVM_DEBUG(printSpillReloadChain(SpillChain, ReloadChain, L));
1350      } else if (MaybeSpill && !MaybeSpillIsChained) {
1351        // MaybeSpill is unable to pair with MI, that is, adding MI makes the
1352        // chain invalid.
1353        // The COPY that defines Src is no longer considered a candidate for a
1354        // valid chain, since we expect the Def of a spill copy not to be used by
1355        // any COPY instruction until the paired reload copy. For example:
1356        // L1: r1 = COPY r2
1357        // L2: r3 = COPY r1
1358        // If we later have
1359        // L1: r1 = COPY r2
1360        // L2: r3 = COPY r1
1361        // L3: r2 = COPY r1
1362        // L1 and L3 can't be a valid spill-reload pair.
1363        // Thus we keep the property#1.
1364        LLVM_DEBUG(dbgs() << "MCP: Not paired spill-reload:\n");
1365        LLVM_DEBUG(MaybeSpill->dump());
1366        LLVM_DEBUG(MI.dump());
1367        Tracker.clobberRegister(Src.asMCReg(), *TRI, *TII, UseCopyInstr);
1368        LLVM_DEBUG(dbgs() << "MCP: Removed tracking of " << printReg(Src, TRI)
1369                          << "\n");
1370      }
1371      Tracker.trackCopy(&MI, *TRI, *TII, UseCopyInstr);
1372    }
1373  
1374    for (auto I = SpillChain.begin(), E = SpillChain.end(); I != E; ++I) {
1375      auto &SC = I->second;
1376      assert(ReloadChain.count(I->first) &&
1377             "Reload chain of the same leader should exist");
1378      auto &RC = ReloadChain[I->first];
1379      TryFoldSpillageCopies(SC, RC);
1380    }
1381  
1382    MaybeDeadCopies.clear();
1383    CopyDbgUsers.clear();
1384    Tracker.clear();
1385  }
1386  
1387  bool MachineCopyPropagation::runOnMachineFunction(MachineFunction &MF) {
1388    if (skipFunction(MF.getFunction()))
1389      return false;
1390  
1391    bool isSpillageCopyElimEnabled = false;
1392    switch (EnableSpillageCopyElimination) {
1393    case cl::BOU_UNSET:
1394      isSpillageCopyElimEnabled =
1395          MF.getSubtarget().enableSpillageCopyElimination();
1396      break;
1397    case cl::BOU_TRUE:
1398      isSpillageCopyElimEnabled = true;
1399      break;
1400    case cl::BOU_FALSE:
1401      isSpillageCopyElimEnabled = false;
1402      break;
1403    }
1404  
1405    Changed = false;
1406  
1407    TRI = MF.getSubtarget().getRegisterInfo();
1408    TII = MF.getSubtarget().getInstrInfo();
1409    MRI = &MF.getRegInfo();
1410  
1411    for (MachineBasicBlock &MBB : MF) {
1412      if (isSpillageCopyElimEnabled)
1413        EliminateSpillageCopies(MBB);
1414      BackwardCopyPropagateBlock(MBB);
1415      ForwardCopyPropagateBlock(MBB);
1416    }
1417  
1418    return Changed;
1419  }
1420  
1421  MachineFunctionPass *
1422  llvm::createMachineCopyPropagationPass(bool UseCopyInstr = false) {
1423    return new MachineCopyPropagation(UseCopyInstr);
1424  }
1425