//===-- SwiftErrorValueTracking.cpp --------------------------------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This implements a limited mem2reg-like analysis to promote uses of function
// arguments and allocas marked with swifterror from memory into virtual
// registers tracked by this class.
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/SwiftErrorValueTracking.h"
#include "llvm/ADT/PostOrderIterator.h"
#include "llvm/ADT/SmallSet.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineRegisterInfo.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetLowering.h"
#include "llvm/IR/Value.h"

using namespace llvm;

Register SwiftErrorValueTracking::getOrCreateVReg(const MachineBasicBlock *MBB,
                                                  const Value *Val) {
  auto Key = std::make_pair(MBB, Val);
  auto It = VRegDefMap.find(Key);
  // If this is the first use of this swifterror value in this basic block,
  // create a new virtual register.
  // After we have processed all basic blocks, we will satisfy this "upwards
  // exposed use" by inserting a copy or phi at the beginning of this block.
  if (It == VRegDefMap.end()) {
    auto &DL = MF->getDataLayout();
    const TargetRegisterClass *RC = TLI->getRegClassFor(TLI->getPointerTy(DL));
    auto VReg = MF->getRegInfo().createVirtualRegister(RC);
    VRegDefMap[Key] = VReg;
    VRegUpwardsUse[Key] = VReg;
    return VReg;
  } else
    return It->second;
}

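/// Record \p VReg as the current swifterror virtual register for \p Val in
/// \p MBB, replacing any previously recorded downward def for this block.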
void SwiftErrorValueTracking::setCurrentVReg(const MachineBasicBlock *MBB,
                                             const Value *Val, Register VReg) {
  VRegDefMap[std::make_pair(MBB, Val)] = VReg;
}

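/// Get or create the swifterror virtual register defined at instruction \p I.
/// The bool of the PointerIntPair key is true for defs, so a def and a use at
/// the same instruction (e.g. a call site) get distinct entries. A newly
/// created vreg also becomes the current downward def for \p Val in \p MBB.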
Register SwiftErrorValueTracking::getOrCreateVRegDefAt(
    const Instruction *I, const MachineBasicBlock *MBB, const Value *Val) {
  auto Key = PointerIntPair<const Instruction *, 1, bool>(I, true);
  auto It = VRegDefUses.find(Key);
  if (It != VRegDefUses.end())
    return It->second;

  auto &DL = MF->getDataLayout();
  const TargetRegisterClass *RC = TLI->getRegClassFor(TLI->getPointerTy(DL));
  Register VReg = MF->getRegInfo().createVirtualRegister(RC);
  VRegDefUses[Key] = VReg;
  setCurrentVReg(MBB, Val, VReg);
  return VReg;
}

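/// Get or create the swifterror virtual register used at instruction \p I.
/// The bool of the PointerIntPair key is false for uses. The returned vreg is
/// the current swifterror vreg for \p Val in \p MBB, creating a new
/// upwards-exposed use if the block has no def yet.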
Register SwiftErrorValueTracking::getOrCreateVRegUseAt(
    const Instruction *I, const MachineBasicBlock *MBB, const Value *Val) {
  auto Key = PointerIntPair<const Instruction *, 1, bool>(I, false);
  auto It = VRegDefUses.find(Key);
  if (It != VRegDefUses.end())
    return It->second;

  Register VReg = getOrCreateVReg(MBB, Val);
  VRegDefUses[Key] = VReg;
  return VReg;
}

/// Set up SwiftErrorVals by going through the function. If the function has a
/// swifterror argument, it will be the first entry.
void SwiftErrorValueTracking::setFunction(MachineFunction &mf) {
  MF = &mf;
  Fn = &MF->getFunction();
  TLI = MF->getSubtarget().getTargetLowering();
  TII = MF->getSubtarget().getInstrInfo();

  if (!TLI->supportSwiftError())
    return;

  SwiftErrorVals.clear();
  VRegDefMap.clear();
  VRegUpwardsUse.clear();
  VRegDefUses.clear();
  SwiftErrorArg = nullptr;

  // Check if function has a swifterror argument.
  bool HaveSeenSwiftErrorArg = false;
  for (Function::const_arg_iterator AI = Fn->arg_begin(), AE = Fn->arg_end();
       AI != AE; ++AI)
    if (AI->hasSwiftErrorAttr()) {
      assert(!HaveSeenSwiftErrorArg &&
             "Must have only one swifterror parameter");
      (void)HaveSeenSwiftErrorArg; // silence warning.
      HaveSeenSwiftErrorArg = true;
      SwiftErrorArg = &*AI;
      SwiftErrorVals.push_back(&*AI);
    }

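  // Collect all swifterror allocas in the function body; their values are
  // tracked alongside the swifterror argument.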
  for (const auto &LLVMBB : *Fn)
    for (const auto &Inst : LLVMBB) {
      if (const AllocaInst *Alloca = dyn_cast<AllocaInst>(&Inst))
        if (Alloca->isSwiftError())
          SwiftErrorVals.push_back(Alloca);
    }
}

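/// Insert an IMPLICIT_DEF in the entry block for each swifterror value other
/// than the swifterror argument, so that every value has an initial vreg
/// definition. Returns true if any instruction was inserted.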
bool SwiftErrorValueTracking::createEntriesInEntryBlock(DebugLoc DbgLoc) {
  if (!TLI->supportSwiftError())
    return false;

  // We only need to do this when we have a swifterror parameter or a
  // swifterror alloca.
  if (SwiftErrorVals.empty())
    return false;

  MachineBasicBlock *MBB = &*MF->begin();
  auto &DL = MF->getDataLayout();
  auto const *RC = TLI->getRegClassFor(TLI->getPointerTy(DL));
  bool Inserted = false;
  for (const auto *SwiftErrorVal : SwiftErrorVals) {
    // We will always generate a copy from the argument. It is always used at
    // least by the 'return' of the swifterror.
    if (SwiftErrorArg && SwiftErrorArg == SwiftErrorVal)
      continue;
    Register VReg = MF->getRegInfo().createVirtualRegister(RC);
    // Assign Undef to VReg. We construct the MI directly to make sure it
    // works with FastISel.
    BuildMI(*MBB, MBB->getFirstNonPHI(), DbgLoc,
            TII->get(TargetOpcode::IMPLICIT_DEF), VReg);

    setCurrentVReg(MBB, SwiftErrorVal, VReg);
    Inserted = true;
  }

  return Inserted;
}

/// Propagate swifterror values through the machine function CFG.
void SwiftErrorValueTracking::propagateVRegs() {
  if (!TLI->supportSwiftError())
    return;

  // We only need to do this when we have a swifterror parameter or a
  // swifterror alloca.
  if (SwiftErrorVals.empty())
    return;

  // For each machine basic block in reverse post order.
  ReversePostOrderTraversal<MachineFunction *> RPOT(MF);
  for (MachineBasicBlock *MBB : RPOT) {
    // For each swifterror value in the function.
    for (const auto *SwiftErrorVal : SwiftErrorVals) {
      auto Key = std::make_pair(MBB, SwiftErrorVal);
      auto UUseIt = VRegUpwardsUse.find(Key);
      auto VRegDefIt = VRegDefMap.find(Key);
      bool UpwardsUse = UUseIt != VRegUpwardsUse.end();
      Register UUseVReg = UpwardsUse ? UUseIt->second : Register();
      bool DownwardDef = VRegDefIt != VRegDefMap.end();
      assert(!(UpwardsUse && !DownwardDef) &&
             "We can't have an upwards use but no downwards def");

      // If there is no upwards exposed use and there is already an entry for
      // the swifterror value in the def map, we don't need to do anything: we
      // already have a downward def for this basic block.
      if (!UpwardsUse && DownwardDef)
        continue;

      // Otherwise we either have an upwards exposed use vreg that we need to
      // materialize or need to forward the downward def from predecessors.

      // Check whether we have a single vreg def from all predecessors.
      // Otherwise we need a phi.
      SmallVector<std::pair<MachineBasicBlock *, Register>, 4> VRegs;
      SmallSet<const MachineBasicBlock *, 8> Visited;
      for (auto *Pred : MBB->predecessors()) {
        if (!Visited.insert(Pred).second)
          continue;
        VRegs.push_back(std::make_pair(
            Pred, getOrCreateVReg(Pred, SwiftErrorVal)));
        if (Pred != MBB)
          continue;
        // We have a self-edge.
        // If there was no upwards use in this basic block, there is now one:
        // the phi needs to use itself.
        if (!UpwardsUse) {
          UpwardsUse = true;
          UUseIt = VRegUpwardsUse.find(Key);
          assert(UUseIt != VRegUpwardsUse.end());
          UUseVReg = UUseIt->second;
        }
      }

      // We need a phi node if we have more than one predecessor with different
      // downward defs.
      bool needPHI =
          VRegs.size() >= 1 &&
          llvm::find_if(
              VRegs,
              [&](const std::pair<const MachineBasicBlock *, Register> &V)
                  -> bool { return V.second != VRegs[0].second; }) !=
              VRegs.end();

      // If there is no upwards exposed use and we don't need a phi, just
      // forward the swifterror vreg from the predecessor(s).
      if (!UpwardsUse && !needPHI) {
        assert(!VRegs.empty() &&
               "No predecessors? The entry block should bail out earlier");
        // Just forward the swifterror vreg from the predecessor(s).
        setCurrentVReg(MBB, SwiftErrorVal, VRegs[0].second);
        continue;
      }

      auto DLoc = isa<Instruction>(SwiftErrorVal)
                      ? cast<Instruction>(SwiftErrorVal)->getDebugLoc()
                      : DebugLoc();
      const auto *TII = MF->getSubtarget().getInstrInfo();

      // If we don't need a phi, create a copy to the upward exposed vreg.
      if (!needPHI) {
        assert(UpwardsUse);
        assert(!VRegs.empty() &&
               "No predecessors?  Is the Calling Convention correct?");
        Register DestReg = UUseVReg;
        BuildMI(*MBB, MBB->getFirstNonPHI(), DLoc, TII->get(TargetOpcode::COPY),
                DestReg)
            .addReg(VRegs[0].second);
        continue;
      }

      // We need a phi: if there is an upwards exposed use we already have a
      // destination virtual register number; otherwise we generate a new one.
      auto &DL = MF->getDataLayout();
      auto const *RC = TLI->getRegClassFor(TLI->getPointerTy(DL));
      Register PHIVReg =
          UpwardsUse ? UUseVReg : MF->getRegInfo().createVirtualRegister(RC);
      MachineInstrBuilder PHI =
          BuildMI(*MBB, MBB->getFirstNonPHI(), DLoc,
                  TII->get(TargetOpcode::PHI), PHIVReg);
      for (auto BBRegPair : VRegs) {
        PHI.addReg(BBRegPair.second).addMBB(BBRegPair.first);
      }

      // We did not have a definition in this block before: store the phi's
      // vreg as this block's downward exposed def.
      if (!UpwardsUse)
        setCurrentVReg(MBB, SwiftErrorVal, PHIVReg);
    }
  }
}

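/// Pre-assign virtual registers to the swifterror defs and uses in the
/// instruction range [Begin, End) of \p MBB: a call taking a swifterror
/// argument is both a use and a def, a load is a use, a store is a def, and a
/// return in a swifterror-returning function is a use of the swifterror
/// argument.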
void SwiftErrorValueTracking::preassignVRegs(
    MachineBasicBlock *MBB, BasicBlock::const_iterator Begin,
    BasicBlock::const_iterator End) {
  if (!TLI->supportSwiftError() || SwiftErrorVals.empty())
    return;

  // Iterate over instructions and assign vregs to swifterror defs and uses.
  for (auto It = Begin; It != End; ++It) {
    if (auto *CB = dyn_cast<CallBase>(&*It)) {
      // A call-site with a swifterror argument is both use and def.
      const Value *SwiftErrorAddr = nullptr;
      for (auto &Arg : CB->args()) {
        if (!Arg->isSwiftError())
          continue;
        // Use of swifterror.
        assert(!SwiftErrorAddr && "Cannot have multiple swifterror arguments");
        SwiftErrorAddr = &*Arg;
        assert(SwiftErrorAddr->isSwiftError() &&
               "Must have a swifterror value argument");
        getOrCreateVRegUseAt(&*It, MBB, SwiftErrorAddr);
      }
      if (!SwiftErrorAddr)
        continue;

      // Def of swifterror.
      getOrCreateVRegDefAt(&*It, MBB, SwiftErrorAddr);

      // A load is a use.
    } else if (const LoadInst *LI = dyn_cast<const LoadInst>(&*It)) {
      const Value *V = LI->getOperand(0);
      if (!V->isSwiftError())
        continue;

      getOrCreateVRegUseAt(LI, MBB, V);

      // A store is a def.
    } else if (const StoreInst *SI = dyn_cast<const StoreInst>(&*It)) {
      const Value *SwiftErrorAddr = SI->getOperand(1);
      if (!SwiftErrorAddr->isSwiftError())
        continue;

      // Def of swifterror.
      getOrCreateVRegDefAt(&*It, MBB, SwiftErrorAddr);

      // A return in a swifterror-returning function is a use.
    } else if (const ReturnInst *R = dyn_cast<const ReturnInst>(&*It)) {
      const Function *F = R->getParent()->getParent();
      if (!F->getAttributes().hasAttrSomewhere(Attribute::SwiftError))
        continue;

      getOrCreateVRegUseAt(R, MBB, SwiftErrorArg);
    }
  }
}