xref: /freebsd/contrib/llvm-project/llvm/lib/CodeGen/SwiftErrorValueTracking.cpp (revision 911f0260390e18cf85f3dbf2c719b593efdc1e3c)
1 //===-- SwiftErrorValueTracking.cpp --------------------------------------===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // This implements a limited mem2reg-like analysis to promote uses of function
// arguments and allocas marked with swifterror from memory into virtual
11 // registers tracked by this class.
12 //
13 //===----------------------------------------------------------------------===//
14 
15 #include "llvm/CodeGen/SwiftErrorValueTracking.h"
16 #include "llvm/ADT/PostOrderIterator.h"
17 #include "llvm/ADT/SmallSet.h"
18 #include "llvm/CodeGen/MachineInstrBuilder.h"
19 #include "llvm/CodeGen/MachineRegisterInfo.h"
20 #include "llvm/CodeGen/TargetInstrInfo.h"
21 #include "llvm/CodeGen/TargetLowering.h"
22 #include "llvm/IR/Value.h"
23 
24 using namespace llvm;
25 
26 Register SwiftErrorValueTracking::getOrCreateVReg(const MachineBasicBlock *MBB,
27                                                   const Value *Val) {
28   auto Key = std::make_pair(MBB, Val);
29   auto It = VRegDefMap.find(Key);
30   // If this is the first use of this swifterror value in this basic block,
31   // create a new virtual register.
32   // After we processed all basic blocks we will satisfy this "upwards exposed
33   // use" by inserting a copy or phi at the beginning of this block.
34   if (It == VRegDefMap.end()) {
35     auto &DL = MF->getDataLayout();
36     const TargetRegisterClass *RC = TLI->getRegClassFor(TLI->getPointerTy(DL));
37     auto VReg = MF->getRegInfo().createVirtualRegister(RC);
38     VRegDefMap[Key] = VReg;
39     VRegUpwardsUse[Key] = VReg;
40     return VReg;
41   } else
42     return It->second;
43 }
44 
45 void SwiftErrorValueTracking::setCurrentVReg(const MachineBasicBlock *MBB,
46                                              const Value *Val, Register VReg) {
47   VRegDefMap[std::make_pair(MBB, Val)] = VReg;
48 }
49 
50 Register SwiftErrorValueTracking::getOrCreateVRegDefAt(
51     const Instruction *I, const MachineBasicBlock *MBB, const Value *Val) {
52   auto Key = PointerIntPair<const Instruction *, 1, bool>(I, true);
53   auto It = VRegDefUses.find(Key);
54   if (It != VRegDefUses.end())
55     return It->second;
56 
57   auto &DL = MF->getDataLayout();
58   const TargetRegisterClass *RC = TLI->getRegClassFor(TLI->getPointerTy(DL));
59   Register VReg = MF->getRegInfo().createVirtualRegister(RC);
60   VRegDefUses[Key] = VReg;
61   setCurrentVReg(MBB, Val, VReg);
62   return VReg;
63 }
64 
65 Register SwiftErrorValueTracking::getOrCreateVRegUseAt(
66     const Instruction *I, const MachineBasicBlock *MBB, const Value *Val) {
67   auto Key = PointerIntPair<const Instruction *, 1, bool>(I, false);
68   auto It = VRegDefUses.find(Key);
69   if (It != VRegDefUses.end())
70     return It->second;
71 
72   Register VReg = getOrCreateVReg(MBB, Val);
73   VRegDefUses[Key] = VReg;
74   return VReg;
75 }
76 
77 /// Set up SwiftErrorVals by going through the function. If the function has
78 /// swifterror argument, it will be the first entry.
79 void SwiftErrorValueTracking::setFunction(MachineFunction &mf) {
80   MF = &mf;
81   Fn = &MF->getFunction();
82   TLI = MF->getSubtarget().getTargetLowering();
83   TII = MF->getSubtarget().getInstrInfo();
84 
85   if (!TLI->supportSwiftError())
86     return;
87 
88   SwiftErrorVals.clear();
89   VRegDefMap.clear();
90   VRegUpwardsUse.clear();
91   VRegDefUses.clear();
92   SwiftErrorArg = nullptr;
93 
94   // Check if function has a swifterror argument.
95   bool HaveSeenSwiftErrorArg = false;
96   for (Function::const_arg_iterator AI = Fn->arg_begin(), AE = Fn->arg_end();
97        AI != AE; ++AI)
98     if (AI->hasSwiftErrorAttr()) {
99       assert(!HaveSeenSwiftErrorArg &&
100              "Must have only one swifterror parameter");
101       (void)HaveSeenSwiftErrorArg; // silence warning.
102       HaveSeenSwiftErrorArg = true;
103       SwiftErrorArg = &*AI;
104       SwiftErrorVals.push_back(&*AI);
105     }
106 
107   for (const auto &LLVMBB : *Fn)
108     for (const auto &Inst : LLVMBB) {
109       if (const AllocaInst *Alloca = dyn_cast<AllocaInst>(&Inst))
110         if (Alloca->isSwiftError())
111           SwiftErrorVals.push_back(Alloca);
112     }
113 }
114 
115 bool SwiftErrorValueTracking::createEntriesInEntryBlock(DebugLoc DbgLoc) {
116   if (!TLI->supportSwiftError())
117     return false;
118 
119   // We only need to do this when we have swifterror parameter or swifterror
120   // alloc.
121   if (SwiftErrorVals.empty())
122     return false;
123 
124   MachineBasicBlock *MBB = &*MF->begin();
125   auto &DL = MF->getDataLayout();
126   auto const *RC = TLI->getRegClassFor(TLI->getPointerTy(DL));
127   bool Inserted = false;
128   for (const auto *SwiftErrorVal : SwiftErrorVals) {
129     // We will always generate a copy from the argument. It is always used at
130     // least by the 'return' of the swifterror.
131     if (SwiftErrorArg && SwiftErrorArg == SwiftErrorVal)
132       continue;
133     Register VReg = MF->getRegInfo().createVirtualRegister(RC);
134     // Assign Undef to Vreg. We construct MI directly to make sure it works
135     // with FastISel.
136     BuildMI(*MBB, MBB->getFirstNonPHI(), DbgLoc,
137             TII->get(TargetOpcode::IMPLICIT_DEF), VReg);
138 
139     setCurrentVReg(MBB, SwiftErrorVal, VReg);
140     Inserted = true;
141   }
142 
143   return Inserted;
144 }
145 
/// Propagate swifterror values through the machine function CFG.
///
/// Visits blocks in reverse post order so a predecessor's downward-exposed
/// vreg is normally known before its successors are processed. For every
/// (block, swifterror value) pair this either keeps the existing downward
/// def, forwards a single predecessor vreg, emits a COPY into the block's
/// upwards-exposed vreg, or builds a PHI when predecessors disagree.
void SwiftErrorValueTracking::propagateVRegs() {
  if (!TLI->supportSwiftError())
    return;

  // We only need to do this when we have swifterror parameter or swifterror
  // alloc.
  if (SwiftErrorVals.empty())
    return;

  // For each machine basic block in reverse post order.
  ReversePostOrderTraversal<MachineFunction *> RPOT(MF);
  for (MachineBasicBlock *MBB : RPOT) {
    // For each swifterror value in the function.
    for (const auto *SwiftErrorVal : SwiftErrorVals) {
      auto Key = std::make_pair(MBB, SwiftErrorVal);
      auto UUseIt = VRegUpwardsUse.find(Key);
      auto VRegDefIt = VRegDefMap.find(Key);
      // UpwardsUse: some instruction in MBB read the value before any def in
      // this block; UUseVReg is the vreg that read expects live on entry.
      bool UpwardsUse = UUseIt != VRegUpwardsUse.end();
      Register UUseVReg = UpwardsUse ? UUseIt->second : Register();
      // DownwardDef: MBB already has a current vreg that successors can use.
      bool DownwardDef = VRegDefIt != VRegDefMap.end();
      assert(!(UpwardsUse && !DownwardDef) &&
             "We can't have an upwards use but no downwards def");

      // If there is no upwards exposed use and an entry for the swifterror in
      // the def map for this value we don't need to do anything: We already
      // have a downward def for this basic block.
      if (!UpwardsUse && DownwardDef)
        continue;

      // Otherwise we either have an upwards exposed use vreg that we need to
      // materialize or need to forward the downward def from predecessors.

      // Check whether we have a single vreg def from all predecessors.
      // Otherwise we need a phi.
      SmallVector<std::pair<MachineBasicBlock *, Register>, 4> VRegs;
      SmallSet<const MachineBasicBlock *, 8> Visited;
      for (auto *Pred : MBB->predecessors()) {
        if (!Visited.insert(Pred).second)
          continue;
        // Note: for a self-edge this call creates MBB's own upwards-exposed
        // use entry as a side effect, which is picked up just below.
        VRegs.push_back(std::make_pair(
            Pred, getOrCreateVReg(Pred, SwiftErrorVal)));
        if (Pred != MBB)
          continue;
        // We have a self-edge.
        // If there was no upwards use in this basic block there is now one: the
        // phi needs to use it self.
        if (!UpwardsUse) {
          UpwardsUse = true;
          UUseIt = VRegUpwardsUse.find(Key);
          assert(UUseIt != VRegUpwardsUse.end());
          UUseVReg = UUseIt->second;
        }
      }

      // We need a phi node if we have more than one predecessor with different
      // downward defs.
      bool needPHI =
          VRegs.size() >= 1 &&
          llvm::any_of(
              VRegs,
              [&](const std::pair<const MachineBasicBlock *, Register> &V)
                  -> bool { return V.second != VRegs[0].second; });

      // If there is no upwards exposed use and we don't need a phi just
      // forward the swifterror vreg from the predecessor(s).
      if (!UpwardsUse && !needPHI) {
        assert(!VRegs.empty() &&
               "No predecessors? The entry block should bail out earlier");
        // Just forward the swifterror vreg from the predecessor(s).
        setCurrentVReg(MBB, SwiftErrorVal, VRegs[0].second);
        continue;
      }

      auto DLoc = isa<Instruction>(SwiftErrorVal)
                      ? cast<Instruction>(SwiftErrorVal)->getDebugLoc()
                      : DebugLoc();
      // NOTE: deliberately shadows the TII member; same value either way.
      const auto *TII = MF->getSubtarget().getInstrInfo();

      // If we don't need a phi create a copy to the upward exposed vreg.
      if (!needPHI) {
        assert(UpwardsUse);
        assert(!VRegs.empty() &&
               "No predecessors?  Is the Calling Convention correct?");
        Register DestReg = UUseVReg;
        BuildMI(*MBB, MBB->getFirstNonPHI(), DLoc, TII->get(TargetOpcode::COPY),
                DestReg)
            .addReg(VRegs[0].second);
        continue;
      }

      // We need a phi: if there is an upwards exposed use we already have a
      // destination virtual register number otherwise we generate a new one.
      auto &DL = MF->getDataLayout();
      auto const *RC = TLI->getRegClassFor(TLI->getPointerTy(DL));
      Register PHIVReg =
          UpwardsUse ? UUseVReg : MF->getRegInfo().createVirtualRegister(RC);
      MachineInstrBuilder PHI =
          BuildMI(*MBB, MBB->getFirstNonPHI(), DLoc,
                  TII->get(TargetOpcode::PHI), PHIVReg);
      for (auto BBRegPair : VRegs) {
        PHI.addReg(BBRegPair.second).addMBB(BBRegPair.first);
      }

      // We did not have a definition in this block before: store the phi's vreg
      // as this block downward exposed def.
      if (!UpwardsUse)
        setCurrentVReg(MBB, SwiftErrorVal, PHIVReg);
    }
  }
}
257 
258 void SwiftErrorValueTracking::preassignVRegs(
259     MachineBasicBlock *MBB, BasicBlock::const_iterator Begin,
260     BasicBlock::const_iterator End) {
261   if (!TLI->supportSwiftError() || SwiftErrorVals.empty())
262     return;
263 
264   // Iterator over instructions and assign vregs to swifterror defs and uses.
265   for (auto It = Begin; It != End; ++It) {
266     if (auto *CB = dyn_cast<CallBase>(&*It)) {
267       // A call-site with a swifterror argument is both use and def.
268       const Value *SwiftErrorAddr = nullptr;
269       for (const auto &Arg : CB->args()) {
270         if (!Arg->isSwiftError())
271           continue;
272         // Use of swifterror.
273         assert(!SwiftErrorAddr && "Cannot have multiple swifterror arguments");
274         SwiftErrorAddr = &*Arg;
275         assert(SwiftErrorAddr->isSwiftError() &&
276                "Must have a swifterror value argument");
277         getOrCreateVRegUseAt(&*It, MBB, SwiftErrorAddr);
278       }
279       if (!SwiftErrorAddr)
280         continue;
281 
282       // Def of swifterror.
283       getOrCreateVRegDefAt(&*It, MBB, SwiftErrorAddr);
284 
285       // A load is a use.
286     } else if (const LoadInst *LI = dyn_cast<const LoadInst>(&*It)) {
287       const Value *V = LI->getOperand(0);
288       if (!V->isSwiftError())
289         continue;
290 
291       getOrCreateVRegUseAt(LI, MBB, V);
292 
293       // A store is a def.
294     } else if (const StoreInst *SI = dyn_cast<const StoreInst>(&*It)) {
295       const Value *SwiftErrorAddr = SI->getOperand(1);
296       if (!SwiftErrorAddr->isSwiftError())
297         continue;
298 
299       // Def of swifterror.
300       getOrCreateVRegDefAt(&*It, MBB, SwiftErrorAddr);
301 
302       // A return in a swiferror returning function is a use.
303     } else if (const ReturnInst *R = dyn_cast<const ReturnInst>(&*It)) {
304       const Function *F = R->getParent()->getParent();
305       if (!F->getAttributes().hasAttrSomewhere(Attribute::SwiftError))
306         continue;
307 
308       getOrCreateVRegUseAt(R, MBB, SwiftErrorArg);
309     }
310   }
311 }
312