//===-- GCRootLowering.cpp - Garbage collection infrastructure ------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the lowering for the gc.root mechanism.
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/GCMetadata.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/MachineModuleInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/TargetFrameLowering.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Module.h"
#include "llvm/InitializePasses.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/ErrorHandling.h"
#include "llvm/Support/raw_ostream.h"

using namespace llvm;

namespace {

/// LowerIntrinsics - This pass rewrites calls to the llvm.gcread or
/// llvm.gcwrite intrinsics, replacing them with simple loads and stores as
/// directed by the GCStrategy. It also performs automatic root initialization
/// and custom intrinsic lowering.
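///
/// For illustration (a hedged sketch in classic typed-pointer IR syntax, with
/// made-up value names, not taken from this file's tests), a front end using
/// gc.root emits code along these lines:
///
///   %slot = alloca i8*
///   call void @llvm.gcroot(i8** %slot, i8* null)
///   %old  = call i8* @llvm.gcread(i8* %obj, i8** %field)
///   call void @llvm.gcwrite(i8* %new, i8* %obj, i8** %field)
///
/// This pass rewrites the gcread into a plain load of %field and the gcwrite
/// into a plain store of %new to %field, while the gcroot call is left in
/// place for the code generator, which uses it to mark the stack slot.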
class LowerIntrinsics : public FunctionPass {
  bool DoLowering(Function &F, GCStrategy &S);

public:
  static char ID;

  LowerIntrinsics();
  StringRef getPassName() const override;
  void getAnalysisUsage(AnalysisUsage &AU) const override;

  bool doInitialization(Module &M) override;
  bool runOnFunction(Function &F) override;
};

/// GCMachineCodeAnalysis - This is a target-independent pass over the machine
/// function representation to identify safe points for the garbage collector
/// in the machine code. It inserts labels at safe points and populates a
/// GCMetadata record for each function.
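///
/// For illustration (a conceptual sketch, not literal output): after this pass
/// runs, the function's GCFunctionInfo holds roughly the frame size, one
/// (label, debug location) pair per safe point, and one (frame index, stack
/// offset) pair per surviving root; a target's GCMetadataPrinter can later
/// emit that record in whatever format the collector expects.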
class GCMachineCodeAnalysis : public MachineFunctionPass {
  GCFunctionInfo *FI;
  const TargetInstrInfo *TII;

  void FindSafePoints(MachineFunction &MF);
  void VisitCallPoint(MachineBasicBlock::iterator CI);
  MCSymbol *InsertLabel(MachineBasicBlock &MBB, MachineBasicBlock::iterator MI,
                        const DebugLoc &DL) const;

  void FindStackOffsets(MachineFunction &MF);

public:
  static char ID;

  GCMachineCodeAnalysis();
  void getAnalysisUsage(AnalysisUsage &AU) const override;

  bool runOnMachineFunction(MachineFunction &MF) override;
};
}

// -----------------------------------------------------------------------------

INITIALIZE_PASS_BEGIN(LowerIntrinsics, "gc-lowering", "GC Lowering", false,
                      false)
INITIALIZE_PASS_DEPENDENCY(GCModuleInfo)
INITIALIZE_PASS_END(LowerIntrinsics, "gc-lowering", "GC Lowering", false, false)

FunctionPass *llvm::createGCLoweringPass() { return new LowerIntrinsics(); }

char LowerIntrinsics::ID = 0;
char &llvm::GCLoweringID = LowerIntrinsics::ID;

LowerIntrinsics::LowerIntrinsics() : FunctionPass(ID) {
  initializeLowerIntrinsicsPass(*PassRegistry::getPassRegistry());
}

StringRef LowerIntrinsics::getPassName() const {
  return "Lower Garbage Collection Instructions";
}

void LowerIntrinsics::getAnalysisUsage(AnalysisUsage &AU) const {
  FunctionPass::getAnalysisUsage(AU);
  AU.addRequired<GCModuleInfo>();
  AU.addPreserved<DominatorTreeWrapperPass>();
}

/// doInitialization - If this module uses the GC intrinsics, find them now.
bool LowerIntrinsics::doInitialization(Module &M) {
  GCModuleInfo *MI = getAnalysisIfAvailable<GCModuleInfo>();
  assert(MI && "LowerIntrinsics didn't require GCModuleInfo!?");
  for (Function &F : M)
    if (!F.isDeclaration() && F.hasGC())
      MI->getFunctionInfo(F); // Instantiate the GC strategy.

  return false;
}

/// CouldBecomeSafePoint - Predicate to conservatively determine whether the
/// instruction could introduce a safe point.
static bool CouldBecomeSafePoint(Instruction *I) {
  // The instructions which could naturally introduce safe points are:
  //
  //   - call, invoke (AfterCall, BeforeCall)
  //   - phis (Loops)
  //   - invoke, ret, unwind (Exit)
  //
  // However, instructions as seemingly innocuous as arithmetic can become
  // libcalls upon lowering (e.g., div i64 on a 32-bit platform), so instead
  // it is necessary to take a conservative approach.
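  //
  // For illustration (a hedged, target-dependent sketch): on a typical 32-bit
  // target an instruction such as
  //
  //   %q = udiv i64 %a, %b
  //
  // is lowered to a call to a runtime routine (e.g. __udivdi3), and that
  // libcall is a call like any other as far as safe points are concerned.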

  if (isa<AllocaInst>(I) || isa<GetElementPtrInst>(I) || isa<StoreInst>(I) ||
      isa<LoadInst>(I))
    return false;

  // llvm.gcroot is safe because it doesn't do anything at runtime.
  if (CallInst *CI = dyn_cast<CallInst>(I))
    if (Function *F = CI->getCalledFunction())
      if (Intrinsic::ID IID = F->getIntrinsicID())
        if (IID == Intrinsic::gcroot)
          return false;

  return true;
}

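/// InsertRootInitializers - Store a null pointer into every gcroot alloca in
/// Roots that is not already initialized in the entry block before the first
/// instruction that could become a safe point. Each store is inserted
/// immediately after its alloca. Returns true if any store was added.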
static bool InsertRootInitializers(Function &F, ArrayRef<AllocaInst *> Roots) {
  // Scroll past alloca instructions.
  BasicBlock::iterator IP = F.getEntryBlock().begin();
  while (isa<AllocaInst>(IP))
    ++IP;

  // Search for initializers in the initial BB.
  SmallPtrSet<AllocaInst *, 16> InitedRoots;
  for (; !CouldBecomeSafePoint(&*IP); ++IP)
    if (StoreInst *SI = dyn_cast<StoreInst>(IP))
      if (AllocaInst *AI =
              dyn_cast<AllocaInst>(SI->getOperand(1)->stripPointerCasts()))
        InitedRoots.insert(AI);

  // Add root initializers.
  bool MadeChange = false;

  for (AllocaInst *Root : Roots)
    if (!InitedRoots.count(Root)) {
      new StoreInst(
          ConstantPointerNull::get(cast<PointerType>(Root->getAllocatedType())),
          Root, Root->getNextNode());
      MadeChange = true;
    }

  return MadeChange;
}

/// runOnFunction - Replace gcread/gcwrite intrinsics with loads and stores.
/// Leave gcroot intrinsics; the code generator needs to see those.
bool LowerIntrinsics::runOnFunction(Function &F) {
  // Quick exit for functions that do not use GC.
  if (!F.hasGC())
    return false;

  GCFunctionInfo &FI = getAnalysis<GCModuleInfo>().getFunctionInfo(F);
  GCStrategy &S = FI.getStrategy();

  return DoLowering(F, S);
}

/// Lower barriers out of existence (if the associated GCStrategy hasn't
/// already done so...), and insert initializing stores to roots as a defensive
/// measure.  Given that we report all roots live at all safepoints, we need to
/// ensure each root has been initialized by the time the first safepoint is
/// reached.  This really should have been done by the frontend, but the old
/// API made this non-obvious, so we do a potentially redundant store just in
/// case.
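///
/// For illustration (a hedged sketch; the exact IR depends on the front end):
/// given
///
///   %root = alloca i8*
///   call void @llvm.gcroot(i8** %root, i8* null)
///
/// with no store to %root before the first potential safe point, this pass
/// inserts "store i8* null, i8** %root" immediately after the alloca.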
bool LowerIntrinsics::DoLowering(Function &F, GCStrategy &S) {
  SmallVector<AllocaInst *, 32> Roots;

  bool MadeChange = false;
  for (BasicBlock &BB : F)
    for (Instruction &I : llvm::make_early_inc_range(BB)) {
      IntrinsicInst *CI = dyn_cast<IntrinsicInst>(&I);
      if (!CI)
        continue;

      Function *F = CI->getCalledFunction();
      switch (F->getIntrinsicID()) {
      default: break;
      case Intrinsic::gcwrite: {
        // Replace a write barrier with a simple store.
        Value *St = new StoreInst(CI->getArgOperand(0),
                                  CI->getArgOperand(2), CI);
        CI->replaceAllUsesWith(St);
        CI->eraseFromParent();
        MadeChange = true;
        break;
      }
      case Intrinsic::gcread: {
        // Replace a read barrier with a simple load.
        Value *Ld = new LoadInst(CI->getType(), CI->getArgOperand(1), "", CI);
        Ld->takeName(CI);
        CI->replaceAllUsesWith(Ld);
        CI->eraseFromParent();
        MadeChange = true;
        break;
      }
      case Intrinsic::gcroot: {
        // Record the GC root so it can be initialized below, but do not
        // delete the intrinsic. The backend needs the intrinsic to flag the
        // stack slot.
        Roots.push_back(
            cast<AllocaInst>(CI->getArgOperand(0)->stripPointerCasts()));
        break;
      }
      }
    }

  if (Roots.size())
    MadeChange |= InsertRootInitializers(F, Roots);

  return MadeChange;
}

// -----------------------------------------------------------------------------

char GCMachineCodeAnalysis::ID = 0;
char &llvm::GCMachineCodeAnalysisID = GCMachineCodeAnalysis::ID;

INITIALIZE_PASS(GCMachineCodeAnalysis, "gc-analysis",
                "Analyze Machine Code For Garbage Collection", false, false)

GCMachineCodeAnalysis::GCMachineCodeAnalysis() : MachineFunctionPass(ID) {}

void GCMachineCodeAnalysis::getAnalysisUsage(AnalysisUsage &AU) const {
  MachineFunctionPass::getAnalysisUsage(AU);
  AU.setPreservesAll();
  AU.addRequired<GCModuleInfo>();
}

MCSymbol *GCMachineCodeAnalysis::InsertLabel(MachineBasicBlock &MBB,
                                             MachineBasicBlock::iterator MI,
                                             const DebugLoc &DL) const {
  MCSymbol *Label = MBB.getParent()->getContext().createTempSymbol();
  BuildMI(MBB, MI, DL, TII->get(TargetOpcode::GC_LABEL)).addSym(Label);
  return Label;
}

void GCMachineCodeAnalysis::VisitCallPoint(MachineBasicBlock::iterator CI) {
  // Find the return address (the next instruction), since that is what will
  // be on the stack when the call is suspended and the stack is inspected.
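  //
  // For illustration (a rough sketch of the resulting machine code):
  //
  //   CALL @foo            ; the call being visited
  //   GC_LABEL <Ltmp>      ; inserted below, so <Ltmp> marks the return address
  //
  // The label is then recorded as a safe point in the GCFunctionInfo.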
  MachineBasicBlock::iterator RAI = CI;
  ++RAI;

  MCSymbol *Label = InsertLabel(*CI->getParent(), RAI, CI->getDebugLoc());
  FI->addSafePoint(Label, CI->getDebugLoc());
}

void GCMachineCodeAnalysis::FindSafePoints(MachineFunction &MF) {
  for (MachineBasicBlock &MBB : MF)
    for (MachineInstr &MI : MBB)
      if (MI.isCall()) {
        // Do not treat tail or sibling call sites as safe points.  This is
        // legal since any arguments passed to the callee which live in the
        // remnants of the caller's frame will be owned and updated by the
        // callee if required.
        if (MI.isTerminator())
          continue;
        VisitCallPoint(&MI);
      }
}

void GCMachineCodeAnalysis::FindStackOffsets(MachineFunction &MF) {
  const TargetFrameLowering *TFI = MF.getSubtarget().getFrameLowering();
  assert(TFI && "TargetFrameLowering not available!");

  for (GCFunctionInfo::roots_iterator RI = FI->roots_begin();
       RI != FI->roots_end();) {
    // If the root references a dead object, no need to keep it.
    if (MF.getFrameInfo().isDeadObjectIndex(RI->Num)) {
      RI = FI->removeStackRoot(RI);
    } else {
      Register FrameReg; // FIXME: surely GCRoot ought to store the
                         // register that the offset is from?
      auto FrameOffset = TFI->getFrameIndexReference(MF, RI->Num, FrameReg);
      assert(!FrameOffset.getScalable() &&
             "Frame offsets with a scalable component are not supported");
      RI->StackOffset = FrameOffset.getFixed();
      ++RI;
    }
  }
}

bool GCMachineCodeAnalysis::runOnMachineFunction(MachineFunction &MF) {
  // Quick exit for functions that do not use GC.
  if (!MF.getFunction().hasGC())
    return false;

  FI = &getAnalysis<GCModuleInfo>().getFunctionInfo(MF.getFunction());
  TII = MF.getSubtarget().getInstrInfo();

  // Find the size of the stack frame.  There may be no correct static frame
  // size; if so, we use UINT64_MAX to represent it.
  const MachineFrameInfo &MFI = MF.getFrameInfo();
  const TargetRegisterInfo *RegInfo = MF.getSubtarget().getRegisterInfo();
  const bool DynamicFrameSize =
      MFI.hasVarSizedObjects() || RegInfo->hasStackRealignment(MF);
  FI->setFrameSize(DynamicFrameSize ? UINT64_MAX : MFI.getStackSize());

  // Find all safe points.
  if (FI->getStrategy().needsSafePoints())
    FindSafePoints(MF);

  // Find the concrete stack offsets for all roots (stack slots)
  FindStackOffsets(MF);

  return false;
}