xref: /freebsd/contrib/llvm-project/llvm/lib/CodeGen/GCRootLowering.cpp (revision a90b9d0159070121c221b966469c3e36d912bf82)
1 //===-- GCRootLowering.cpp - Garbage collection infrastructure ------------===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // This file implements the lowering for the gc.root mechanism.
10 //
11 //===----------------------------------------------------------------------===//
12 
13 #include "llvm/CodeGen/GCMetadata.h"
14 #include "llvm/CodeGen/MachineFrameInfo.h"
15 #include "llvm/CodeGen/MachineFunctionPass.h"
16 #include "llvm/CodeGen/MachineInstrBuilder.h"
17 #include "llvm/CodeGen/Passes.h"
18 #include "llvm/CodeGen/TargetFrameLowering.h"
19 #include "llvm/CodeGen/TargetInstrInfo.h"
20 #include "llvm/CodeGen/TargetRegisterInfo.h"
21 #include "llvm/CodeGen/TargetSubtargetInfo.h"
22 #include "llvm/IR/Dominators.h"
23 #include "llvm/IR/IntrinsicInst.h"
24 #include "llvm/IR/Module.h"
25 #include "llvm/InitializePasses.h"
26 #include "llvm/MC/MCContext.h"
27 
28 using namespace llvm;
29 
30 /// Lower barriers out of existence (if the associated GCStrategy hasn't
31 /// already done so...), and insert initializing stores to roots as a defensive
32 /// measure.  Given we're going to report all roots live at all safepoints, we
33 /// need to be able to ensure each root has been initialized by the point the
34 /// first safepoint is reached.  This really should have been done by the
35 /// frontend, but the old API made this non-obvious, so we do a potentially
36 /// redundant store just in case.
37 static bool DoLowering(Function &F, GCStrategy &S);
38 
39 namespace {
40 
/// LowerIntrinsics - This pass rewrites calls to the llvm.gcread or
/// llvm.gcwrite intrinsics, replacing them with simple loads and stores as
/// directed by the GCStrategy. It also performs automatic root initialization
/// and custom intrinsic lowering.
class LowerIntrinsics : public FunctionPass {
public:
  static char ID; // Pass identification, replacement for typeid.

  LowerIntrinsics();
  StringRef getPassName() const override;
  void getAnalysisUsage(AnalysisUsage &AU) const override;

  // Instantiates GC strategies for every GC-using function in the module.
  bool doInitialization(Module &M) override;
  // Performs the actual intrinsic lowering; see DoLowering.
  bool runOnFunction(Function &F) override;
};
56 
/// GCMachineCodeAnalysis - This is a target-independent pass over the machine
/// function representation to identify safe points for the garbage collector
/// in the machine code. It inserts labels at safe points and populates a
/// GCMetadata record for each function.
class GCMachineCodeAnalysis : public MachineFunctionPass {
  // Metadata record for the function currently being processed; set at the
  // start of runOnMachineFunction.
  GCFunctionInfo *FI = nullptr;
  const TargetInstrInfo *TII = nullptr;

  // Walks the machine function and records a safe point after each
  // non-tail call.
  void FindSafePoints(MachineFunction &MF);
  void VisitCallPoint(MachineBasicBlock::iterator CI);
  // Emits a GC_LABEL pseudo-instruction before MI and returns its symbol.
  MCSymbol *InsertLabel(MachineBasicBlock &MBB, MachineBasicBlock::iterator MI,
                        const DebugLoc &DL) const;

  // Resolves each recorded stack root to a concrete frame offset (and prunes
  // roots referring to dead stack objects).
  void FindStackOffsets(MachineFunction &MF);

public:
  static char ID; // Pass identification, replacement for typeid.

  GCMachineCodeAnalysis();
  void getAnalysisUsage(AnalysisUsage &AU) const override;

  bool runOnMachineFunction(MachineFunction &MF) override;
};
80 }
81 
82 PreservedAnalyses GCLoweringPass::run(Function &F,
83                                       FunctionAnalysisManager &FAM) {
84   auto &Info = FAM.getResult<GCFunctionAnalysis>(F);
85 
86   bool Changed = DoLowering(F, Info.getStrategy());
87 
88   if (!Changed)
89     return PreservedAnalyses::all();
90   PreservedAnalyses PA;
91   PA.preserve<DominatorTreeAnalysis>();
92   return PA;
93 }
94 
95 // -----------------------------------------------------------------------------
96 
// Legacy-pass-manager registration for the GC lowering pass.
INITIALIZE_PASS_BEGIN(LowerIntrinsics, "gc-lowering", "GC Lowering", false,
                      false)
INITIALIZE_PASS_DEPENDENCY(GCModuleInfo)
INITIALIZE_PASS_END(LowerIntrinsics, "gc-lowering", "GC Lowering", false, false)

// Factory used by the pass pipeline builder.
FunctionPass *llvm::createGCLoweringPass() { return new LowerIntrinsics(); }

char LowerIntrinsics::ID = 0;
// Exported ID so other passes can name this one as a dependency.
char &llvm::GCLoweringID = LowerIntrinsics::ID;

LowerIntrinsics::LowerIntrinsics() : FunctionPass(ID) {
  initializeLowerIntrinsicsPass(*PassRegistry::getPassRegistry());
}
110 
// Human-readable pass name shown in -debug-pass output and diagnostics.
StringRef LowerIntrinsics::getPassName() const {
  return "Lower Garbage Collection Instructions";
}
114 
void LowerIntrinsics::getAnalysisUsage(AnalysisUsage &AU) const {
  FunctionPass::getAnalysisUsage(AU);
  AU.addRequired<GCModuleInfo>();
  // Lowering only inserts loads/stores and deletes calls, so the CFG (and
  // hence the dominator tree) is untouched.
  AU.addPreserved<DominatorTreeWrapperPass>();
}
120 
121 /// doInitialization - If this module uses the GC intrinsics, find them now.
122 bool LowerIntrinsics::doInitialization(Module &M) {
123   GCModuleInfo *MI = getAnalysisIfAvailable<GCModuleInfo>();
124   assert(MI && "LowerIntrinsics didn't require GCModuleInfo!?");
125   for (Function &F : M)
126     if (!F.isDeclaration() && F.hasGC())
127       MI->getFunctionInfo(F); // Instantiate the GC strategy.
128 
129   return false;
130 }
131 
132 /// CouldBecomeSafePoint - Predicate to conservatively determine whether the
133 /// instruction could introduce a safe point.
134 static bool CouldBecomeSafePoint(Instruction *I) {
135   // The natural definition of instructions which could introduce safe points
136   // are:
137   //
138   //   - call, invoke (AfterCall, BeforeCall)
139   //   - phis (Loops)
140   //   - invoke, ret, unwind (Exit)
141   //
142   // However, instructions as seemingly inoccuous as arithmetic can become
143   // libcalls upon lowering (e.g., div i64 on a 32-bit platform), so instead
144   // it is necessary to take a conservative approach.
145 
146   if (isa<AllocaInst>(I) || isa<GetElementPtrInst>(I) || isa<StoreInst>(I) ||
147       isa<LoadInst>(I))
148     return false;
149 
150   // llvm.gcroot is safe because it doesn't do anything at runtime.
151   if (CallInst *CI = dyn_cast<CallInst>(I))
152     if (Function *F = CI->getCalledFunction())
153       if (Intrinsic::ID IID = F->getIntrinsicID())
154         if (IID == Intrinsic::gcroot)
155           return false;
156 
157   return true;
158 }
159 
160 static bool InsertRootInitializers(Function &F, ArrayRef<AllocaInst *> Roots) {
161   // Scroll past alloca instructions.
162   BasicBlock::iterator IP = F.getEntryBlock().begin();
163   while (isa<AllocaInst>(IP))
164     ++IP;
165 
166   // Search for initializers in the initial BB.
167   SmallPtrSet<AllocaInst *, 16> InitedRoots;
168   for (; !CouldBecomeSafePoint(&*IP); ++IP)
169     if (StoreInst *SI = dyn_cast<StoreInst>(IP))
170       if (AllocaInst *AI =
171               dyn_cast<AllocaInst>(SI->getOperand(1)->stripPointerCasts()))
172         InitedRoots.insert(AI);
173 
174   // Add root initializers.
175   bool MadeChange = false;
176 
177   for (AllocaInst *Root : Roots)
178     if (!InitedRoots.count(Root)) {
179       new StoreInst(
180           ConstantPointerNull::get(cast<PointerType>(Root->getAllocatedType())),
181           Root, Root->getNextNode());
182       MadeChange = true;
183     }
184 
185   return MadeChange;
186 }
187 
188 /// runOnFunction - Replace gcread/gcwrite intrinsics with loads and stores.
189 /// Leave gcroot intrinsics; the code generator needs to see those.
190 bool LowerIntrinsics::runOnFunction(Function &F) {
191   // Quick exit for functions that do not use GC.
192   if (!F.hasGC())
193     return false;
194 
195   GCFunctionInfo &FI = getAnalysis<GCModuleInfo>().getFunctionInfo(F);
196   GCStrategy &S = FI.getStrategy();
197 
198   return DoLowering(F, S);
199 }
200 
201 bool DoLowering(Function &F, GCStrategy &S) {
202   SmallVector<AllocaInst *, 32> Roots;
203 
204   bool MadeChange = false;
205   for (BasicBlock &BB : F)
206     for (Instruction &I : llvm::make_early_inc_range(BB)) {
207       IntrinsicInst *CI = dyn_cast<IntrinsicInst>(&I);
208       if (!CI)
209         continue;
210 
211       Function *F = CI->getCalledFunction();
212       switch (F->getIntrinsicID()) {
213       default: break;
214       case Intrinsic::gcwrite: {
215         // Replace a write barrier with a simple store.
216         Value *St = new StoreInst(CI->getArgOperand(0),
217                                   CI->getArgOperand(2), CI);
218         CI->replaceAllUsesWith(St);
219         CI->eraseFromParent();
220         MadeChange = true;
221         break;
222       }
223       case Intrinsic::gcread: {
224         // Replace a read barrier with a simple load.
225         Value *Ld = new LoadInst(CI->getType(), CI->getArgOperand(1), "", CI);
226         Ld->takeName(CI);
227         CI->replaceAllUsesWith(Ld);
228         CI->eraseFromParent();
229         MadeChange = true;
230         break;
231       }
232       case Intrinsic::gcroot: {
233         // Initialize the GC root, but do not delete the intrinsic. The
234         // backend needs the intrinsic to flag the stack slot.
235         Roots.push_back(
236             cast<AllocaInst>(CI->getArgOperand(0)->stripPointerCasts()));
237         break;
238       }
239       }
240     }
241 
242   if (Roots.size())
243     MadeChange |= InsertRootInitializers(F, Roots);
244 
245   return MadeChange;
246 }
247 
248 // -----------------------------------------------------------------------------
249 
char GCMachineCodeAnalysis::ID = 0;
// Exported ID so the target pass pipeline can schedule this analysis.
char &llvm::GCMachineCodeAnalysisID = GCMachineCodeAnalysis::ID;

INITIALIZE_PASS(GCMachineCodeAnalysis, "gc-analysis",
                "Analyze Machine Code For Garbage Collection", false, false)

GCMachineCodeAnalysis::GCMachineCodeAnalysis() : MachineFunctionPass(ID) {}
257 
void GCMachineCodeAnalysis::getAnalysisUsage(AnalysisUsage &AU) const {
  MachineFunctionPass::getAnalysisUsage(AU);
  // Purely analytical: only GC metadata is populated, no IR/MIR is changed.
  AU.setPreservesAll();
  AU.addRequired<GCModuleInfo>();
}
263 
264 MCSymbol *GCMachineCodeAnalysis::InsertLabel(MachineBasicBlock &MBB,
265                                              MachineBasicBlock::iterator MI,
266                                              const DebugLoc &DL) const {
267   MCSymbol *Label = MBB.getParent()->getContext().createTempSymbol();
268   BuildMI(MBB, MI, DL, TII->get(TargetOpcode::GC_LABEL)).addSym(Label);
269   return Label;
270 }
271 
272 void GCMachineCodeAnalysis::VisitCallPoint(MachineBasicBlock::iterator CI) {
273   // Find the return address (next instruction), since that's what will be on
274   // the stack when the call is suspended and we need to inspect the stack.
275   MachineBasicBlock::iterator RAI = CI;
276   ++RAI;
277 
278   MCSymbol *Label = InsertLabel(*CI->getParent(), RAI, CI->getDebugLoc());
279   FI->addSafePoint(Label, CI->getDebugLoc());
280 }
281 
282 void GCMachineCodeAnalysis::FindSafePoints(MachineFunction &MF) {
283   for (MachineBasicBlock &MBB : MF)
284     for (MachineInstr &MI : MBB)
285       if (MI.isCall()) {
286         // Do not treat tail or sibling call sites as safe points.  This is
287         // legal since any arguments passed to the callee which live in the
288         // remnants of the callers frame will be owned and updated by the
289         // callee if required.
290         if (MI.isTerminator())
291           continue;
292         VisitCallPoint(&MI);
293       }
294 }
295 
296 void GCMachineCodeAnalysis::FindStackOffsets(MachineFunction &MF) {
297   const TargetFrameLowering *TFI = MF.getSubtarget().getFrameLowering();
298   assert(TFI && "TargetRegisterInfo not available!");
299 
300   for (GCFunctionInfo::roots_iterator RI = FI->roots_begin();
301        RI != FI->roots_end();) {
302     // If the root references a dead object, no need to keep it.
303     if (MF.getFrameInfo().isDeadObjectIndex(RI->Num)) {
304       RI = FI->removeStackRoot(RI);
305     } else {
306       Register FrameReg; // FIXME: surely GCRoot ought to store the
307                          // register that the offset is from?
308       auto FrameOffset = TFI->getFrameIndexReference(MF, RI->Num, FrameReg);
309       assert(!FrameOffset.getScalable() &&
310              "Frame offsets with a scalable component are not supported");
311       RI->StackOffset = FrameOffset.getFixed();
312       ++RI;
313     }
314   }
315 }
316 
317 bool GCMachineCodeAnalysis::runOnMachineFunction(MachineFunction &MF) {
318   // Quick exit for functions that do not use GC.
319   if (!MF.getFunction().hasGC())
320     return false;
321 
322   FI = &getAnalysis<GCModuleInfo>().getFunctionInfo(MF.getFunction());
323   TII = MF.getSubtarget().getInstrInfo();
324 
325   // Find the size of the stack frame.  There may be no correct static frame
326   // size, we use UINT64_MAX to represent this.
327   const MachineFrameInfo &MFI = MF.getFrameInfo();
328   const TargetRegisterInfo *RegInfo = MF.getSubtarget().getRegisterInfo();
329   const bool DynamicFrameSize =
330       MFI.hasVarSizedObjects() || RegInfo->hasStackRealignment(MF);
331   FI->setFrameSize(DynamicFrameSize ? UINT64_MAX : MFI.getStackSize());
332 
333   // Find all safe points.
334   if (FI->getStrategy().needsSafePoints())
335     FindSafePoints(MF);
336 
337   // Find the concrete stack offsets for all roots (stack slots)
338   FindStackOffsets(MF);
339 
340   return false;
341 }
342