//===-- GCRootLowering.cpp - Garbage collection infrastructure ------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the lowering for the gc.root mechanism.
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/GCMetadata.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/TargetFrameLowering.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Module.h"
#include "llvm/InitializePasses.h"
#include "llvm/MC/MCContext.h"

using namespace llvm;

/// Lower barriers out of existence (if the associated GCStrategy hasn't
/// already done so...), and insert initializing stores to roots as a defensive
/// measure.  Given we're going to report all roots live at all safepoints, we
/// need to be able to ensure each root has been initialized by the point the
/// first safepoint is reached.  This really should have been done by the
/// frontend, but the old API made this non-obvious, so we do a potentially
/// redundant store just in case.
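///
/// Illustrative sketch only (hand-written IR, not taken from any test case):
/// given an uninitialized root such as
///
///   %root = alloca ptr
///   call void @llvm.gcroot(ptr %root, ptr null)
///
/// with no store to %root before the first potential safepoint, the lowering
/// inserts "store ptr null, ptr %root" immediately after the alloca.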
static bool DoLowering(Function &F, GCStrategy &S);

namespace {

/// LowerIntrinsics - This pass rewrites calls to the llvm.gcread or
/// llvm.gcwrite intrinsics, replacing them with simple loads and stores as
/// directed by the GCStrategy. It also performs automatic root initialization
/// and custom intrinsic lowering.
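///
/// Rough sketch of the rewrite (illustrative IR assuming opaque pointers, not
/// taken from any test case):
///
///   %v = call ptr @llvm.gcread(ptr %obj, ptr %field)
///     -->  %v = load ptr, ptr %field
///   call void @llvm.gcwrite(ptr %v, ptr %obj, ptr %field)
///     -->  store ptr %v, ptr %field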
class LowerIntrinsics : public FunctionPass {
public:
  static char ID;

  LowerIntrinsics();
  StringRef getPassName() const override;
  void getAnalysisUsage(AnalysisUsage &AU) const override;

  bool doInitialization(Module &M) override;
  bool runOnFunction(Function &F) override;
};

/// GCMachineCodeAnalysis - This is a target-independent pass over the machine
/// function representation to identify safe points for the garbage collector
/// in the machine code. It inserts labels at safe points and populates a
/// GCMetadata record for each function.
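///
/// Schematically (illustrative only, not actual compiler output), each
/// non-tail call ends up followed by a GC_LABEL pseudo-instruction:
///
///   CALL @foo ...
///   GC_LABEL <mcsymbol .Ltmp0>
///
/// and the label, together with each root's frame offset and the static frame
/// size, is recorded in the function's GCFunctionInfo.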
class GCMachineCodeAnalysis : public MachineFunctionPass {
  GCFunctionInfo *FI = nullptr;
  const TargetInstrInfo *TII = nullptr;

  void FindSafePoints(MachineFunction &MF);
  void VisitCallPoint(MachineBasicBlock::iterator CI);
  MCSymbol *InsertLabel(MachineBasicBlock &MBB, MachineBasicBlock::iterator MI,
                        const DebugLoc &DL) const;

  void FindStackOffsets(MachineFunction &MF);

public:
  static char ID;

  GCMachineCodeAnalysis();
  void getAnalysisUsage(AnalysisUsage &AU) const override;

  bool runOnMachineFunction(MachineFunction &MF) override;
};
}

PreservedAnalyses GCLoweringPass::run(Function &F,
                                      FunctionAnalysisManager &FAM) {
  if (!F.hasGC())
    return PreservedAnalyses::all();

  auto &Info = FAM.getResult<GCFunctionAnalysis>(F);

  bool Changed = DoLowering(F, Info.getStrategy());

  if (!Changed)
    return PreservedAnalyses::all();
  PreservedAnalyses PA;
  PA.preserve<DominatorTreeAnalysis>();
  return PA;
}

// -----------------------------------------------------------------------------

INITIALIZE_PASS_BEGIN(LowerIntrinsics, "gc-lowering", "GC Lowering", false,
                      false)
INITIALIZE_PASS_DEPENDENCY(GCModuleInfo)
INITIALIZE_PASS_END(LowerIntrinsics, "gc-lowering", "GC Lowering", false, false)

FunctionPass *llvm::createGCLoweringPass() { return new LowerIntrinsics(); }

char LowerIntrinsics::ID = 0;
char &llvm::GCLoweringID = LowerIntrinsics::ID;

LowerIntrinsics::LowerIntrinsics() : FunctionPass(ID) {
  initializeLowerIntrinsicsPass(*PassRegistry::getPassRegistry());
}

StringRef LowerIntrinsics::getPassName() const {
  return "Lower Garbage Collection Instructions";
}

void LowerIntrinsics::getAnalysisUsage(AnalysisUsage &AU) const {
  FunctionPass::getAnalysisUsage(AU);
  AU.addRequired<GCModuleInfo>();
  AU.addPreserved<DominatorTreeWrapperPass>();
}

/// doInitialization - If this module uses the GC intrinsics, find them now.
bool LowerIntrinsics::doInitialization(Module &M) {
  GCModuleInfo *MI = getAnalysisIfAvailable<GCModuleInfo>();
  assert(MI && "LowerIntrinsics didn't require GCModuleInfo!?");
  for (Function &F : M)
    if (!F.isDeclaration() && F.hasGC())
      MI->getFunctionInfo(F); // Instantiate the GC strategy.

  return false;
}

/// CouldBecomeSafePoint - Predicate to conservatively determine whether the
/// instruction could introduce a safe point.
static bool CouldBecomeSafePoint(Instruction *I) {
  // The natural definition of instructions which could introduce safe points
  // are:
  //
  //   - call, invoke (AfterCall, BeforeCall)
  //   - phis (Loops)
  //   - invoke, ret, unwind (Exit)
  //
  // However, instructions as seemingly innocuous as arithmetic can become
  // libcalls upon lowering (e.g., div i64 on a 32-bit platform), so instead
  // it is necessary to take a conservative approach.
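  // (On such a target the 64-bit division is typically expanded into a call
  // to a runtime helper such as __divdi3, which is an ordinary call as far as
  // safepoint placement is concerned.)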

  if (isa<AllocaInst>(I) || isa<GetElementPtrInst>(I) || isa<StoreInst>(I) ||
      isa<LoadInst>(I))
    return false;

  // llvm.gcroot is safe because it doesn't do anything at runtime.
  if (CallInst *CI = dyn_cast<CallInst>(I))
    if (Function *F = CI->getCalledFunction())
      if (Intrinsic::ID IID = F->getIntrinsicID())
        if (IID == Intrinsic::gcroot)
          return false;

  return true;
}

static bool InsertRootInitializers(Function &F, ArrayRef<AllocaInst *> Roots) {
  // Scroll past alloca instructions.
  BasicBlock::iterator IP = F.getEntryBlock().begin();
  while (isa<AllocaInst>(IP))
    ++IP;

  // Search for initializers in the initial BB.
  SmallPtrSet<AllocaInst *, 16> InitedRoots;
  for (; !CouldBecomeSafePoint(&*IP); ++IP)
    if (StoreInst *SI = dyn_cast<StoreInst>(IP))
      if (AllocaInst *AI =
              dyn_cast<AllocaInst>(SI->getOperand(1)->stripPointerCasts()))
        InitedRoots.insert(AI);

  // Add root initializers.
  bool MadeChange = false;

  for (AllocaInst *Root : Roots)
    if (!InitedRoots.count(Root)) {
      new StoreInst(
          ConstantPointerNull::get(cast<PointerType>(Root->getAllocatedType())),
          Root, std::next(Root->getIterator()));
      MadeChange = true;
    }

  return MadeChange;
}

/// runOnFunction - Replace gcread/gcwrite intrinsics with loads and stores.
/// Leave gcroot intrinsics; the code generator needs to see those.
bool LowerIntrinsics::runOnFunction(Function &F) {
  // Quick exit for functions that do not use GC.
  if (!F.hasGC())
    return false;

  GCFunctionInfo &FI = getAnalysis<GCModuleInfo>().getFunctionInfo(F);
  GCStrategy &S = FI.getStrategy();

  return DoLowering(F, S);
}

bool DoLowering(Function &F, GCStrategy &S) {
  SmallVector<AllocaInst *, 32> Roots;

  bool MadeChange = false;
  for (BasicBlock &BB : F)
    for (Instruction &I : llvm::make_early_inc_range(BB)) {
      IntrinsicInst *CI = dyn_cast<IntrinsicInst>(&I);
      if (!CI)
        continue;

      Function *F = CI->getCalledFunction();
      switch (F->getIntrinsicID()) {
      default: break;
      case Intrinsic::gcwrite: {
        // Replace a write barrier with a simple store.
        Value *St = new StoreInst(CI->getArgOperand(0), CI->getArgOperand(2),
                                  CI->getIterator());
        CI->replaceAllUsesWith(St);
        CI->eraseFromParent();
        MadeChange = true;
        break;
      }
      case Intrinsic::gcread: {
        // Replace a read barrier with a simple load.
        Value *Ld = new LoadInst(CI->getType(), CI->getArgOperand(1), "",
                                 CI->getIterator());
        Ld->takeName(CI);
        CI->replaceAllUsesWith(Ld);
        CI->eraseFromParent();
        MadeChange = true;
        break;
      }
      case Intrinsic::gcroot: {
        // Initialize the GC root, but do not delete the intrinsic. The
        // backend needs the intrinsic to flag the stack slot.
        Roots.push_back(
            cast<AllocaInst>(CI->getArgOperand(0)->stripPointerCasts()));
        break;
      }
      }
    }

  if (Roots.size())
    MadeChange |= InsertRootInitializers(F, Roots);

  return MadeChange;
}

// -----------------------------------------------------------------------------

char GCMachineCodeAnalysis::ID = 0;
char &llvm::GCMachineCodeAnalysisID = GCMachineCodeAnalysis::ID;

INITIALIZE_PASS(GCMachineCodeAnalysis, "gc-analysis",
                "Analyze Machine Code For Garbage Collection", false, false)

GCMachineCodeAnalysis::GCMachineCodeAnalysis() : MachineFunctionPass(ID) {}

void GCMachineCodeAnalysis::getAnalysisUsage(AnalysisUsage &AU) const {
  MachineFunctionPass::getAnalysisUsage(AU);
  AU.setPreservesAll();
  AU.addRequired<GCModuleInfo>();
}

MCSymbol *GCMachineCodeAnalysis::InsertLabel(MachineBasicBlock &MBB,
                                             MachineBasicBlock::iterator MI,
                                             const DebugLoc &DL) const {
  MCSymbol *Label = MBB.getParent()->getContext().createTempSymbol();
  BuildMI(MBB, MI, DL, TII->get(TargetOpcode::GC_LABEL)).addSym(Label);
  return Label;
}

void GCMachineCodeAnalysis::VisitCallPoint(MachineBasicBlock::iterator CI) {
  // Find the return address (next instruction), since that's what will be on
  // the stack when the call is suspended and we need to inspect the stack.
  MachineBasicBlock::iterator RAI = CI;
  ++RAI;

  MCSymbol *Label = InsertLabel(*CI->getParent(), RAI, CI->getDebugLoc());
  FI->addSafePoint(Label, CI->getDebugLoc());
}

void GCMachineCodeAnalysis::FindSafePoints(MachineFunction &MF) {
  for (MachineBasicBlock &MBB : MF)
    for (MachineInstr &MI : MBB)
      if (MI.isCall()) {
        // Do not treat tail or sibling call sites as safe points.  This is
        // legal since any arguments passed to the callee which live in the
        // remnants of the caller's frame will be owned and updated by the
        // callee if required.
        if (MI.isTerminator())
          continue;
        VisitCallPoint(&MI);
      }
}

void GCMachineCodeAnalysis::FindStackOffsets(MachineFunction &MF) {
  const TargetFrameLowering *TFI = MF.getSubtarget().getFrameLowering();
  assert(TFI && "TargetFrameLowering not available!");

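  // getFrameIndexReference expresses each root's location as an offset from
  // the register it returns in FrameReg (typically the stack or frame
  // pointer); only the fixed component of that offset is recorded below.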
  for (GCFunctionInfo::roots_iterator RI = FI->roots_begin();
       RI != FI->roots_end();) {
    // If the root references a dead object, no need to keep it.
    if (MF.getFrameInfo().isDeadObjectIndex(RI->Num)) {
      RI = FI->removeStackRoot(RI);
    } else {
      Register FrameReg; // FIXME: surely GCRoot ought to store the
                         // register that the offset is from?
      auto FrameOffset = TFI->getFrameIndexReference(MF, RI->Num, FrameReg);
      assert(!FrameOffset.getScalable() &&
             "Frame offsets with a scalable component are not supported");
      RI->StackOffset = FrameOffset.getFixed();
      ++RI;
    }
  }
}

bool GCMachineCodeAnalysis::runOnMachineFunction(MachineFunction &MF) {
  // Quick exit for functions that do not use GC.
  if (!MF.getFunction().hasGC())
    return false;

  FI = &getAnalysis<GCModuleInfo>().getFunctionInfo(MF.getFunction());
  TII = MF.getSubtarget().getInstrInfo();

  // Find the size of the stack frame.  There may be no correct static frame
  // size; if so, we use UINT64_MAX to represent it.
  const MachineFrameInfo &MFI = MF.getFrameInfo();
  const TargetRegisterInfo *RegInfo = MF.getSubtarget().getRegisterInfo();
  const bool DynamicFrameSize =
      MFI.hasVarSizedObjects() || RegInfo->hasStackRealignment(MF);
  FI->setFrameSize(DynamicFrameSize ? UINT64_MAX : MFI.getStackSize());

  // Find all safe points.
  if (FI->getStrategy().needsSafePoints())
    FindSafePoints(MF);

  // Find the concrete stack offsets for all roots (stack slots).
  FindStackOffsets(MF);

  return false;
}