//===-- GCRootLowering.cpp - Garbage collection infrastructure -----------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This file implements the lowering for the gc.root mechanism.
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/GCMetadata.h"
#include "llvm/CodeGen/MachineFrameInfo.h"
#include "llvm/CodeGen/MachineFunctionPass.h"
#include "llvm/CodeGen/MachineInstrBuilder.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/TargetFrameLowering.h"
#include "llvm/CodeGen/TargetInstrInfo.h"
#include "llvm/CodeGen/TargetRegisterInfo.h"
#include "llvm/CodeGen/TargetSubtargetInfo.h"
#include "llvm/IR/Dominators.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Module.h"
#include "llvm/InitializePasses.h"
#include "llvm/MC/MCContext.h"

using namespace llvm;

namespace {

/// LowerIntrinsics - This pass rewrites calls to the llvm.gcread or
/// llvm.gcwrite intrinsics, replacing them with simple loads and stores as
/// directed by the GCStrategy. It also performs automatic root initialization
/// and custom intrinsic lowering.
class LowerIntrinsics : public FunctionPass {
  bool DoLowering(Function &F, GCStrategy &S);

public:
  static char ID;

  LowerIntrinsics();
  StringRef getPassName() const override;
  void getAnalysisUsage(AnalysisUsage &AU) const override;

  bool doInitialization(Module &M) override;
  bool runOnFunction(Function &F) override;
};

/// GCMachineCodeAnalysis - This is a target-independent pass over the machine
/// function representation to identify safe points for the garbage collector
/// in the machine code. It inserts labels at safe points and populates a
/// GCMetadata record for each function.
class GCMachineCodeAnalysis : public MachineFunctionPass {
  GCFunctionInfo *FI = nullptr;
  const TargetInstrInfo *TII = nullptr;

  void FindSafePoints(MachineFunction &MF);
  void VisitCallPoint(MachineBasicBlock::iterator CI);
  MCSymbol *InsertLabel(MachineBasicBlock &MBB, MachineBasicBlock::iterator MI,
                        const DebugLoc &DL) const;

  void FindStackOffsets(MachineFunction &MF);

public:
  static char ID;

  GCMachineCodeAnalysis();
  void getAnalysisUsage(AnalysisUsage &AU) const override;

  bool runOnMachineFunction(MachineFunction &MF) override;
};
}

// -----------------------------------------------------------------------------

INITIALIZE_PASS_BEGIN(LowerIntrinsics, "gc-lowering", "GC Lowering", false,
                      false)
INITIALIZE_PASS_DEPENDENCY(GCModuleInfo)
INITIALIZE_PASS_END(LowerIntrinsics, "gc-lowering", "GC Lowering", false, false)

FunctionPass *llvm::createGCLoweringPass() { return new LowerIntrinsics(); }

char LowerIntrinsics::ID = 0;
char &llvm::GCLoweringID = LowerIntrinsics::ID;

LowerIntrinsics::LowerIntrinsics() : FunctionPass(ID) {
  initializeLowerIntrinsicsPass(*PassRegistry::getPassRegistry());
}

StringRef LowerIntrinsics::getPassName() const {
  return "Lower Garbage Collection Instructions";
}

void LowerIntrinsics::getAnalysisUsage(AnalysisUsage &AU) const {
  FunctionPass::getAnalysisUsage(AU);
  AU.addRequired<GCModuleInfo>();
  AU.addPreserved<DominatorTreeWrapperPass>();
}

/// doInitialization - If this module uses the GC intrinsics, find them now.
bool LowerIntrinsics::doInitialization(Module &M) {
  GCModuleInfo *MI = getAnalysisIfAvailable<GCModuleInfo>();
  assert(MI && "LowerIntrinsics didn't require GCModuleInfo!?");
  for (Function &F : M)
    if (!F.isDeclaration() && F.hasGC())
      MI->getFunctionInfo(F); // Instantiate the GC strategy.

  return false;
}

/// CouldBecomeSafePoint - Predicate to conservatively determine whether the
/// instruction could introduce a safe point.
static bool CouldBecomeSafePoint(Instruction *I) {
  // The natural set of instructions which could introduce safe points is:
  //
  //   - call, invoke (AfterCall, BeforeCall)
  //   - phis (Loops)
  //   - invoke, ret, unwind (Exit)
  //
  // However, instructions as seemingly innocuous as arithmetic can become
  // libcalls upon lowering (e.g., div i64 on a 32-bit platform), so instead
  // it is necessary to take a conservative approach.

  if (isa<AllocaInst>(I) || isa<GetElementPtrInst>(I) || isa<StoreInst>(I) ||
      isa<LoadInst>(I))
    return false;

  // llvm.gcroot is safe because it doesn't do anything at runtime.
  if (CallInst *CI = dyn_cast<CallInst>(I))
    if (Function *F = CI->getCalledFunction())
      if (Intrinsic::ID IID = F->getIntrinsicID())
        if (IID == Intrinsic::gcroot)
          return false;

  return true;
}

static bool InsertRootInitializers(Function &F, ArrayRef<AllocaInst *> Roots) {
  // Scroll past alloca instructions.
  BasicBlock::iterator IP = F.getEntryBlock().begin();
  while (isa<AllocaInst>(IP))
    ++IP;

  // Search for initializers in the initial BB.
  SmallPtrSet<AllocaInst *, 16> InitedRoots;
  for (; !CouldBecomeSafePoint(&*IP); ++IP)
    if (StoreInst *SI = dyn_cast<StoreInst>(IP))
      if (AllocaInst *AI =
              dyn_cast<AllocaInst>(SI->getOperand(1)->stripPointerCasts()))
        InitedRoots.insert(AI);

  // Add root initializers.
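  //
  // Illustrative sketch only (the exact IR depends on the frontend and on the
  // pointer representation in use): a root that is not stored to before the
  // first potential safe point, e.g.
  //
  //   %root = alloca ptr
  //   call void @llvm.gcroot(ptr %root, ptr null)
  //
  // gets a null store inserted immediately after its alloca:
  //
  //   %root = alloca ptr
  //   store ptr null, ptr %root
  //   call void @llvm.gcroot(ptr %root, ptr null)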
  bool MadeChange = false;

  for (AllocaInst *Root : Roots)
    if (!InitedRoots.count(Root)) {
      new StoreInst(
          ConstantPointerNull::get(cast<PointerType>(Root->getAllocatedType())),
          Root, Root->getNextNode());
      MadeChange = true;
    }

  return MadeChange;
}

/// runOnFunction - Replace gcread/gcwrite intrinsics with loads and stores.
/// Leave gcroot intrinsics; the code generator needs to see those.
bool LowerIntrinsics::runOnFunction(Function &F) {
  // Quick exit for functions that do not use GC.
  if (!F.hasGC())
    return false;

  GCFunctionInfo &FI = getAnalysis<GCModuleInfo>().getFunctionInfo(F);
  GCStrategy &S = FI.getStrategy();

  return DoLowering(F, S);
}

/// Lower barriers out of existence (if the associated GCStrategy hasn't
/// already done so...), and insert initializing stores to roots as a defensive
/// measure. Given that we're going to report all roots as live at all
/// safepoints, we need to ensure each root has been initialized by the time
/// the first safepoint is reached. This really should have been done by the
/// frontend, but the old API made this non-obvious, so we do a potentially
/// redundant store just in case.
bool LowerIntrinsics::DoLowering(Function &F, GCStrategy &S) {
  SmallVector<AllocaInst *, 32> Roots;

  bool MadeChange = false;
  for (BasicBlock &BB : F)
    for (Instruction &I : llvm::make_early_inc_range(BB)) {
      IntrinsicInst *CI = dyn_cast<IntrinsicInst>(&I);
      if (!CI)
        continue;

      // Note: renamed from F to avoid shadowing the Function &F parameter.
      Function *Callee = CI->getCalledFunction();
      switch (Callee->getIntrinsicID()) {
      default: break;
      case Intrinsic::gcwrite: {
        // Replace a write barrier with a simple store.
        Value *St =
            new StoreInst(CI->getArgOperand(0), CI->getArgOperand(2), CI);
        CI->replaceAllUsesWith(St);
        CI->eraseFromParent();
        MadeChange = true;
        break;
      }
      case Intrinsic::gcread: {
        // Replace a read barrier with a simple load.
        Value *Ld = new LoadInst(CI->getType(), CI->getArgOperand(1), "", CI);
        Ld->takeName(CI);
        CI->replaceAllUsesWith(Ld);
        CI->eraseFromParent();
        MadeChange = true;
        break;
      }
      case Intrinsic::gcroot: {
        // Initialize the GC root, but do not delete the intrinsic. The
        // backend needs the intrinsic to flag the stack slot.
        Roots.push_back(
            cast<AllocaInst>(CI->getArgOperand(0)->stripPointerCasts()));
        break;
      }
      }
    }

  if (Roots.size())
    MadeChange |= InsertRootInitializers(F, Roots);

  return MadeChange;
}

// -----------------------------------------------------------------------------

char GCMachineCodeAnalysis::ID = 0;
char &llvm::GCMachineCodeAnalysisID = GCMachineCodeAnalysis::ID;

INITIALIZE_PASS(GCMachineCodeAnalysis, "gc-analysis",
                "Analyze Machine Code For Garbage Collection", false, false)

GCMachineCodeAnalysis::GCMachineCodeAnalysis() : MachineFunctionPass(ID) {}

void GCMachineCodeAnalysis::getAnalysisUsage(AnalysisUsage &AU) const {
  MachineFunctionPass::getAnalysisUsage(AU);
  AU.setPreservesAll();
  AU.addRequired<GCModuleInfo>();
}

MCSymbol *GCMachineCodeAnalysis::InsertLabel(MachineBasicBlock &MBB,
                                             MachineBasicBlock::iterator MI,
                                             const DebugLoc &DL) const {
  MCSymbol *Label = MBB.getParent()->getContext().createTempSymbol();
  BuildMI(MBB, MI, DL, TII->get(TargetOpcode::GC_LABEL)).addSym(Label);
  return Label;
}

void GCMachineCodeAnalysis::VisitCallPoint(MachineBasicBlock::iterator CI) {
  // Find the return address (next instruction), since that's what will be on
  // the stack when the call is suspended and we need to inspect the stack.
  MachineBasicBlock::iterator RAI = CI;
  ++RAI;

  MCSymbol *Label = InsertLabel(*CI->getParent(), RAI, CI->getDebugLoc());
  FI->addSafePoint(Label, CI->getDebugLoc());
}

void GCMachineCodeAnalysis::FindSafePoints(MachineFunction &MF) {
  for (MachineBasicBlock &MBB : MF)
    for (MachineInstr &MI : MBB)
      if (MI.isCall()) {
        // Do not treat tail or sibling call sites as safe points. This is
        // legal since any arguments passed to the callee which live in the
        // remnants of the caller's frame will be owned and updated by the
        // callee if required.
        if (MI.isTerminator())
          continue;
        VisitCallPoint(&MI);
      }
}

void GCMachineCodeAnalysis::FindStackOffsets(MachineFunction &MF) {
  const TargetFrameLowering *TFI = MF.getSubtarget().getFrameLowering();
  assert(TFI && "TargetFrameLowering not available!");

  for (GCFunctionInfo::roots_iterator RI = FI->roots_begin();
       RI != FI->roots_end();) {
    // If the root references a dead object, no need to keep it.
    if (MF.getFrameInfo().isDeadObjectIndex(RI->Num)) {
      RI = FI->removeStackRoot(RI);
    } else {
      Register FrameReg; // FIXME: surely GCRoot ought to store the
                         // register that the offset is from?
      auto FrameOffset = TFI->getFrameIndexReference(MF, RI->Num, FrameReg);
      assert(!FrameOffset.getScalable() &&
             "Frame offsets with a scalable component are not supported");
      RI->StackOffset = FrameOffset.getFixed();
      ++RI;
    }
  }
}

bool GCMachineCodeAnalysis::runOnMachineFunction(MachineFunction &MF) {
  // Quick exit for functions that do not use GC.
  if (!MF.getFunction().hasGC())
    return false;

  FI = &getAnalysis<GCModuleInfo>().getFunctionInfo(MF.getFunction());
  TII = MF.getSubtarget().getInstrInfo();

  // Find the size of the stack frame. There may be no correct static frame
  // size, in which case we use UINT64_MAX to represent it.
  const MachineFrameInfo &MFI = MF.getFrameInfo();
  const TargetRegisterInfo *RegInfo = MF.getSubtarget().getRegisterInfo();
  const bool DynamicFrameSize =
      MFI.hasVarSizedObjects() || RegInfo->hasStackRealignment(MF);
  FI->setFrameSize(DynamicFrameSize ? UINT64_MAX : MFI.getStackSize());

  // Find all safe points.
  if (FI->getStrategy().needsSafePoints())
    FindSafePoints(MF);

  // Find the concrete stack offsets for all roots (stack slots).
  FindStackOffsets(MF);

  return false;
}
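
// Illustrative example only (the value and pointer names below are
// hypothetical, and the exact IR depends on the frontend and GC strategy):
// for a strategy that leaves barrier lowering to LowerIntrinsics, a write
// barrier such as
//
//   call void @llvm.gcwrite(ptr %val, ptr %obj, ptr %fieldptr)
//
// is replaced with a plain store of the value through the derived pointer,
//
//   store ptr %val, ptr %fieldptr
//
// and a read barrier such as
//
//   %v = call ptr @llvm.gcread(ptr %obj, ptr %fieldptr)
//
// is replaced with a plain load,
//
//   %v = load ptr, ptr %fieldptr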