//===- PreISelIntrinsicLowering.cpp - Pre-ISel intrinsic lowering pass ----===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This pass implements IR lowering for the llvm.memcpy, llvm.memmove,
// llvm.memset, llvm.load.relative and llvm.objc.* intrinsics.
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/PreISelIntrinsicLowering.h"
#include "llvm/Analysis/ObjCARCInstKind.h"
#include "llvm/Analysis/ObjCARCUtil.h"
#include "llvm/Analysis/TargetTransformInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/CodeGen/TargetLowering.h"
#include "llvm/CodeGen/TargetPassConfig.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Type.h"
#include "llvm/InitializePasses.h"
#include "llvm/Pass.h"
#include "llvm/Support/Casting.h"
#include "llvm/Support/CommandLine.h"
#include "llvm/Target/TargetMachine.h"
#include "llvm/Transforms/Utils/LowerMemIntrinsics.h"

using namespace llvm;

/// Size threshold for statically sized memory intrinsic calls. Calls of known
/// size larger than this (and calls of unknown size) are candidates for
/// expansion by the pass; calls of known size at or below it are left for
/// expansion in codegen.
static cl::opt<int64_t> MemIntrinsicExpandSizeThresholdOpt(
    "mem-intrinsic-expand-size",
    cl::desc("Set minimum mem intrinsic size to expand in IR"), cl::init(-1),
    cl::Hidden);
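// Example (illustrative): passing -mem-intrinsic-expand-size=64 to a tool
// that links in this pass (e.g. llc) overrides the TTI threshold, so
// fixed-size mem intrinsics larger than 64 bytes become expansion candidates
// below; a value of 0 forces expansion of every call, including size 0.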

namespace {

struct PreISelIntrinsicLowering {
  const TargetMachine &TM;
  const function_ref<TargetTransformInfo &(Function &)> LookupTTI;

  /// If this is true, prefer to leave memory intrinsic calls for later
  /// replacement with a library call whenever TargetLowering reports that the
  /// corresponding library function is available; otherwise always expand
  /// them in IR.
  const bool UseMemIntrinsicLibFunc;

  explicit PreISelIntrinsicLowering(
      const TargetMachine &TM_,
      function_ref<TargetTransformInfo &(Function &)> LookupTTI_,
      bool UseMemIntrinsicLibFunc_ = true)
      : TM(TM_), LookupTTI(LookupTTI_),
        UseMemIntrinsicLibFunc(UseMemIntrinsicLibFunc_) {}

  static bool shouldExpandMemIntrinsicWithSize(Value *Size,
                                               const TargetTransformInfo &TTI);
  bool expandMemIntrinsicUses(Function &F) const;
  bool lowerIntrinsics(Module &M) const;
};

} // namespace

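// Lower every direct call to the llvm.load.relative intrinsic declaration F
// into an explicit relative load. Illustrative IR (value names invented):
//   %p = call ptr @llvm.load.relative.i32(ptr %base, i32 %off)
// becomes roughly
//   %addr = getelementptr i8, ptr %base, i32 %off
//   %rel  = load i32, ptr %addr, align 4
//   %p    = getelementptr i8, ptr %base, i32 %rel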
static bool lowerLoadRelative(Function &F) {
  if (F.use_empty())
    return false;

  bool Changed = false;
  Type *Int32Ty = Type::getInt32Ty(F.getContext());
  Type *Int32PtrTy = Int32Ty->getPointerTo();
  Type *Int8Ty = Type::getInt8Ty(F.getContext());

  for (Use &U : llvm::make_early_inc_range(F.uses())) {
    auto CI = dyn_cast<CallInst>(U.getUser());
    if (!CI || CI->getCalledOperand() != &F)
      continue;

    IRBuilder<> B(CI);
    Value *OffsetPtr =
        B.CreateGEP(Int8Ty, CI->getArgOperand(0), CI->getArgOperand(1));
    Value *OffsetPtrI32 = B.CreateBitCast(OffsetPtr, Int32PtrTy);
    Value *OffsetI32 = B.CreateAlignedLoad(Int32Ty, OffsetPtrI32, Align(4));

    Value *ResultPtr = B.CreateGEP(Int8Ty, CI->getArgOperand(0), OffsetI32);

    CI->replaceAllUsesWith(ResultPtr);
    CI->eraseFromParent();
    Changed = true;
  }

  return Changed;
}

// ObjCARC has knowledge about whether an obj-c runtime function needs to be
// always tail-called or never tail-called.
static CallInst::TailCallKind getOverridingTailCallKind(const Function &F) {
  objcarc::ARCInstKind Kind = objcarc::GetFunctionClass(&F);
  if (objcarc::IsAlwaysTail(Kind))
    return CallInst::TCK_Tail;
  else if (objcarc::IsNeverTail(Kind))
    return CallInst::TCK_NoTail;
  return CallInst::TCK_None;
}

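// Replace all uses of the llvm.objc.* intrinsic declaration F with the named
// runtime function NewFn (e.g. llvm.objc.retain -> objc_retain). Direct calls
// are recreated with the same arguments, operand bundles and a merged
// tail-call kind; uses inside "clang.arc.attachedcall" bundles are simply
// repointed at the runtime function.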
static bool lowerObjCCall(Function &F, const char *NewFn,
                          bool setNonLazyBind = false) {
  assert(IntrinsicInst::mayLowerToFunctionCall(F.getIntrinsicID()) &&
         "Pre-ISel intrinsics do lower into regular function calls");
  if (F.use_empty())
    return false;

  // If we haven't already looked up this function, check to see if the
  // program already contains a function with this name.
  Module *M = F.getParent();
  FunctionCallee FCache = M->getOrInsertFunction(NewFn, F.getFunctionType());

  if (Function *Fn = dyn_cast<Function>(FCache.getCallee())) {
    Fn->setLinkage(F.getLinkage());
    if (setNonLazyBind && !Fn->isWeakForLinker()) {
      // If we have Native ARC, set nonlazybind attribute for these APIs for
      // performance.
      Fn->addFnAttr(Attribute::NonLazyBind);
    }
  }

  CallInst::TailCallKind OverridingTCK = getOverridingTailCallKind(F);

  for (Use &U : llvm::make_early_inc_range(F.uses())) {
    auto *CB = cast<CallBase>(U.getUser());

    if (CB->getCalledFunction() != &F) {
      objcarc::ARCInstKind Kind = objcarc::getAttachedARCFunctionKind(CB);
      (void)Kind;
      assert((Kind == objcarc::ARCInstKind::RetainRV ||
              Kind == objcarc::ARCInstKind::UnsafeClaimRV) &&
             "use expected to be the argument of operand bundle "
             "\"clang.arc.attachedcall\"");
      U.set(FCache.getCallee());
      continue;
    }

    auto *CI = cast<CallInst>(CB);
    assert(CI->getCalledFunction() && "Cannot lower an indirect call!");

    IRBuilder<> Builder(CI->getParent(), CI->getIterator());
    SmallVector<Value *, 8> Args(CI->args());
    SmallVector<llvm::OperandBundleDef, 1> BundleList;
    CI->getOperandBundlesAsDefs(BundleList);
    CallInst *NewCI = Builder.CreateCall(FCache, Args, BundleList);
    NewCI->setName(CI->getName());

    // Try to set the most appropriate TailCallKind based on both the current
    // attributes and the ones that we could get from ObjCARC's special
    // knowledge of the runtime functions.
    //
    // std::max respects both requirements of notail and tail here:
    // * notail on either the call or from ObjCARC becomes notail
    // * tail on either side is stronger than none, but not notail
    CallInst::TailCallKind TCK = CI->getTailCallKind();
    NewCI->setTailCallKind(std::max(TCK, OverridingTCK));

    if (!CI->use_empty())
      CI->replaceAllUsesWith(NewCI);
    CI->eraseFromParent();
  }

  return true;
}

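// Decide whether a mem intrinsic with the given length operand should be
// expanded into an explicit loop in IR. Non-constant lengths always qualify;
// constant lengths qualify when they exceed the threshold (from the command
// line override or from TTI). For instance, with a threshold of 8, a 16-byte
// memcpy qualifies, an 8-byte one does not, and a threshold of 0 makes every
// call qualify.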
// TODO: Should refine based on estimated number of accesses (e.g. does it
// require splitting based on alignment)
bool PreISelIntrinsicLowering::shouldExpandMemIntrinsicWithSize(
    Value *Size, const TargetTransformInfo &TTI) {
  ConstantInt *CI = dyn_cast<ConstantInt>(Size);
  if (!CI)
    return true;
  uint64_t Threshold = MemIntrinsicExpandSizeThresholdOpt.getNumOccurrences()
                           ? MemIntrinsicExpandSizeThresholdOpt
                           : TTI.getMaxMemIntrinsicInlineSizeThreshold();
  uint64_t SizeVal = CI->getZExtValue();

  // Treat a threshold of 0 as a special case to force expansion of all
  // intrinsics, including size 0.
  return SizeVal > Threshold || Threshold == 0;
}

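// Return true if the target exposes the given runtime library call (e.g.
// RTLIB::MEMCPY), i.e. TargetLowering has a libcall name for it, so the
// intrinsic can safely be left for later selection as a call.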
static bool canEmitLibcall(const TargetMachine &TM, Function *F,
                           RTLIB::Libcall LC) {
  // TODO: Should this consider the address space of the memcpy?
  const TargetLowering *TLI = TM.getSubtargetImpl(*F)->getTargetLowering();
  return TLI->getLibcallName(LC) != nullptr;
}

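// Walk every use of the memcpy/memmove/memset intrinsic declaration F and
// expand the ones whose size clears the threshold into explicit loops, unless
// the target provides a suitable library function and UseMemIntrinsicLibFunc
// is set, in which case the intrinsic is left in place.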
// TODO: Handle atomic memcpy and memcpy.inline
// TODO: Pass ScalarEvolution
bool PreISelIntrinsicLowering::expandMemIntrinsicUses(Function &F) const {
  Intrinsic::ID ID = F.getIntrinsicID();
  bool Changed = false;

  for (User *U : llvm::make_early_inc_range(F.users())) {
    Instruction *Inst = cast<Instruction>(U);

    switch (ID) {
    case Intrinsic::memcpy: {
      auto *Memcpy = cast<MemCpyInst>(Inst);
      Function *ParentFunc = Memcpy->getFunction();
      const TargetTransformInfo &TTI = LookupTTI(*ParentFunc);
      if (shouldExpandMemIntrinsicWithSize(Memcpy->getLength(), TTI)) {
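        // Prefer the library function when the target provides one; leaving
        // the intrinsic in place lets the backend emit a plain memcpy call.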
        if (UseMemIntrinsicLibFunc &&
            canEmitLibcall(TM, ParentFunc, RTLIB::MEMCPY))
          break;

        // TODO: For optsize, emit the loop into a separate function
        expandMemCpyAsLoop(Memcpy, TTI);
        Changed = true;
        Memcpy->eraseFromParent();
      }

      break;
    }
    case Intrinsic::memmove: {
      auto *Memmove = cast<MemMoveInst>(Inst);
      Function *ParentFunc = Memmove->getFunction();
      const TargetTransformInfo &TTI = LookupTTI(*ParentFunc);
      if (shouldExpandMemIntrinsicWithSize(Memmove->getLength(), TTI)) {
        if (UseMemIntrinsicLibFunc &&
            canEmitLibcall(TM, ParentFunc, RTLIB::MEMMOVE))
          break;

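        // Unlike the memcpy/memset cases, expandMemMoveAsLoop can fail (for
        // example when the source and destination address spaces cannot be
        // compared), so only erase the intrinsic when a loop was emitted.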
        if (expandMemMoveAsLoop(Memmove, TTI)) {
          Changed = true;
          Memmove->eraseFromParent();
        }
      }

      break;
    }
    case Intrinsic::memset: {
      auto *Memset = cast<MemSetInst>(Inst);
      Function *ParentFunc = Memset->getFunction();
      const TargetTransformInfo &TTI = LookupTTI(*ParentFunc);
      if (shouldExpandMemIntrinsicWithSize(Memset->getLength(), TTI)) {
        if (UseMemIntrinsicLibFunc &&
            canEmitLibcall(TM, ParentFunc, RTLIB::MEMSET))
          break;

        expandMemSetAsLoop(Memset);
        Changed = true;
        Memset->eraseFromParent();
      }

      break;
    }
    default:
      llvm_unreachable("unhandled intrinsic");
    }
  }

  return Changed;
}

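// Dispatch over every intrinsic declaration in the module and lower the ones
// this pass handles: mem intrinsics are expanded (or left for a library
// call), llvm.load.relative becomes explicit loads, and llvm.objc.*
// intrinsics become calls to the corresponding Objective-C runtime entry
// points.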
bool PreISelIntrinsicLowering::lowerIntrinsics(Module &M) const {
  bool Changed = false;
  for (Function &F : M) {
    switch (F.getIntrinsicID()) {
    default:
      break;
    case Intrinsic::memcpy:
    case Intrinsic::memmove:
    case Intrinsic::memset:
      Changed |= expandMemIntrinsicUses(F);
      break;
    case Intrinsic::load_relative:
      Changed |= lowerLoadRelative(F);
      break;
    case Intrinsic::objc_autorelease:
      Changed |= lowerObjCCall(F, "objc_autorelease");
      break;
    case Intrinsic::objc_autoreleasePoolPop:
      Changed |= lowerObjCCall(F, "objc_autoreleasePoolPop");
      break;
    case Intrinsic::objc_autoreleasePoolPush:
      Changed |= lowerObjCCall(F, "objc_autoreleasePoolPush");
      break;
    case Intrinsic::objc_autoreleaseReturnValue:
      Changed |= lowerObjCCall(F, "objc_autoreleaseReturnValue");
      break;
    case Intrinsic::objc_copyWeak:
      Changed |= lowerObjCCall(F, "objc_copyWeak");
      break;
    case Intrinsic::objc_destroyWeak:
      Changed |= lowerObjCCall(F, "objc_destroyWeak");
      break;
    case Intrinsic::objc_initWeak:
      Changed |= lowerObjCCall(F, "objc_initWeak");
      break;
    case Intrinsic::objc_loadWeak:
      Changed |= lowerObjCCall(F, "objc_loadWeak");
      break;
    case Intrinsic::objc_loadWeakRetained:
      Changed |= lowerObjCCall(F, "objc_loadWeakRetained");
      break;
    case Intrinsic::objc_moveWeak:
      Changed |= lowerObjCCall(F, "objc_moveWeak");
      break;
    case Intrinsic::objc_release:
      Changed |= lowerObjCCall(F, "objc_release", true);
      break;
    case Intrinsic::objc_retain:
      Changed |= lowerObjCCall(F, "objc_retain", true);
      break;
    case Intrinsic::objc_retainAutorelease:
      Changed |= lowerObjCCall(F, "objc_retainAutorelease");
      break;
    case Intrinsic::objc_retainAutoreleaseReturnValue:
      Changed |= lowerObjCCall(F, "objc_retainAutoreleaseReturnValue");
      break;
    case Intrinsic::objc_retainAutoreleasedReturnValue:
      Changed |= lowerObjCCall(F, "objc_retainAutoreleasedReturnValue");
      break;
    case Intrinsic::objc_retainBlock:
      Changed |= lowerObjCCall(F, "objc_retainBlock");
      break;
    case Intrinsic::objc_storeStrong:
      Changed |= lowerObjCCall(F, "objc_storeStrong");
      break;
    case Intrinsic::objc_storeWeak:
      Changed |= lowerObjCCall(F, "objc_storeWeak");
      break;
    case Intrinsic::objc_unsafeClaimAutoreleasedReturnValue:
      Changed |= lowerObjCCall(F, "objc_unsafeClaimAutoreleasedReturnValue");
      break;
    case Intrinsic::objc_retainedObject:
      Changed |= lowerObjCCall(F, "objc_retainedObject");
      break;
    case Intrinsic::objc_unretainedObject:
      Changed |= lowerObjCCall(F, "objc_unretainedObject");
      break;
    case Intrinsic::objc_unretainedPointer:
      Changed |= lowerObjCCall(F, "objc_unretainedPointer");
      break;
    case Intrinsic::objc_retain_autorelease:
      Changed |= lowerObjCCall(F, "objc_retain_autorelease");
      break;
    case Intrinsic::objc_sync_enter:
      Changed |= lowerObjCCall(F, "objc_sync_enter");
      break;
    case Intrinsic::objc_sync_exit:
      Changed |= lowerObjCCall(F, "objc_sync_exit");
      break;
    }
  }
  return Changed;
}

namespace {

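// Legacy pass manager wrapper: fetches the TargetMachine from
// TargetPassConfig and TTI from TargetTransformInfoWrapperPass, then runs the
// same lowering logic as the new pass manager entry point below.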
class PreISelIntrinsicLoweringLegacyPass : public ModulePass {
public:
  static char ID;

  PreISelIntrinsicLoweringLegacyPass() : ModulePass(ID) {}

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<TargetTransformInfoWrapperPass>();
    AU.addRequired<TargetPassConfig>();
  }

  bool runOnModule(Module &M) override {
    auto LookupTTI = [this](Function &F) -> TargetTransformInfo & {
      return this->getAnalysis<TargetTransformInfoWrapperPass>().getTTI(F);
    };

    const auto &TM = getAnalysis<TargetPassConfig>().getTM<TargetMachine>();
    PreISelIntrinsicLowering Lowering(TM, LookupTTI);
    return Lowering.lowerIntrinsics(M);
  }
};

} // end anonymous namespace

char PreISelIntrinsicLoweringLegacyPass::ID;

INITIALIZE_PASS_BEGIN(PreISelIntrinsicLoweringLegacyPass,
                      "pre-isel-intrinsic-lowering",
                      "Pre-ISel Intrinsic Lowering", false, false)
INITIALIZE_PASS_DEPENDENCY(TargetPassConfig)
INITIALIZE_PASS_DEPENDENCY(TargetTransformInfoWrapperPass)
INITIALIZE_PASS_END(PreISelIntrinsicLoweringLegacyPass,
                    "pre-isel-intrinsic-lowering",
                    "Pre-ISel Intrinsic Lowering", false, false)

ModulePass *llvm::createPreISelIntrinsicLoweringPass() {
  return new PreISelIntrinsicLoweringLegacyPass();
}

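// New pass manager entry point. A minimal usage sketch (assuming a valid
// TargetMachine reference and analysis managers that already have the module
// and function analyses, including the proxy used below, registered):
//   ModulePassManager MPM;
//   MPM.addPass(PreISelIntrinsicLoweringPass(TM));
//   MPM.run(M, MAM);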
PreservedAnalyses PreISelIntrinsicLoweringPass::run(Module &M,
                                                    ModuleAnalysisManager &AM) {
  auto &FAM = AM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager();

  auto LookupTTI = [&FAM](Function &F) -> TargetTransformInfo & {
    return FAM.getResult<TargetIRAnalysis>(F);
  };

  PreISelIntrinsicLowering Lowering(TM, LookupTTI);
  if (!Lowering.lowerIntrinsics(M))
    return PreservedAnalyses::all();
  else
    return PreservedAnalyses::none();
}