//===- PreISelIntrinsicLowering.cpp - Pre-ISel intrinsic lowering pass ----===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This pass implements IR lowering for the llvm.memcpy, llvm.memmove,
// llvm.memset, llvm.load.relative and llvm.objc.* intrinsics.
//
//===----------------------------------------------------------------------===//

#include "llvm/CodeGen/PreISelIntrinsicLowering.h"
#include "llvm/Analysis/ObjCARCInstKind.h"
#include "llvm/Analysis/ObjCARCUtil.h"
#include "llvm/Analysis/TargetLibraryInfo.h"
#include "llvm/Analysis/TargetTransformInfo.h"
#include "llvm/CodeGen/Passes.h"
#include "llvm/IR/Function.h"
#include "llvm/IR/IRBuilder.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/IntrinsicInst.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/Type.h"
#include "llvm/InitializePasses.h"
#include "llvm/Pass.h"
28 #include "llvm/Support/Casting.h"
29 #include "llvm/Transforms/Utils/LowerMemIntrinsics.h"
30 
31 using namespace llvm;
32 
/// Threshold for expanding statically sized memory intrinsic calls in IR.
/// Calls of known size larger than this will be expanded by the pass; calls
/// of smaller known size will be left for expansion in codegen. Calls of
/// unknown size are always considered for expansion here.
static cl::opt<int64_t> MemIntrinsicExpandSizeThresholdOpt(
    "mem-intrinsic-expand-size",
    cl::desc("Set minimum mem intrinsic size to expand in IR"), cl::init(-1),
    cl::Hidden);

namespace {

struct PreISelIntrinsicLowering {
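  /// Callbacks for retrieving per-function TTI and TLI from whichever pass
  /// manager is driving the lowering.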
  const function_ref<TargetTransformInfo &(Function &)> LookupTTI;
  const function_ref<TargetLibraryInfo &(Function &)> LookupLibInfo;

  /// If this is true, assume it's preferable to leave memory intrinsic calls
  /// for replacement with a library call later, provided TargetLibraryInfo
  /// reports the corresponding function as available. Otherwise the calls are
  /// always expanded.
  const bool UseMemIntrinsicLibFunc;

  explicit PreISelIntrinsicLowering(
      function_ref<TargetTransformInfo &(Function &)> LookupTTI_,
      function_ref<TargetLibraryInfo &(Function &)> LookupLibInfo_,
      bool UseMemIntrinsicLibFunc_ = true)
      : LookupTTI(LookupTTI_), LookupLibInfo(LookupLibInfo_),
        UseMemIntrinsicLibFunc(UseMemIntrinsicLibFunc_) {}

  static bool shouldExpandMemIntrinsicWithSize(Value *Size,
                                               const TargetTransformInfo &TTI);
  bool expandMemIntrinsicUses(Function &F) const;
  bool lowerIntrinsics(Module &M) const;
};

} // namespace

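// Lower direct calls to @llvm.load.relative(ptr %base, iN %offset) in place:
// load a 32-bit relative offset at (%base + %offset) and add it back onto
// %base, roughly (value names illustrative):
//   %oaddr = getelementptr i8, ptr %base, iN %offset
//   %off   = load i32, ptr %oaddr, align 4
//   %res   = getelementptr i8, ptr %base, i32 %off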
static bool lowerLoadRelative(Function &F) {
  if (F.use_empty())
    return false;

  bool Changed = false;
  Type *Int32Ty = Type::getInt32Ty(F.getContext());
  Type *Int32PtrTy = Int32Ty->getPointerTo();
  Type *Int8Ty = Type::getInt8Ty(F.getContext());

  for (Use &U : llvm::make_early_inc_range(F.uses())) {
    auto *CI = dyn_cast<CallInst>(U.getUser());
    if (!CI || CI->getCalledOperand() != &F)
      continue;

    IRBuilder<> B(CI);
    Value *OffsetPtr =
        B.CreateGEP(Int8Ty, CI->getArgOperand(0), CI->getArgOperand(1));
    Value *OffsetPtrI32 = B.CreateBitCast(OffsetPtr, Int32PtrTy);
    Value *OffsetI32 = B.CreateAlignedLoad(Int32Ty, OffsetPtrI32, Align(4));

    Value *ResultPtr = B.CreateGEP(Int8Ty, CI->getArgOperand(0), OffsetI32);

    CI->replaceAllUsesWith(ResultPtr);
    CI->eraseFromParent();
    Changed = true;
  }

  return Changed;
}

// ObjCARC has knowledge about whether an obj-c runtime function needs to be
// always tail-called or never tail-called.
static CallInst::TailCallKind getOverridingTailCallKind(const Function &F) {
  objcarc::ARCInstKind Kind = objcarc::GetFunctionClass(&F);
  if (objcarc::IsAlwaysTail(Kind))
    return CallInst::TCK_Tail;
  else if (objcarc::IsNeverTail(Kind))
    return CallInst::TCK_NoTail;
  return CallInst::TCK_None;
}

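// Rewrite every use of the llvm.objc.* intrinsic F into a call to the ObjC
// runtime function NewFn with the same signature, carrying over arguments and
// operand bundles and reconciling the tail-call kind with ObjCARC's
// requirements. When setNonLazyBind is set, the runtime function is also
// marked nonlazybind (unless it is weak for the linker).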
static bool lowerObjCCall(Function &F, const char *NewFn,
                          bool setNonLazyBind = false) {
  assert(IntrinsicInst::mayLowerToFunctionCall(F.getIntrinsicID()) &&
         "Pre-ISel intrinsics are expected to lower into regular function "
         "calls");
  if (F.use_empty())
    return false;

  // Check to see if the program already contains a function with this name,
  // and create a declaration for it if not.
  Module *M = F.getParent();
  FunctionCallee FCache = M->getOrInsertFunction(NewFn, F.getFunctionType());

  if (Function *Fn = dyn_cast<Function>(FCache.getCallee())) {
    Fn->setLinkage(F.getLinkage());
    if (setNonLazyBind && !Fn->isWeakForLinker()) {
      // If we have Native ARC, set nonlazybind attribute for these APIs for
      // performance.
      Fn->addFnAttr(Attribute::NonLazyBind);
    }
  }

  CallInst::TailCallKind OverridingTCK = getOverridingTailCallKind(F);

  for (Use &U : llvm::make_early_inc_range(F.uses())) {
    auto *CB = cast<CallBase>(U.getUser());

    if (CB->getCalledFunction() != &F) {
      objcarc::ARCInstKind Kind = objcarc::getAttachedARCFunctionKind(CB);
      (void)Kind;
      assert((Kind == objcarc::ARCInstKind::RetainRV ||
              Kind == objcarc::ARCInstKind::UnsafeClaimRV) &&
             "use expected to be the argument of operand bundle "
             "\"clang.arc.attachedcall\"");
      U.set(FCache.getCallee());
      continue;
    }

    auto *CI = cast<CallInst>(CB);
    assert(CI->getCalledFunction() && "Cannot lower an indirect call!");

    IRBuilder<> Builder(CI->getParent(), CI->getIterator());
    SmallVector<Value *, 8> Args(CI->args());
    SmallVector<llvm::OperandBundleDef, 1> BundleList;
    CI->getOperandBundlesAsDefs(BundleList);
    CallInst *NewCI = Builder.CreateCall(FCache, Args, BundleList);
    NewCI->setName(CI->getName());

    // Try to set the most appropriate TailCallKind based on both the current
    // attributes and the ones that we could get from ObjCARC's special
    // knowledge of the runtime functions.
    //
    // std::max respects both requirements of notail and tail here:
    // * notail on either the call or from ObjCARC becomes notail
    // * tail on either side is stronger than none, but not notail
    CallInst::TailCallKind TCK = CI->getTailCallKind();
    NewCI->setTailCallKind(std::max(TCK, OverridingTCK));

    if (!CI->use_empty())
      CI->replaceAllUsesWith(NewCI);
    CI->eraseFromParent();
  }

  return true;
}

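// Return true if a memory intrinsic call with length Size should be expanded
// in IR: either the length is not a compile-time constant, or it is a
// constant above the target's inline threshold (or above the
// -mem-intrinsic-expand-size override; a threshold of 0 forces expansion of
// every call, including zero-length ones).
//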
// TODO: Should refine based on estimated number of accesses (e.g. does it
// require splitting based on alignment)
bool PreISelIntrinsicLowering::shouldExpandMemIntrinsicWithSize(
    Value *Size, const TargetTransformInfo &TTI) {
  ConstantInt *CI = dyn_cast<ConstantInt>(Size);
  if (!CI)
    return true;
  uint64_t Threshold = MemIntrinsicExpandSizeThresholdOpt.getNumOccurrences()
                           ? MemIntrinsicExpandSizeThresholdOpt
                           : TTI.getMaxMemIntrinsicInlineSizeThreshold();
  uint64_t SizeVal = CI->getZExtValue();

  // Treat a threshold of 0 as a special case to force expansion of all
  // intrinsics, including size 0.
  return SizeVal > Threshold || Threshold == 0;
}

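// Expand every use of the memcpy/memmove/memset intrinsic declaration F whose
// length passes shouldExpandMemIntrinsicWithSize. If UseMemIntrinsicLibFunc is
// set and the caller's TargetLibraryInfo provides the matching libc routine,
// the call is left in place so that codegen can emit a library call instead of
// an inline loop.
//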
// TODO: Handle atomic memcpy and memcpy.inline
// TODO: Pass ScalarEvolution
bool PreISelIntrinsicLowering::expandMemIntrinsicUses(Function &F) const {
  Intrinsic::ID ID = F.getIntrinsicID();
  bool Changed = false;

  for (User *U : llvm::make_early_inc_range(F.users())) {
    Instruction *Inst = cast<Instruction>(U);

    switch (ID) {
    case Intrinsic::memcpy: {
      auto *Memcpy = cast<MemCpyInst>(Inst);
      Function *ParentFunc = Memcpy->getFunction();
      const TargetTransformInfo &TTI = LookupTTI(*ParentFunc);
      if (shouldExpandMemIntrinsicWithSize(Memcpy->getLength(), TTI)) {
        if (UseMemIntrinsicLibFunc &&
            LookupLibInfo(*ParentFunc).has(LibFunc_memcpy))
          break;

        expandMemCpyAsLoop(Memcpy, TTI);
        Changed = true;
        Memcpy->eraseFromParent();
      }

      break;
    }
    case Intrinsic::memmove: {
      auto *Memmove = cast<MemMoveInst>(Inst);
      Function *ParentFunc = Memmove->getFunction();
      const TargetTransformInfo &TTI = LookupTTI(*ParentFunc);
      if (shouldExpandMemIntrinsicWithSize(Memmove->getLength(), TTI)) {
        if (UseMemIntrinsicLibFunc &&
            LookupLibInfo(*ParentFunc).has(LibFunc_memmove))
          break;

        if (expandMemMoveAsLoop(Memmove, TTI)) {
          Changed = true;
          Memmove->eraseFromParent();
        }
      }

      break;
    }
    case Intrinsic::memset: {
      auto *Memset = cast<MemSetInst>(Inst);
      Function *ParentFunc = Memset->getFunction();
      const TargetTransformInfo &TTI = LookupTTI(*ParentFunc);
      if (shouldExpandMemIntrinsicWithSize(Memset->getLength(), TTI)) {
        if (UseMemIntrinsicLibFunc &&
            LookupLibInfo(*ParentFunc).has(LibFunc_memset))
          break;

        expandMemSetAsLoop(Memset);
        Changed = true;
        Memset->eraseFromParent();
      }

      break;
    }
    default:
      llvm_unreachable("unhandled intrinsic");
    }
  }

  return Changed;
}

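// Walk the intrinsic declarations in M and lower the ones this pass handles:
// expand oversized memcpy/memmove/memset calls, rewrite llvm.load.relative,
// and turn each llvm.objc.* intrinsic into a call to the corresponding
// runtime entry point.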
bool PreISelIntrinsicLowering::lowerIntrinsics(Module &M) const {
  bool Changed = false;
  for (Function &F : M) {
    switch (F.getIntrinsicID()) {
    default:
      break;
    case Intrinsic::memcpy:
    case Intrinsic::memmove:
    case Intrinsic::memset:
      Changed |= expandMemIntrinsicUses(F);
      break;
    case Intrinsic::load_relative:
      Changed |= lowerLoadRelative(F);
      break;
    case Intrinsic::objc_autorelease:
      Changed |= lowerObjCCall(F, "objc_autorelease");
      break;
    case Intrinsic::objc_autoreleasePoolPop:
      Changed |= lowerObjCCall(F, "objc_autoreleasePoolPop");
      break;
    case Intrinsic::objc_autoreleasePoolPush:
      Changed |= lowerObjCCall(F, "objc_autoreleasePoolPush");
      break;
    case Intrinsic::objc_autoreleaseReturnValue:
      Changed |= lowerObjCCall(F, "objc_autoreleaseReturnValue");
      break;
    case Intrinsic::objc_copyWeak:
      Changed |= lowerObjCCall(F, "objc_copyWeak");
      break;
    case Intrinsic::objc_destroyWeak:
      Changed |= lowerObjCCall(F, "objc_destroyWeak");
      break;
    case Intrinsic::objc_initWeak:
      Changed |= lowerObjCCall(F, "objc_initWeak");
      break;
    case Intrinsic::objc_loadWeak:
      Changed |= lowerObjCCall(F, "objc_loadWeak");
      break;
    case Intrinsic::objc_loadWeakRetained:
      Changed |= lowerObjCCall(F, "objc_loadWeakRetained");
      break;
    case Intrinsic::objc_moveWeak:
      Changed |= lowerObjCCall(F, "objc_moveWeak");
      break;
    case Intrinsic::objc_release:
      Changed |= lowerObjCCall(F, "objc_release", true);
      break;
    case Intrinsic::objc_retain:
      Changed |= lowerObjCCall(F, "objc_retain", true);
      break;
    case Intrinsic::objc_retainAutorelease:
      Changed |= lowerObjCCall(F, "objc_retainAutorelease");
      break;
    case Intrinsic::objc_retainAutoreleaseReturnValue:
      Changed |= lowerObjCCall(F, "objc_retainAutoreleaseReturnValue");
      break;
    case Intrinsic::objc_retainAutoreleasedReturnValue:
      Changed |= lowerObjCCall(F, "objc_retainAutoreleasedReturnValue");
      break;
    case Intrinsic::objc_retainBlock:
      Changed |= lowerObjCCall(F, "objc_retainBlock");
      break;
    case Intrinsic::objc_storeStrong:
      Changed |= lowerObjCCall(F, "objc_storeStrong");
      break;
    case Intrinsic::objc_storeWeak:
      Changed |= lowerObjCCall(F, "objc_storeWeak");
      break;
    case Intrinsic::objc_unsafeClaimAutoreleasedReturnValue:
      Changed |= lowerObjCCall(F, "objc_unsafeClaimAutoreleasedReturnValue");
      break;
    case Intrinsic::objc_retainedObject:
      Changed |= lowerObjCCall(F, "objc_retainedObject");
      break;
    case Intrinsic::objc_unretainedObject:
      Changed |= lowerObjCCall(F, "objc_unretainedObject");
      break;
    case Intrinsic::objc_unretainedPointer:
      Changed |= lowerObjCCall(F, "objc_unretainedPointer");
      break;
    case Intrinsic::objc_retain_autorelease:
      Changed |= lowerObjCCall(F, "objc_retain_autorelease");
      break;
    case Intrinsic::objc_sync_enter:
      Changed |= lowerObjCCall(F, "objc_sync_enter");
      break;
    case Intrinsic::objc_sync_exit:
      Changed |= lowerObjCCall(F, "objc_sync_exit");
      break;
    }
  }
  return Changed;
}

namespace {

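// Legacy pass manager wrapper. Per-function TTI and TLI are fetched through
// the analysis wrapper passes requested in getAnalysisUsage.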
class PreISelIntrinsicLoweringLegacyPass : public ModulePass {
public:
  static char ID;

  PreISelIntrinsicLoweringLegacyPass() : ModulePass(ID) {}

  void getAnalysisUsage(AnalysisUsage &AU) const override {
    AU.addRequired<TargetLibraryInfoWrapperPass>();
    AU.addRequired<TargetTransformInfoWrapperPass>();
  }

  bool runOnModule(Module &M) override {
    auto LookupTTI = [this](Function &F) -> TargetTransformInfo & {
      return this->getAnalysis<TargetTransformInfoWrapperPass>().getTTI(F);
    };

    auto LookupTLI = [this](Function &F) -> TargetLibraryInfo & {
      return this->getAnalysis<TargetLibraryInfoWrapperPass>().getTLI(F);
    };

    PreISelIntrinsicLowering Lowering(LookupTTI, LookupTLI);
    return Lowering.lowerIntrinsics(M);
  }
};

} // end anonymous namespace

char PreISelIntrinsicLoweringLegacyPass::ID;

INITIALIZE_PASS(PreISelIntrinsicLoweringLegacyPass,
                "pre-isel-intrinsic-lowering", "Pre-ISel Intrinsic Lowering",
                false, false)

ModulePass *llvm::createPreISelIntrinsicLoweringPass() {
  return new PreISelIntrinsicLoweringLegacyPass;
}

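// New pass manager entry point. Per-function TargetLibraryInfo and
// TargetTransformInfo are obtained through the module-to-function analysis
// manager proxy; all analyses are invalidated if any intrinsic was lowered.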
PreservedAnalyses PreISelIntrinsicLoweringPass::run(Module &M,
                                                    ModuleAnalysisManager &AM) {
  auto &FAM = AM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager();

  auto LookupTLI = [&FAM](Function &F) -> TargetLibraryInfo & {
    return FAM.getResult<TargetLibraryAnalysis>(F);
  };

  auto LookupTTI = [&FAM](Function &F) -> TargetTransformInfo & {
    return FAM.getResult<TargetIRAnalysis>(F);
  };

  PreISelIntrinsicLowering Lowering(LookupTTI, LookupTLI);
  if (!Lowering.lowerIntrinsics(M))
    return PreservedAnalyses::all();
  return PreservedAnalyses::none();
}