//===- CoroInternal.h - Internal Coroutine interfaces ---------*- C++ -*---===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
// Common definitions/declarations used internally by coroutine lowering passes.
//===----------------------------------------------------------------------===//

#ifndef LLVM_LIB_TRANSFORMS_COROUTINES_COROINTERNAL_H
#define LLVM_LIB_TRANSFORMS_COROUTINES_COROINTERNAL_H

#include "CoroInstr.h"
#include "llvm/Analysis/TargetTransformInfo.h"
#include "llvm/IR/IRBuilder.h"

namespace llvm {

class CallGraph;

namespace coro {

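/// Returns true if the module declares any of the llvm.coro.* intrinsics.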
bool declaresAnyIntrinsic(const Module &M);
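/// Returns true if the module declares any of the llvm.coro.* intrinsics named
/// in the given list.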
bool declaresIntrinsics(const Module &M,
                        const std::initializer_list<StringRef>);
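/// Replaces all llvm.coro.free intrinsics associated with \p CoroId with a
/// null pointer when \p Elide is true (the frame allocation has been elided),
/// or with the coroutine frame pointer otherwise.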
void replaceCoroFree(CoroIdInst *CoroId, bool Elide);

/// Attempts to rewrite the location operand of debug intrinsics in terms of
/// the coroutine frame pointer, folding pointer offsets into the DIExpression
/// of the intrinsic.
/// If the frame pointer is an Argument, store it into an alloca if
/// OptimizeFrame is false.
void salvageDebugInfo(
    SmallDenseMap<Argument *, AllocaInst *, 4> &ArgToAllocaMap,
    DbgVariableIntrinsic &DVI, bool OptimizeFrame, bool IsEntryPoint);
void salvageDebugInfo(
    SmallDenseMap<Argument *, AllocaInst *, 4> &ArgToAllocaMap,
    DbgVariableRecord &DVR, bool OptimizeFrame, bool UseEntryValue);

// Keeps data and helper functions for lowering coroutine intrinsics.
struct LowererBase {
  Module &TheModule;
  LLVMContext &Context;
  PointerType *const Int8Ptr;
  FunctionType *const ResumeFnType;
  ConstantPointerNull *const NullPtr;

  LowererBase(Module &M);
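  /// Emits a call to the llvm.coro.subfn.addr intrinsic that returns the
  /// resume or destroy function pointer (selected by \p Index) for the
  /// coroutine handle \p Arg; the call is inserted before \p InsertPt.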
  CallInst *makeSubFnCall(Value *Arg, int Index, Instruction *InsertPt);
};

enum class ABI {
  /// The "resume-switch" lowering, where there are separate resume and
  /// destroy functions that are shared between all suspend points.  The
  /// coroutine frame implicitly stores the resume and destroy functions,
  /// the current index, and any promise value.
  Switch,

  /// The "returned-continuation" lowering, where each suspend point creates a
  /// single continuation function that is used for both resuming and
  /// destroying.  Does not support promises.
  Retcon,

  /// The "unique returned-continuation" lowering, where each suspend point
  /// creates a single continuation function that is used for both resuming
  /// and destroying.  Does not support promises.  The function is known to
  /// suspend at most once during its execution, and the return value of
  /// the continuation is void.
  RetconOnce,

  /// The "async continuation" lowering, where each suspend point creates a
  /// single continuation function. The continuation function is available as
  /// an intrinsic.
  Async,
};

// Holds the structural coroutine intrinsics for a particular function and
// other values used during the CoroSplit pass.
struct LLVM_LIBRARY_VISIBILITY Shape {
  CoroBeginInst *CoroBegin;
  SmallVector<AnyCoroEndInst *, 4> CoroEnds;
  SmallVector<CoroSizeInst *, 2> CoroSizes;
  SmallVector<CoroAlignInst *, 2> CoroAligns;
  SmallVector<AnyCoroSuspendInst *, 4> CoroSuspends;
  SmallVector<CallInst *, 2> SwiftErrorOps;
  SmallVector<CoroAwaitSuspendInst *, 4> CoroAwaitSuspends;
  SmallVector<CallInst *, 2> SymmetricTransfers;

  // Field indexes for special fields in the switch lowering.
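  // Conceptually, a switch-lowered frame looks roughly like
  //   %f.Frame = type { ptr (resume fn), ptr (destroy fn),
  //                     <promise, spills and switch index, laid out by the
  //                     frame builder> }
  // subject to the caveats noted below.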
  struct SwitchFieldIndex {
    enum {
      Resume,
      Destroy

      // The promise field is always at a fixed offset from the start of the
      // frame given its type, but the index isn't a constant for all
      // possible frames.

      // The switch-index field isn't at a fixed offset or index, either;
      // we just work it in where it fits best.
    };
  };

  coro::ABI ABI;

  StructType *FrameTy;
  Align FrameAlign;
  uint64_t FrameSize;
  Value *FramePtr;
  BasicBlock *AllocaSpillBlock;

  /// This would only be true if optimizations are enabled.
  bool OptimizeFrame;

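  /// State used only by the switch lowering (coro::ABI::Switch).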
  struct SwitchLoweringStorage {
    SwitchInst *ResumeSwitch;
    AllocaInst *PromiseAlloca;
    BasicBlock *ResumeEntryBlock;
    unsigned IndexField;
    unsigned IndexAlign;
    unsigned IndexOffset;
    bool HasFinalSuspend;
    bool HasUnwindCoroEnd;
  };

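  /// State used only by the returned-continuation lowerings (coro::ABI::Retcon
  /// and coro::ABI::RetconOnce).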
  struct RetconLoweringStorage {
    Function *ResumePrototype;
    Function *Alloc;
    Function *Dealloc;
    BasicBlock *ReturnBlock;
    bool IsFrameInlineInStorage;
  };

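  /// State used only by the async lowering (coro::ABI::Async).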
  struct AsyncLoweringStorage {
    Value *Context;
    CallingConv::ID AsyncCC;
    unsigned ContextArgNo;
    uint64_t ContextHeaderSize;
    uint64_t ContextAlignment;
    uint64_t FrameOffset; // Start of the frame.
    uint64_t ContextSize; // Includes frame size.
    GlobalVariable *AsyncFuncPointer;

    Align getContextAlignment() const { return Align(ContextAlignment); }
  };

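  /// Lowering-specific state; only the member matching the active ABI is
  /// meaningful.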
  union {
    SwitchLoweringStorage SwitchLowering;
    RetconLoweringStorage RetconLowering;
    AsyncLoweringStorage AsyncLowering;
  };

  CoroIdInst *getSwitchCoroId() const {
    assert(ABI == coro::ABI::Switch);
    return cast<CoroIdInst>(CoroBegin->getId());
  }

  AnyCoroIdRetconInst *getRetconCoroId() const {
    assert(ABI == coro::ABI::Retcon ||
           ABI == coro::ABI::RetconOnce);
    return cast<AnyCoroIdRetconInst>(CoroBegin->getId());
  }

  CoroIdAsyncInst *getAsyncCoroId() const {
    assert(ABI == coro::ABI::Async);
    return cast<CoroIdAsyncInst>(CoroBegin->getId());
  }

  unsigned getSwitchIndexField() const {
    assert(ABI == coro::ABI::Switch);
    assert(FrameTy && "frame type not assigned");
    return SwitchLowering.IndexField;
  }
  IntegerType *getIndexType() const {
    assert(ABI == coro::ABI::Switch);
    assert(FrameTy && "frame type not assigned");
    return cast<IntegerType>(FrameTy->getElementType(getSwitchIndexField()));
  }
  ConstantInt *getIndex(uint64_t Value) const {
    return ConstantInt::get(getIndexType(), Value);
  }

  PointerType *getSwitchResumePointerType() const {
    assert(ABI == coro::ABI::Switch);
    assert(FrameTy && "frame type not assigned");
    return cast<PointerType>(FrameTy->getElementType(SwitchFieldIndex::Resume));
  }

  FunctionType *getResumeFunctionType() const {
    switch (ABI) {
    case coro::ABI::Switch:
      return FunctionType::get(Type::getVoidTy(FrameTy->getContext()),
                               PointerType::getUnqual(FrameTy->getContext()),
                               /*IsVarArg=*/false);
    case coro::ABI::Retcon:
    case coro::ABI::RetconOnce:
      return RetconLowering.ResumePrototype->getFunctionType();
    case coro::ABI::Async:
      // Not used. The function type depends on the active suspend.
      return nullptr;
    }

    llvm_unreachable("Unknown coro::ABI enum");
  }

  ArrayRef<Type *> getRetconResultTypes() const {
    assert(ABI == coro::ABI::Retcon ||
           ABI == coro::ABI::RetconOnce);
    auto FTy = CoroBegin->getFunction()->getFunctionType();

    // The safety of all this is checked by checkWFRetconPrototype.
    if (auto STy = dyn_cast<StructType>(FTy->getReturnType())) {
      return STy->elements().slice(1);
    } else {
      return ArrayRef<Type *>();
    }
  }

  ArrayRef<Type *> getRetconResumeTypes() const {
    assert(ABI == coro::ABI::Retcon ||
           ABI == coro::ABI::RetconOnce);

    // The safety of all this is checked by checkWFRetconPrototype.
    auto FTy = RetconLowering.ResumePrototype->getFunctionType();
    return FTy->params().slice(1);
  }

  CallingConv::ID getResumeFunctionCC() const {
    switch (ABI) {
    case coro::ABI::Switch:
      return CallingConv::Fast;

    case coro::ABI::Retcon:
    case coro::ABI::RetconOnce:
      return RetconLowering.ResumePrototype->getCallingConv();
    case coro::ABI::Async:
      return AsyncLowering.AsyncCC;
    }
    llvm_unreachable("Unknown coro::ABI enum");
  }

  AllocaInst *getPromiseAlloca() const {
    if (ABI == coro::ABI::Switch)
      return SwitchLowering.PromiseAlloca;
    return nullptr;
  }

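  /// Returns the insertion point immediately after the definition of the frame
  /// pointer, or the beginning of the entry block when the frame pointer is a
  /// function argument.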
  BasicBlock::iterator getInsertPtAfterFramePtr() const {
    if (auto *I = dyn_cast<Instruction>(FramePtr)) {
      BasicBlock::iterator It = std::next(I->getIterator());
      It.setHeadBit(true); // Copy pre-RemoveDIs behaviour.
      return It;
    }
    return cast<Argument>(FramePtr)->getParent()->getEntryBlock().begin();
  }

  /// Allocate memory according to the rules of the active lowering.
  ///
  /// \param CG - if non-null, will be updated for the new call
  Value *emitAlloc(IRBuilder<> &Builder, Value *Size, CallGraph *CG) const;

  /// Deallocate memory according to the rules of the active lowering.
  ///
  /// \param CG - if non-null, will be updated for the new call
  void emitDealloc(IRBuilder<> &Builder, Value *Ptr, CallGraph *CG) const;

  Shape() = default;
  explicit Shape(Function &F, bool OptimizeFrame = false)
      : OptimizeFrame(OptimizeFrame) {
    buildFrom(F);
  }
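  /// Scans \p F for coroutine intrinsics and populates this Shape from them.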
  void buildFrom(Function &F);
};

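/// Default policy for deciding whether an instruction is cheap enough to be
/// rematerialized after a suspend point rather than spilled to the coroutine
/// frame.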
bool defaultMaterializable(Instruction &V);
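/// Builds the coroutine frame for \p F: values that are live across suspend
/// points are spilled into the frame (unless \p MaterializableCallback allows
/// them to be rematerialized) and their uses are rewritten to go through it.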
void buildCoroutineFrame(
    Function &F, Shape &Shape, TargetTransformInfo &TTI,
    const std::function<bool(Instruction &)> &MaterializableCallback);
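/// Creates a call to \p MustTailCallFn with the given \p Arguments and, where
/// the target supports it, marks it as a musttail call.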
CallInst *createMustTailCall(DebugLoc Loc, Function *MustTailCallFn,
                             TargetTransformInfo &TTI,
                             ArrayRef<Value *> Arguments, IRBuilder<> &);
} // End namespace coro.
} // End namespace llvm

#endif