//===----- CGCoroutine.cpp - Emit LLVM Code for C++ coroutines ------------===//
//
// Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
// See https://llvm.org/LICENSE.txt for license information.
// SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
//
//===----------------------------------------------------------------------===//
//
// This contains code dealing with C++ code generation of coroutines.
//
//===----------------------------------------------------------------------===//

#include "CGCleanup.h"
#include "CodeGenFunction.h"
#include "llvm/ADT/ScopeExit.h"
#include "clang/AST/StmtCXX.h"
#include "clang/AST/StmtVisitor.h"

using namespace clang;
using namespace CodeGen;

using llvm::Value;
using llvm::BasicBlock;

namespace {
enum class AwaitKind { Init, Normal, Yield, Final };
static constexpr llvm::StringLiteral AwaitKindStr[] = {"init", "await", "yield",
                                                       "final"};
}

struct clang::CodeGen::CGCoroData {
  // What is the current await expression kind and how many
  // await/yield expressions were encountered so far.
  // These are used to generate pretty labels for await expressions in LLVM IR.
  AwaitKind CurrentAwaitKind = AwaitKind::Init;
  unsigned AwaitNum = 0;
  unsigned YieldNum = 0;

  // How many co_return statements are in the coroutine. Used to decide whether
  // we need to add the equivalent of co_return; at the end of the user-authored
  // body.
  unsigned CoreturnCount = 0;

  // A branch to this block is emitted when the coroutine needs to suspend.
  llvm::BasicBlock *SuspendBB = nullptr;

  // The promise type's 'unhandled_exception' handler, if it defines one.
  Stmt *ExceptionHandler = nullptr;

  // A temporary i1 alloca that stores whether 'await_resume' threw an
  // exception. If it did, 'true' is stored in this variable, and the coroutine
  // body must be skipped. If the promise type does not define an exception
  // handler, this is null.
  llvm::Value *ResumeEHVar = nullptr;

  // Stores the jump destination just before the coroutine memory is freed.
  // This is the destination that every suspend point jumps to for the cleanup
  // branch.
  CodeGenFunction::JumpDest CleanupJD;

  // Stores the jump destination just before the final suspend. The co_return
  // statements jump to this point after calling the return_xxx promise member.
  CodeGenFunction::JumpDest FinalJD;

  // Stores the llvm.coro.id emitted in the function so that we can supply it
  // as the first argument to coro.begin, coro.alloc and coro.free intrinsics.
  // Note: llvm.coro.id returns a token that cannot be directly expressed in a
  // builtin.
  llvm::CallInst *CoroId = nullptr;

  // Stores the llvm.coro.begin emitted in the function so that we can replace
  // all coro.frame intrinsics with the direct SSA value of coro.begin, which
  // returns the address of the coroutine frame of the current coroutine.
  llvm::CallInst *CoroBegin = nullptr;

  // Stores the last emitted coro.free for the deallocate expressions; we use it
  // to wrap the dealloc code with if (auto *mem = coro.free) dealloc(mem).
  llvm::CallInst *LastCoroFree = nullptr;

  // If coro.id came from the builtin, remember the expression to give a better
  // diagnostic. If CoroIdExpr is nullptr, the coro.id was created by
  // EmitCoroutineBody.
  CallExpr const *CoroIdExpr = nullptr;
};

// Defining these here allows us to keep CGCoroData private to this file.
clang::CodeGen::CodeGenFunction::CGCoroInfo::CGCoroInfo() {}
CodeGenFunction::CGCoroInfo::~CGCoroInfo() {}

static void createCoroData(CodeGenFunction &CGF,
                           CodeGenFunction::CGCoroInfo &CurCoro,
                           llvm::CallInst *CoroId,
                           CallExpr const *CoroIdExpr = nullptr) {
  if (CurCoro.Data) {
    if (CurCoro.Data->CoroIdExpr)
      CGF.CGM.Error(CoroIdExpr->getBeginLoc(),
                    "only one __builtin_coro_id can be used in a function");
    else if (CoroIdExpr)
      CGF.CGM.Error(CoroIdExpr->getBeginLoc(),
                    "__builtin_coro_id shall not be used in a C++ coroutine");
    else
      llvm_unreachable("EmitCoroutineBodyStatement called twice?");

    return;
  }

  CurCoro.Data = std::unique_ptr<CGCoroData>(new CGCoroData);
  CurCoro.Data->CoroId = CoroId;
  CurCoro.Data->CoroIdExpr = CoroIdExpr;
}

// Synthesize a pretty name for a suspend point.
static SmallString<32> buildSuspendPrefixStr(CGCoroData &Coro, AwaitKind Kind) {
  unsigned No = 0;
  switch (Kind) {
  case AwaitKind::Init:
  case AwaitKind::Final:
    break;
  case AwaitKind::Normal:
    No = ++Coro.AwaitNum;
    break;
  case AwaitKind::Yield:
    No = ++Coro.YieldNum;
    break;
  }
  SmallString<32> Prefix(AwaitKindStr[static_cast<unsigned>(Kind)]);
  if (No > 1) {
    Twine(No).toVector(Prefix);
  }
  return Prefix;
}
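// For illustration only (derived from the logic above, not normative): the
// first co_await in a coroutine gets the bare prefix "await", the second gets
// "await2", and so on; co_yield uses "yield", "yield2", ...; the implicit
// initial and final awaits always use "init" and "final". emitSuspendExpression
// below appends ".ready", ".suspend" and ".cleanup" to name the basic blocks.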

static bool memberCallExpressionCanThrow(const Expr *E) {
  if (const auto *CE = dyn_cast<CXXMemberCallExpr>(E))
    if (const auto *Proto =
            CE->getMethodDecl()->getType()->getAs<FunctionProtoType>())
      if (isNoexceptExceptionSpec(Proto->getExceptionSpecType()) &&
          Proto->canThrow() == CT_Cannot)
        return false;
  return true;
}

// Emit suspend expression which roughly looks like:
//
//   auto && x = CommonExpr();
//   if (!x.await_ready()) {
//      llvm_coro_save();
//      x.await_suspend(...);     (*)
//      llvm_coro_suspend(); (**)
//   }
//   x.await_resume();
//
// where the result of the entire expression is the result of x.await_resume()
//
//   (*) If x.await_suspend's return type is bool, it can veto the suspend:
//      if (x.await_suspend(...))
//        llvm_coro_suspend();
//
//  (**) llvm_coro_suspend() encodes three possible continuations as
//       a switch instruction:
//
//  %where-to = call i8 @llvm.coro.suspend(...)
//  switch i8 %where-to, label %coro.ret [ ; jump to epilogue to suspend
//    i8 0, label %yield.ready   ; go here when resumed
//    i8 1, label %yield.cleanup ; go here when destroyed
//  ]
//
//  See llvm's docs/Coroutines.rst for more details.
//
namespace {
  struct LValueOrRValue {
    LValue LV;
    RValue RV;
  };
}
static LValueOrRValue emitSuspendExpression(CodeGenFunction &CGF, CGCoroData &Coro,
                                    CoroutineSuspendExpr const &S,
                                    AwaitKind Kind, AggValueSlot aggSlot,
                                    bool ignoreResult, bool forLValue) {
  auto *E = S.getCommonExpr();

  auto Binder =
      CodeGenFunction::OpaqueValueMappingData::bind(CGF, S.getOpaqueValue(), E);
  auto UnbindOnExit = llvm::make_scope_exit([&] { Binder.unbind(CGF); });

  auto Prefix = buildSuspendPrefixStr(Coro, Kind);
  BasicBlock *ReadyBlock = CGF.createBasicBlock(Prefix + Twine(".ready"));
  BasicBlock *SuspendBlock = CGF.createBasicBlock(Prefix + Twine(".suspend"));
  BasicBlock *CleanupBlock = CGF.createBasicBlock(Prefix + Twine(".cleanup"));

  // If expression is ready, no need to suspend.
  CGF.EmitBranchOnBoolExpr(S.getReadyExpr(), ReadyBlock, SuspendBlock, 0);

  // Otherwise, emit suspend logic.
  CGF.EmitBlock(SuspendBlock);

  auto &Builder = CGF.Builder;
  llvm::Function *CoroSave = CGF.CGM.getIntrinsic(llvm::Intrinsic::coro_save);
  auto *NullPtr = llvm::ConstantPointerNull::get(CGF.CGM.Int8PtrTy);
  auto *SaveCall = Builder.CreateCall(CoroSave, {NullPtr});

  auto *SuspendRet = CGF.EmitScalarExpr(S.getSuspendExpr());
  if (SuspendRet != nullptr && SuspendRet->getType()->isIntegerTy(1)) {
    // Veto suspension if requested by a bool-returning await_suspend.
    BasicBlock *RealSuspendBlock =
        CGF.createBasicBlock(Prefix + Twine(".suspend.bool"));
    CGF.Builder.CreateCondBr(SuspendRet, RealSuspendBlock, ReadyBlock);
    CGF.EmitBlock(RealSuspendBlock);
  }

  // Emit the suspend point.
  const bool IsFinalSuspend = (Kind == AwaitKind::Final);
  llvm::Function *CoroSuspend =
      CGF.CGM.getIntrinsic(llvm::Intrinsic::coro_suspend);
  auto *SuspendResult = Builder.CreateCall(
      CoroSuspend, {SaveCall, Builder.getInt1(IsFinalSuspend)});

  // Create a switch capturing three possible continuations.
  auto *Switch = Builder.CreateSwitch(SuspendResult, Coro.SuspendBB, 2);
  Switch->addCase(Builder.getInt8(0), ReadyBlock);
  Switch->addCase(Builder.getInt8(1), CleanupBlock);

  // Emit cleanup for this suspend point.
  CGF.EmitBlock(CleanupBlock);
  CGF.EmitBranchThroughCleanup(Coro.CleanupJD);

  // Emit await_resume expression.
  CGF.EmitBlock(ReadyBlock);

  // Exception handling requires additional IR. If the 'await_resume' function
  // is marked as 'noexcept', we avoid generating this additional IR.
  CXXTryStmt *TryStmt = nullptr;
  if (Coro.ExceptionHandler && Kind == AwaitKind::Init &&
      memberCallExpressionCanThrow(S.getResumeExpr())) {
    Coro.ResumeEHVar =
        CGF.CreateTempAlloca(Builder.getInt1Ty(), Prefix + Twine("resume.eh"));
    Builder.CreateFlagStore(true, Coro.ResumeEHVar);

    auto Loc = S.getResumeExpr()->getExprLoc();
    auto *Catch = new (CGF.getContext())
        CXXCatchStmt(Loc, /*exDecl=*/nullptr, Coro.ExceptionHandler);
    auto *TryBody =
        CompoundStmt::Create(CGF.getContext(), S.getResumeExpr(), Loc, Loc);
    TryStmt = CXXTryStmt::Create(CGF.getContext(), Loc, TryBody, Catch);
    CGF.EnterCXXTryStmt(*TryStmt);
  }

  LValueOrRValue Res;
  if (forLValue)
    Res.LV = CGF.EmitLValue(S.getResumeExpr());
  else
    Res.RV = CGF.EmitAnyExpr(S.getResumeExpr(), aggSlot, ignoreResult);

  if (TryStmt) {
    Builder.CreateFlagStore(false, Coro.ResumeEHVar);
    CGF.ExitCXXTryStmt(*TryStmt);
  }

  return Res;
}

RValue CodeGenFunction::EmitCoawaitExpr(const CoawaitExpr &E,
                                        AggValueSlot aggSlot,
                                        bool ignoreResult) {
  return emitSuspendExpression(*this, *CurCoro.Data, E,
                               CurCoro.Data->CurrentAwaitKind, aggSlot,
                               ignoreResult, /*forLValue*/false).RV;
}
RValue CodeGenFunction::EmitCoyieldExpr(const CoyieldExpr &E,
                                        AggValueSlot aggSlot,
                                        bool ignoreResult) {
  return emitSuspendExpression(*this, *CurCoro.Data, E, AwaitKind::Yield,
                               aggSlot, ignoreResult, /*forLValue*/false).RV;
}

void CodeGenFunction::EmitCoreturnStmt(CoreturnStmt const &S) {
  ++CurCoro.Data->CoreturnCount;
  const Expr *RV = S.getOperand();
  if (RV && RV->getType()->isVoidType()) {
    // Make sure to evaluate the void-typed operand of the co_return for its
    // side effects.
    RunCleanupsScope cleanupScope(*this);
    EmitIgnoredExpr(RV);
  }
  EmitStmt(S.getPromiseCall());
  EmitBranchThroughCleanup(CurCoro.Data->FinalJD);
}


#ifndef NDEBUG
static QualType getCoroutineSuspendExprReturnType(const ASTContext &Ctx,
  const CoroutineSuspendExpr *E) {
  const auto *RE = E->getResumeExpr();
  // Is it possible for RE to be a CXXBindTemporaryExpr wrapping
  // a MemberCallExpr?
  assert(isa<CallExpr>(RE) && "unexpected suspend expression type");
  return cast<CallExpr>(RE)->getCallReturnType(Ctx);
}
#endif

LValue
CodeGenFunction::EmitCoawaitLValue(const CoawaitExpr *E) {
  assert(getCoroutineSuspendExprReturnType(getContext(), E)->isReferenceType() &&
         "Can't have a scalar return unless the return type is a "
         "reference type!");
  return emitSuspendExpression(*this, *CurCoro.Data, *E,
                               CurCoro.Data->CurrentAwaitKind, AggValueSlot::ignored(),
                               /*ignoreResult*/false, /*forLValue*/true).LV;
}

LValue
CodeGenFunction::EmitCoyieldLValue(const CoyieldExpr *E) {
  assert(getCoroutineSuspendExprReturnType(getContext(), E)->isReferenceType() &&
         "Can't have a scalar return unless the return type is a "
         "reference type!");
  return emitSuspendExpression(*this, *CurCoro.Data, *E,
                               AwaitKind::Yield, AggValueSlot::ignored(),
                               /*ignoreResult*/false, /*forLValue*/true).LV;
}

// Hunts for the parameter reference in the parameter copy/move declaration.
namespace {
struct GetParamRef : public StmtVisitor<GetParamRef> {
public:
  DeclRefExpr *Expr = nullptr;
  GetParamRef() {}
  void VisitDeclRefExpr(DeclRefExpr *E) {
    assert(Expr == nullptr && "multiple declrefs in param move");
    Expr = E;
  }
  void VisitStmt(Stmt *S) {
    for (auto *C : S->children()) {
      if (C)
        Visit(C);
    }
  }
};
}

// This class replaces references to parameters with references to their copies
// by changing the addresses in CGF.LocalDeclMap, and restores the original
// values in its destructor.
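//
// For instance (an informal illustration; the variable names are made up):
// given a coroutine `task f(T x)`, one of the S.getParamMoves() statements is
// roughly `T __x_copy(std::move(x));`. addCopy() finds the DeclRefExpr for `x`
// inside that initializer and repoints LocalDeclMap[x] at the address of the
// copy, so the user-authored body reads the frame-resident copy instead of the
// original parameter slot.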

namespace {
  struct ParamReferenceReplacerRAII {
    CodeGenFunction::DeclMapTy SavedLocals;
    CodeGenFunction::DeclMapTy& LocalDeclMap;

    ParamReferenceReplacerRAII(CodeGenFunction::DeclMapTy &LocalDeclMap)
        : LocalDeclMap(LocalDeclMap) {}

    void addCopy(DeclStmt const *PM) {
      // Figure out what param it refers to.

      assert(PM->isSingleDecl());
      VarDecl const*VD = static_cast<VarDecl const*>(PM->getSingleDecl());
      Expr const *InitExpr = VD->getInit();
      GetParamRef Visitor;
      Visitor.Visit(const_cast<Expr*>(InitExpr));
      assert(Visitor.Expr);
      DeclRefExpr *DREOrig = Visitor.Expr;
      auto *PD = DREOrig->getDecl();

      auto it = LocalDeclMap.find(PD);
      assert(it != LocalDeclMap.end() && "parameter is not found");
      SavedLocals.insert({ PD, it->second });

      auto copyIt = LocalDeclMap.find(VD);
      assert(copyIt != LocalDeclMap.end() && "parameter copy is not found");
      it->second = copyIt->getSecond();
    }

    ~ParamReferenceReplacerRAII() {
      for (auto&& SavedLocal : SavedLocals) {
        LocalDeclMap.insert({SavedLocal.first, SavedLocal.second});
      }
    }
  };
}

// For the WinEH exception representation, the backend needs to know which
// funclet coro.end belongs to. That information is passed in a funclet bundle.
static SmallVector<llvm::OperandBundleDef, 1>
getBundlesForCoroEnd(CodeGenFunction &CGF) {
  SmallVector<llvm::OperandBundleDef, 1> BundleList;

  if (llvm::Instruction *EHPad = CGF.CurrentFuncletPad)
    BundleList.emplace_back("funclet", EHPad);

  return BundleList;
}

namespace {
// We will insert coro.end to cut off any of the destructors for objects that
// do not need to be destroyed once the coroutine is resumed.
// See llvm/docs/Coroutines.rst for more details about coro.end.
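//
// A rough sketch (not authoritative; value names are illustrative) of the two
// shapes this cleanup produces:
//
//   WinEH (funclet) model:
//     call i1 @llvm.coro.end(i8* null, i1 true) [ "funclet"(token %pad) ]
//
//   Landingpad model:
//     %unwound = call i1 @llvm.coro.end(i8* null, i1 true)
//     br i1 %unwound, label %eh.resume, label %cleanup.cont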
struct CallCoroEnd final : public EHScopeStack::Cleanup {
  void Emit(CodeGenFunction &CGF, Flags flags) override {
    auto &CGM = CGF.CGM;
    auto *NullPtr = llvm::ConstantPointerNull::get(CGF.Int8PtrTy);
    llvm::Function *CoroEndFn = CGM.getIntrinsic(llvm::Intrinsic::coro_end);
    // See if we have a funclet bundle to associate coro.end with. (WinEH)
    auto Bundles = getBundlesForCoroEnd(CGF);
    auto *CoroEnd = CGF.Builder.CreateCall(
        CoroEndFn, {NullPtr, CGF.Builder.getTrue()}, Bundles);
    if (Bundles.empty()) {
      // Otherwise (landingpad model), create a conditional branch that leads
      // either to a cleanup block or to a block with the EH resume instruction.
      auto *ResumeBB = CGF.getEHResumeBlock(/*isCleanup=*/true);
      auto *CleanupContBB = CGF.createBasicBlock("cleanup.cont");
      CGF.Builder.CreateCondBr(CoroEnd, ResumeBB, CleanupContBB);
      CGF.EmitBlock(CleanupContBB);
    }
  }
};
}

namespace {
// Make sure to call coro.delete on scope exit.
struct CallCoroDelete final : public EHScopeStack::Cleanup {
  Stmt *Deallocate;

  // Emit "if (coro.free(CoroId, CoroBegin)) Deallocate;"

  // Note: That deallocation will be emitted twice: once for a normal exit and
  // once for an exceptional exit. This usage is safe because Deallocate does
  // not contain any declarations. The SubStmtBuilder::makeNewAndDeleteExpr()
  // builds a single call to a deallocation function which is safe to emit
  // multiple times.
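  //
  // A rough sketch (not authoritative; value names are illustrative) of the IR
  // shape produced by Emit() below:
  //
  //   %mem = call i8* @llvm.coro.free(token %id, i8* %frame)
  //   %do.free = icmp ne i8* %mem, null
  //   br i1 %do.free, label %coro.free, label %after.coro.free
  // coro.free:
  //   ; ... the Deallocate statement, e.g. a call to operator delete ...
  //   br label %after.coro.free
  // after.coro.free: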
  void Emit(CodeGenFunction &CGF, Flags) override {
    // Remember the current point, as we are going to emit the deallocation code
    // first in order to get to the coro.free instruction that is an argument to
    // the delete call.
    BasicBlock *SaveInsertBlock = CGF.Builder.GetInsertBlock();

    auto *FreeBB = CGF.createBasicBlock("coro.free");
    CGF.EmitBlock(FreeBB);
    CGF.EmitStmt(Deallocate);

    auto *AfterFreeBB = CGF.createBasicBlock("after.coro.free");
    CGF.EmitBlock(AfterFreeBB);

    // We should have captured coro.free from the emission of deallocate.
    auto *CoroFree = CGF.CurCoro.Data->LastCoroFree;
    if (!CoroFree) {
      CGF.CGM.Error(Deallocate->getBeginLoc(),
                    "Deallocation expression does not refer to coro.free");
      return;
    }

    // Get back to the block we were in originally and move coro.free there.
    auto *InsertPt = SaveInsertBlock->getTerminator();
    CoroFree->moveBefore(InsertPt);
    CGF.Builder.SetInsertPoint(InsertPt);

    // Add if (auto *mem = coro.free) Deallocate;
    auto *NullPtr = llvm::ConstantPointerNull::get(CGF.Int8PtrTy);
    auto *Cond = CGF.Builder.CreateICmpNE(CoroFree, NullPtr);
    CGF.Builder.CreateCondBr(Cond, FreeBB, AfterFreeBB);

    // No longer need old terminator.
    InsertPt->eraseFromParent();
    CGF.Builder.SetInsertPoint(AfterFreeBB);
  }
  explicit CallCoroDelete(Stmt *DeallocStmt) : Deallocate(DeallocStmt) {}
};
}

namespace {
struct GetReturnObjectManager {
  CodeGenFunction &CGF;
  CGBuilderTy &Builder;
  const CoroutineBodyStmt &S;

  Address GroActiveFlag;
  CodeGenFunction::AutoVarEmission GroEmission;

  GetReturnObjectManager(CodeGenFunction &CGF, const CoroutineBodyStmt &S)
      : CGF(CGF), Builder(CGF.Builder), S(S), GroActiveFlag(Address::invalid()),
        GroEmission(CodeGenFunction::AutoVarEmission::invalid()) {}

  // The gro variable has to outlive the coroutine frame and the coroutine
  // promise, but it can only be initialized after the coroutine promise has
  // been created. Thus, we split its emission into two parts. EmitGroAlloca
  // emits an alloca and sets up the cleanups. Later, when the coroutine promise
  // is available, we initialize the gro and set the flag that the cleanup is
  // now active.
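  //
  // Conceptually (a non-normative sketch; the type and flag names are
  // illustrative), the emitted code behaves as if the prologue were:
  //
  //   bool gro.active = false;           // EmitGroAlloca
  //   GroType gro;                       // storage only; cleanup gated on flag
  //   ...
  //   promise_type promise(...);
  //   gro = promise.get_return_object(); // EmitGroInit
  //   gro.active = true;                 // cleanup is now armed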

  void EmitGroAlloca() {
    auto *GroDeclStmt = dyn_cast<DeclStmt>(S.getResultDecl());
    if (!GroDeclStmt) {
      // If get_return_object returns void, no need to do an alloca.
      return;
    }

    auto *GroVarDecl = cast<VarDecl>(GroDeclStmt->getSingleDecl());

    // Set the GRO flag to indicate that it is not initialized yet.
    GroActiveFlag =
      CGF.CreateTempAlloca(Builder.getInt1Ty(), CharUnits::One(), "gro.active");
    Builder.CreateStore(Builder.getFalse(), GroActiveFlag);

    GroEmission = CGF.EmitAutoVarAlloca(*GroVarDecl);

    // Remember the top of EHStack before emitting the cleanup.
    auto old_top = CGF.EHStack.stable_begin();
    CGF.EmitAutoVarCleanups(GroEmission);
    auto top = CGF.EHStack.stable_begin();

    // Make the cleanup conditional on gro.active
    for (auto b = CGF.EHStack.find(top), e = CGF.EHStack.find(old_top);
      b != e; b++) {
      if (auto *Cleanup = dyn_cast<EHCleanupScope>(&*b)) {
        assert(!Cleanup->hasActiveFlag() && "cleanup already has active flag?");
        Cleanup->setActiveFlag(GroActiveFlag);
        Cleanup->setTestFlagInEHCleanup();
        Cleanup->setTestFlagInNormalCleanup();
      }
    }
  }

  void EmitGroInit() {
    if (!GroActiveFlag.isValid()) {
      // No Gro variable was allocated. Simply emit the call to
      // get_return_object.
      CGF.EmitStmt(S.getResultDecl());
      return;
    }

    CGF.EmitAutoVarInit(GroEmission);
    Builder.CreateStore(Builder.getTrue(), GroActiveFlag);
  }
};
}

static void emitBodyAndFallthrough(CodeGenFunction &CGF,
                                   const CoroutineBodyStmt &S, Stmt *Body) {
  CGF.EmitStmt(Body);
  const bool CanFallthrough = CGF.Builder.GetInsertBlock();
  if (CanFallthrough)
    if (Stmt *OnFallthrough = S.getFallthroughHandler())
      CGF.EmitStmt(OnFallthrough);
}

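// A rough outline of what EmitCoroutineBody produces (informal; block names
// follow the code below, value names are illustrative; see
// llvm/docs/Coroutines.rst for the authoritative description):
//
//   %id   = call token @llvm.coro.id(i32 <new-align>, i8* null, i8* null, i8* null)
//           ; operand 1 is later patched to point at the promise
//   %need = call i1 @llvm.coro.alloc(token %id)
//   br i1 %need, label %coro.alloc, label %coro.init
// coro.alloc:                ; S.getAllocate(); may branch to coro.ret.on.failure
//   ...
// coro.init:
//   %frame = call i8* @llvm.coro.begin(token %id, i8* %mem)
//   ; param copies, promise, get_return_object, initial_suspend, user body
// coro.final:
//   ; final_suspend
// coro.ret:
//   call i1 @llvm.coro.end(i8* null, i1 false)
//   ; the get_return_object result is returned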
void CodeGenFunction::EmitCoroutineBody(const CoroutineBodyStmt &S) {
  auto *NullPtr = llvm::ConstantPointerNull::get(Builder.getInt8PtrTy());
  auto &TI = CGM.getContext().getTargetInfo();
  unsigned NewAlign = TI.getNewAlign() / TI.getCharWidth();

  auto *EntryBB = Builder.GetInsertBlock();
  auto *AllocBB = createBasicBlock("coro.alloc");
  auto *InitBB = createBasicBlock("coro.init");
  auto *FinalBB = createBasicBlock("coro.final");
  auto *RetBB = createBasicBlock("coro.ret");

  auto *CoroId = Builder.CreateCall(
      CGM.getIntrinsic(llvm::Intrinsic::coro_id),
      {Builder.getInt32(NewAlign), NullPtr, NullPtr, NullPtr});
  createCoroData(*this, CurCoro, CoroId);
  CurCoro.Data->SuspendBB = RetBB;

  // The backend is allowed to elide memory allocations; to help it, emit
  // auto mem = coro.alloc() ? ... allocation code ... : 0;
  auto *CoroAlloc = Builder.CreateCall(
      CGM.getIntrinsic(llvm::Intrinsic::coro_alloc), {CoroId});

  Builder.CreateCondBr(CoroAlloc, AllocBB, InitBB);

  EmitBlock(AllocBB);
  auto *AllocateCall = EmitScalarExpr(S.getAllocate());
  auto *AllocOrInvokeContBB = Builder.GetInsertBlock();

  // Handle allocation failure if 'ReturnStmtOnAllocFailure' was provided.
  if (auto *RetOnAllocFailure = S.getReturnStmtOnAllocFailure()) {
    auto *RetOnFailureBB = createBasicBlock("coro.ret.on.failure");

    // See if allocation was successful.
    auto *NullPtr = llvm::ConstantPointerNull::get(Int8PtrTy);
    auto *Cond = Builder.CreateICmpNE(AllocateCall, NullPtr);
    Builder.CreateCondBr(Cond, InitBB, RetOnFailureBB);

    // If not, return OnAllocFailure object.
    EmitBlock(RetOnFailureBB);
    EmitStmt(RetOnAllocFailure);
  } else {
    Builder.CreateBr(InitBB);
  }

  EmitBlock(InitBB);

  // Pass the result of the allocation to coro.begin.
  auto *Phi = Builder.CreatePHI(VoidPtrTy, 2);
  Phi->addIncoming(NullPtr, EntryBB);
  Phi->addIncoming(AllocateCall, AllocOrInvokeContBB);
  auto *CoroBegin = Builder.CreateCall(
      CGM.getIntrinsic(llvm::Intrinsic::coro_begin), {CoroId, Phi});
  CurCoro.Data->CoroBegin = CoroBegin;

  GetReturnObjectManager GroManager(*this, S);
  GroManager.EmitGroAlloca();

  CurCoro.Data->CleanupJD = getJumpDestInCurrentScope(RetBB);
  {
    ParamReferenceReplacerRAII ParamReplacer(LocalDeclMap);
    CodeGenFunction::RunCleanupsScope ResumeScope(*this);
    EHStack.pushCleanup<CallCoroDelete>(NormalAndEHCleanup, S.getDeallocate());

    // Create parameter copies. We do it before creating the promise, since an
    // evolution of the coroutine TS may allow the promise constructor to
    // observe parameter copies.
    for (auto *PM : S.getParamMoves()) {
      EmitStmt(PM);
      ParamReplacer.addCopy(cast<DeclStmt>(PM));
      // TODO: if (CoroParam(...)) we need to surround the ctor and dtor
      // for the copy, so that llvm can elide it if the copy is
      // not needed.
    }

    EmitStmt(S.getPromiseDeclStmt());

    Address PromiseAddr = GetAddrOfLocalVar(S.getPromiseDecl());
    auto *PromiseAddrVoidPtr =
        new llvm::BitCastInst(PromiseAddr.getPointer(), VoidPtrTy, "", CoroId);
    // Update CoroId to refer to the promise. We could not do it earlier because
    // the promise local variable had not been emitted yet.
    CoroId->setArgOperand(1, PromiseAddrVoidPtr);

    // Now that we have the promise, initialize the GRO.
    GroManager.EmitGroInit();

    EHStack.pushCleanup<CallCoroEnd>(EHCleanup);

    CurCoro.Data->CurrentAwaitKind = AwaitKind::Init;
    CurCoro.Data->ExceptionHandler = S.getExceptionHandler();
    EmitStmt(S.getInitSuspendStmt());
    CurCoro.Data->FinalJD = getJumpDestInCurrentScope(FinalBB);

    CurCoro.Data->CurrentAwaitKind = AwaitKind::Normal;

    if (CurCoro.Data->ExceptionHandler) {
      // If we generated IR to record whether an exception was thrown from
      // 'await_resume', then use that IR to determine whether the coroutine
      // body should be skipped.
      // If we didn't generate the IR (perhaps because 'await_resume' was marked
      // as 'noexcept'), then we skip this check.
      BasicBlock *ContBB = nullptr;
      if (CurCoro.Data->ResumeEHVar) {
        BasicBlock *BodyBB = createBasicBlock("coro.resumed.body");
        ContBB = createBasicBlock("coro.resumed.cont");
        Value *SkipBody = Builder.CreateFlagLoad(CurCoro.Data->ResumeEHVar,
                                                 "coro.resumed.eh");
        Builder.CreateCondBr(SkipBody, ContBB, BodyBB);
        EmitBlock(BodyBB);
      }

      auto Loc = S.getBeginLoc();
      CXXCatchStmt Catch(Loc, /*exDecl=*/nullptr,
                         CurCoro.Data->ExceptionHandler);
      auto *TryStmt =
          CXXTryStmt::Create(getContext(), Loc, S.getBody(), &Catch);

      EnterCXXTryStmt(*TryStmt);
      emitBodyAndFallthrough(*this, S, TryStmt->getTryBlock());
      ExitCXXTryStmt(*TryStmt);

      if (ContBB)
        EmitBlock(ContBB);
    } else {
      emitBodyAndFallthrough(*this, S, S.getBody());
    }

    // See if we need to generate final suspend.
    const bool CanFallthrough = Builder.GetInsertBlock();
    const bool HasCoreturns = CurCoro.Data->CoreturnCount > 0;
    if (CanFallthrough || HasCoreturns) {
      EmitBlock(FinalBB);
      CurCoro.Data->CurrentAwaitKind = AwaitKind::Final;
      EmitStmt(S.getFinalSuspendStmt());
    } else {
      // We don't need FinalBB. Emit it to make sure the block is deleted.
      EmitBlock(FinalBB, /*IsFinished=*/true);
    }
  }

  EmitBlock(RetBB);
  // Emit coro.end before getReturnStmt (and parameter destructors), since the
  // resume and destroy parts of the coroutine should not include them.
  llvm::Function *CoroEnd = CGM.getIntrinsic(llvm::Intrinsic::coro_end);
  Builder.CreateCall(CoroEnd, {NullPtr, Builder.getFalse()});

  if (Stmt *Ret = S.getReturnStmt())
    EmitStmt(Ret);
}

// Emit coroutine intrinsic and patch up arguments of the token type.
RValue CodeGenFunction::EmitCoroutineIntrinsic(const CallExpr *E,
                                               unsigned int IID) {
  SmallVector<llvm::Value *, 8> Args;
  switch (IID) {
  default:
    break;
  // The coro.frame builtin is replaced with an SSA value of the coro.begin
  // intrinsic.
  case llvm::Intrinsic::coro_frame: {
    if (CurCoro.Data && CurCoro.Data->CoroBegin) {
      return RValue::get(CurCoro.Data->CoroBegin);
    }
    CGM.Error(E->getBeginLoc(), "this builtin expects that __builtin_coro_begin "
                                "has been used earlier in this function");
    auto NullPtr = llvm::ConstantPointerNull::get(Builder.getInt8PtrTy());
    return RValue::get(NullPtr);
  }
  // The following three intrinsics take a token parameter referring to a token
  // returned by an earlier call to @llvm.coro.id. Since we cannot represent it
  // in builtins, we patch it up here.
  case llvm::Intrinsic::coro_alloc:
  case llvm::Intrinsic::coro_begin:
  case llvm::Intrinsic::coro_free: {
    if (CurCoro.Data && CurCoro.Data->CoroId) {
      Args.push_back(CurCoro.Data->CoroId);
      break;
    }
    CGM.Error(E->getBeginLoc(), "this builtin expects that __builtin_coro_id has"
                                " been used earlier in this function");
    // Fall through to the next case to add TokenNone as the first argument.
    LLVM_FALLTHROUGH;
  }
  }
  // @llvm.coro.suspend takes a token parameter. Add token 'none' as the first
  // argument.
  case llvm::Intrinsic::coro_suspend:
    Args.push_back(llvm::ConstantTokenNone::get(getLLVMContext()));
    break;
  }
  for (const Expr *Arg : E->arguments())
    Args.push_back(EmitScalarExpr(Arg));

  llvm::Function *F = CGM.getIntrinsic(IID);
  llvm::CallInst *Call = Builder.CreateCall(F, Args);

  // Note: The following code exists to enable emitting coro.id and coro.begin
  // by hand, to experiment with coroutines in C.
  // If we see @llvm.coro.id, remember it in the CoroData. We will update the
  // coro.alloc, coro.begin and coro.free intrinsics to refer to it.
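  //
  // As a rough, heavily simplified illustration (argument lists elided; not the
  // exact builtin signatures), such hand-written C code might look like:
  //
  //   __builtin_coro_id(...);    // remembered below via createCoroData()
  //   __builtin_coro_begin(...); // gets the remembered coro.id token as arg 0
  //   __builtin_coro_free(...);  // likewise patched; remembered as LastCoroFree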
  if (IID == llvm::Intrinsic::coro_id) {
    createCoroData(*this, CurCoro, Call, E);
  } else if (IID == llvm::Intrinsic::coro_begin) {
    if (CurCoro.Data)
      CurCoro.Data->CoroBegin = Call;
  } else if (IID == llvm::Intrinsic::coro_free) {
    // Remember the last coro_free as we need it to build the conditional
    // deletion of the coroutine frame.
    if (CurCoro.Data)
      CurCoro.Data->LastCoroFree = Call;
  }
  return RValue::get(Call);
}
759