1 //===----- CGCoroutine.cpp - Emit LLVM Code for C++ coroutines ------------===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // This contains code dealing with C++ code generation of coroutines.
10 //
11 //===----------------------------------------------------------------------===//
12
13 #include "CGCleanup.h"
14 #include "CodeGenFunction.h"
15 #include "llvm/ADT/ScopeExit.h"
16 #include "clang/AST/StmtCXX.h"
17 #include "clang/AST/StmtVisitor.h"
18
19 using namespace clang;
20 using namespace CodeGen;
21
22 using llvm::Value;
23 using llvm::BasicBlock;
24
25 namespace {
26 enum class AwaitKind { Init, Normal, Yield, Final };
27 static constexpr llvm::StringLiteral AwaitKindStr[] = {"init", "await", "yield",
28 "final"};
29 }
30
31 struct clang::CodeGen::CGCoroData {
32 // What is the current await expression kind and how many
33 // await/yield expressions were encountered so far.
34 // These are used to generate pretty labels for await expressions in LLVM IR.
35 AwaitKind CurrentAwaitKind = AwaitKind::Init;
36 unsigned AwaitNum = 0;
37 unsigned YieldNum = 0;
38
// How many co_return statements are in the coroutine. Used to decide whether
// we need to add the equivalent of co_return; at the end of the user-authored body.
41 unsigned CoreturnCount = 0;
42
// A branch to this block is emitted when the coroutine needs to suspend.
44 llvm::BasicBlock *SuspendBB = nullptr;
45
46 // The promise type's 'unhandled_exception' handler, if it defines one.
47 Stmt *ExceptionHandler = nullptr;
48
49 // A temporary i1 alloca that stores whether 'await_resume' threw an
50 // exception. If it did, 'true' is stored in this variable, and the coroutine
51 // body must be skipped. If the promise type does not define an exception
52 // handler, this is null.
53 llvm::Value *ResumeEHVar = nullptr;
54
55 // Stores the jump destination just before the coroutine memory is freed.
56 // This is the destination that every suspend point jumps to for the cleanup
57 // branch.
58 CodeGenFunction::JumpDest CleanupJD;
59
// Stores the jump destination just before the final suspend. co_return
// statements jump to this point after calling the return_xxx promise member.
62 CodeGenFunction::JumpDest FinalJD;
63
64 // Stores the llvm.coro.id emitted in the function so that we can supply it
65 // as the first argument to coro.begin, coro.alloc and coro.free intrinsics.
66 // Note: llvm.coro.id returns a token that cannot be directly expressed in a
67 // builtin.
68 llvm::CallInst *CoroId = nullptr;
69
70 // Stores the llvm.coro.begin emitted in the function so that we can replace
71 // all coro.frame intrinsics with direct SSA value of coro.begin that returns
72 // the address of the coroutine frame of the current coroutine.
73 llvm::CallInst *CoroBegin = nullptr;
74
// Stores the last emitted coro.free for the deallocate expressions. We use it
// to wrap the dealloc code with if (auto *mem = coro.free) dealloc(mem).
77 llvm::CallInst *LastCoroFree = nullptr;
78
// If coro.id came from the builtin, remember the expression to give a better
// diagnostic. If CoroIdExpr is nullptr, the coro.id was created by
// EmitCoroutineBody.
82 CallExpr const *CoroIdExpr = nullptr;
83 };
84
// Defining these here allows us to keep CGCoroData private to this file.
clang::CodeGen::CodeGenFunction::CGCoroInfo::CGCoroInfo() {}
CodeGenFunction::CGCoroInfo::~CGCoroInfo() {}
88
static void createCoroData(CodeGenFunction &CGF,
90 CodeGenFunction::CGCoroInfo &CurCoro,
91 llvm::CallInst *CoroId,
92 CallExpr const *CoroIdExpr = nullptr) {
93 if (CurCoro.Data) {
94 if (CurCoro.Data->CoroIdExpr)
95 CGF.CGM.Error(CoroIdExpr->getBeginLoc(),
96 "only one __builtin_coro_id can be used in a function");
97 else if (CoroIdExpr)
98 CGF.CGM.Error(CoroIdExpr->getBeginLoc(),
99 "__builtin_coro_id shall not be used in a C++ coroutine");
100 else
101 llvm_unreachable("EmitCoroutineBodyStatement called twice?");
102
103 return;
104 }
105
106 CurCoro.Data = std::make_unique<CGCoroData>();
107 CurCoro.Data->CoroId = CoroId;
108 CurCoro.Data->CoroIdExpr = CoroIdExpr;
109 }
110
111 // Synthesize a pretty name for a suspend point.
static SmallString<32> buildSuspendPrefixStr(CGCoroData &Coro, AwaitKind Kind) {
113 unsigned No = 0;
114 switch (Kind) {
115 case AwaitKind::Init:
116 case AwaitKind::Final:
117 break;
118 case AwaitKind::Normal:
119 No = ++Coro.AwaitNum;
120 break;
121 case AwaitKind::Yield:
122 No = ++Coro.YieldNum;
123 break;
124 }
125 SmallString<32> Prefix(AwaitKindStr[static_cast<unsigned>(Kind)]);
126 if (No > 1) {
127 Twine(No).toVector(Prefix);
128 }
129 return Prefix;
130 }
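// For illustration (hypothetical user code, not part of this file): in
//
//   task f() {          // "task" is an assumed coroutine return type
//     co_await a();     // blocks await.ready / await.suspend / await.cleanup
//     co_await b();     // blocks await2.ready / await2.suspend / ...
//     co_yield 42;      // blocks yield.ready / yield.suspend / ...
//   }
//
// the first co_await keeps the bare "await" prefix (No > 1 is false), later
// ones get a number appended, and the implicit initial and final suspends use
// the fixed "init" and "final" prefixes from AwaitKindStr.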
131
// Check whether a function can throw based on its prototype's noexcept. This
// also covers destructors, which are implicitly noexcept but can be marked noexcept(false).
static bool FunctionCanThrow(const FunctionDecl *D) {
135 const auto *Proto = D->getType()->getAs<FunctionProtoType>();
136 if (!Proto) {
    // The function prototype was not found; conservatively assume it can throw.
138 return true;
139 }
140 return !isNoexceptExceptionSpec(Proto->getExceptionSpecType()) ||
141 Proto->canThrow() != CT_Cannot;
142 }
143
static bool StmtCanThrow(const Stmt *S) {
145 if (const auto *CE = dyn_cast<CallExpr>(S)) {
146 const auto *Callee = CE->getDirectCallee();
147 if (!Callee)
148 // We don't have direct callee. Conservatively assume throwing.
149 return true;
150
151 if (FunctionCanThrow(Callee))
152 return true;
153
154 // Fall through to visit the children.
155 }
156
157 if (const auto *TE = dyn_cast<CXXBindTemporaryExpr>(S)) {
    // Special-case CXXBindTemporaryExpr here: the call to the temporary's
    // destructor is not part of `children()` and is therefore not covered by
    // the fall-through below. We need to mark the entire statement as throwing
    // if the destructor of the temporary can throw.
162 const auto *Dtor = TE->getTemporary()->getDestructor();
163 if (FunctionCanThrow(Dtor))
164 return true;
165
166 // Fall through to visit the children.
167 }
168
169 for (const auto *child : S->children())
170 if (StmtCanThrow(child))
171 return true;
172
173 return false;
174 }
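// For illustration (hypothetical user code): given
//
//   struct Guard { ~Guard() noexcept(false); };
//   Guard make_guard() noexcept;
//   void consume(const Guard &) noexcept;
//
// the statement `consume(make_guard());` is still treated as potentially
// throwing: both calls are noexcept, but the CXXBindTemporaryExpr for the
// Guard temporary carries a destructor that may throw, which a plain
// children() walk alone would miss.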
175
176 // Emit suspend expression which roughly looks like:
177 //
178 // auto && x = CommonExpr();
179 // if (!x.await_ready()) {
180 // llvm_coro_save();
181 // llvm_coro_await_suspend(&x, frame, wrapper) (*) (**)
182 // llvm_coro_suspend(); (***)
183 // }
184 // x.await_resume();
185 //
186 // where the result of the entire expression is the result of x.await_resume()
187 //
188 // (*) llvm_coro_await_suspend_{void, bool, handle} is lowered to
//     wrapper(&x, frame) when it is certain not to interfere with the
//     coroutine transform. The await_suspend expression runs asynchronously
//     with respect to the coroutine body, and not all analyses and
//     transformations can handle it correctly at the moment.
193 //
// The wrapper function encapsulates the x.await_suspend(...) call and looks like:
195 //
196 // auto __await_suspend_wrapper(auto& awaiter, void* frame) {
197 // std::coroutine_handle<> handle(frame);
198 // return awaiter.await_suspend(handle);
199 // }
200 //
// (**) If the return type of x.await_suspend is bool, it can veto the suspend:
202 // if (x.await_suspend(...))
203 // llvm_coro_suspend();
204 //
205 // (***) llvm_coro_suspend() encodes three possible continuations as
206 // a switch instruction:
207 //
208 // %where-to = call i8 @llvm.coro.suspend(...)
209 // switch i8 %where-to, label %coro.ret [ ; jump to epilogue to suspend
210 // i8 0, label %yield.ready ; go here when resumed
211 // i8 1, label %yield.cleanup ; go here when destroyed
212 // ]
213 //
214 // See llvm's docs/Coroutines.rst for more details.
215 //
216 namespace {
217 struct LValueOrRValue {
218 LValue LV;
219 RValue RV;
220 };
221 }
static LValueOrRValue emitSuspendExpression(CodeGenFunction &CGF, CGCoroData &Coro,
223 CoroutineSuspendExpr const &S,
224 AwaitKind Kind, AggValueSlot aggSlot,
225 bool ignoreResult, bool forLValue) {
226 auto *E = S.getCommonExpr();
227
228 auto CommonBinder =
229 CodeGenFunction::OpaqueValueMappingData::bind(CGF, S.getOpaqueValue(), E);
230 auto UnbindCommonOnExit =
231 llvm::make_scope_exit([&] { CommonBinder.unbind(CGF); });
232
233 auto Prefix = buildSuspendPrefixStr(Coro, Kind);
234 BasicBlock *ReadyBlock = CGF.createBasicBlock(Prefix + Twine(".ready"));
235 BasicBlock *SuspendBlock = CGF.createBasicBlock(Prefix + Twine(".suspend"));
236 BasicBlock *CleanupBlock = CGF.createBasicBlock(Prefix + Twine(".cleanup"));
237
238 // If expression is ready, no need to suspend.
239 CGF.EmitBranchOnBoolExpr(S.getReadyExpr(), ReadyBlock, SuspendBlock, 0);
240
241 // Otherwise, emit suspend logic.
242 CGF.EmitBlock(SuspendBlock);
243
244 auto &Builder = CGF.Builder;
245 llvm::Function *CoroSave = CGF.CGM.getIntrinsic(llvm::Intrinsic::coro_save);
246 auto *NullPtr = llvm::ConstantPointerNull::get(CGF.CGM.Int8PtrTy);
247 auto *SaveCall = Builder.CreateCall(CoroSave, {NullPtr});
248
249 auto SuspendWrapper = CodeGenFunction(CGF.CGM).generateAwaitSuspendWrapper(
250 CGF.CurFn->getName(), Prefix, S);
251
252 CGF.CurCoro.InSuspendBlock = true;
253
254 assert(CGF.CurCoro.Data && CGF.CurCoro.Data->CoroBegin &&
255 "expected to be called in coroutine context");
256
257 SmallVector<llvm::Value *, 3> SuspendIntrinsicCallArgs;
258 SuspendIntrinsicCallArgs.push_back(
259 CGF.getOrCreateOpaqueLValueMapping(S.getOpaqueValue()).getPointer(CGF));
260
261 SuspendIntrinsicCallArgs.push_back(CGF.CurCoro.Data->CoroBegin);
262 SuspendIntrinsicCallArgs.push_back(SuspendWrapper);
263
264 const auto SuspendReturnType = S.getSuspendReturnType();
265 llvm::Intrinsic::ID AwaitSuspendIID;
266
267 switch (SuspendReturnType) {
268 case CoroutineSuspendExpr::SuspendReturnType::SuspendVoid:
269 AwaitSuspendIID = llvm::Intrinsic::coro_await_suspend_void;
270 break;
271 case CoroutineSuspendExpr::SuspendReturnType::SuspendBool:
272 AwaitSuspendIID = llvm::Intrinsic::coro_await_suspend_bool;
273 break;
274 case CoroutineSuspendExpr::SuspendReturnType::SuspendHandle:
275 AwaitSuspendIID = llvm::Intrinsic::coro_await_suspend_handle;
276 break;
277 }
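  // Shapes of the intrinsics selected above, paraphrasing llvm/docs/Coroutines.rst
  // (see that document for the authoritative signatures):
  //   void @llvm.coro.await.suspend.void(ptr %awaiter, ptr %frame, ptr %wrapper)
  //   i1   @llvm.coro.await.suspend.bool(ptr %awaiter, ptr %frame, ptr %wrapper)
  //   void @llvm.coro.await.suspend.handle(ptr %awaiter, ptr %frame, ptr %wrapper)
  // The handle variant also resumes the handle returned by await_suspend, which
  // is why it is treated as potentially throwing below.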
278
279 llvm::Function *AwaitSuspendIntrinsic = CGF.CGM.getIntrinsic(AwaitSuspendIID);
280
281 // SuspendHandle might throw since it also resumes the returned handle.
282 const bool AwaitSuspendCanThrow =
283 SuspendReturnType ==
284 CoroutineSuspendExpr::SuspendReturnType::SuspendHandle ||
285 StmtCanThrow(S.getSuspendExpr());
286
287 llvm::CallBase *SuspendRet = nullptr;
288 // FIXME: add call attributes?
289 if (AwaitSuspendCanThrow)
290 SuspendRet =
291 CGF.EmitCallOrInvoke(AwaitSuspendIntrinsic, SuspendIntrinsicCallArgs);
292 else
293 SuspendRet = CGF.EmitNounwindRuntimeCall(AwaitSuspendIntrinsic,
294 SuspendIntrinsicCallArgs);
295
296 assert(SuspendRet);
297 CGF.CurCoro.InSuspendBlock = false;
298
299 switch (SuspendReturnType) {
300 case CoroutineSuspendExpr::SuspendReturnType::SuspendVoid:
301 assert(SuspendRet->getType()->isVoidTy());
302 break;
303 case CoroutineSuspendExpr::SuspendReturnType::SuspendBool: {
304 assert(SuspendRet->getType()->isIntegerTy());
305
306 // Veto suspension if requested by bool returning await_suspend.
307 BasicBlock *RealSuspendBlock =
308 CGF.createBasicBlock(Prefix + Twine(".suspend.bool"));
309 CGF.Builder.CreateCondBr(SuspendRet, RealSuspendBlock, ReadyBlock);
310 CGF.EmitBlock(RealSuspendBlock);
311 break;
312 }
313 case CoroutineSuspendExpr::SuspendReturnType::SuspendHandle: {
314 assert(SuspendRet->getType()->isVoidTy());
315 break;
316 }
317 }
318
319 // Emit the suspend point.
320 const bool IsFinalSuspend = (Kind == AwaitKind::Final);
321 llvm::Function *CoroSuspend =
322 CGF.CGM.getIntrinsic(llvm::Intrinsic::coro_suspend);
323 auto *SuspendResult = Builder.CreateCall(
324 CoroSuspend, {SaveCall, Builder.getInt1(IsFinalSuspend)});
325
326 // Create a switch capturing three possible continuations.
327 auto *Switch = Builder.CreateSwitch(SuspendResult, Coro.SuspendBB, 2);
328 Switch->addCase(Builder.getInt8(0), ReadyBlock);
329 Switch->addCase(Builder.getInt8(1), CleanupBlock);
330
331 // Emit cleanup for this suspend point.
332 CGF.EmitBlock(CleanupBlock);
333 CGF.EmitBranchThroughCleanup(Coro.CleanupJD);
334
335 // Emit await_resume expression.
336 CGF.EmitBlock(ReadyBlock);
337
338 // Exception handling requires additional IR. If the 'await_resume' function
339 // is marked as 'noexcept', we avoid generating this additional IR.
340 CXXTryStmt *TryStmt = nullptr;
341 if (Coro.ExceptionHandler && Kind == AwaitKind::Init &&
342 StmtCanThrow(S.getResumeExpr())) {
343 Coro.ResumeEHVar =
344 CGF.CreateTempAlloca(Builder.getInt1Ty(), Prefix + Twine("resume.eh"));
345 Builder.CreateFlagStore(true, Coro.ResumeEHVar);
346
347 auto Loc = S.getResumeExpr()->getExprLoc();
348 auto *Catch = new (CGF.getContext())
349 CXXCatchStmt(Loc, /*exDecl=*/nullptr, Coro.ExceptionHandler);
350 auto *TryBody = CompoundStmt::Create(CGF.getContext(), S.getResumeExpr(),
351 FPOptionsOverride(), Loc, Loc);
352 TryStmt = CXXTryStmt::Create(CGF.getContext(), Loc, TryBody, Catch);
353 CGF.EnterCXXTryStmt(*TryStmt);
354 CGF.EmitStmt(TryBody);
    // We don't use EmitCXXTryStmt here because we need to emit the store to
    // ResumeEHVar below, which is not part of the try body.
357 Builder.CreateFlagStore(false, Coro.ResumeEHVar);
358 CGF.ExitCXXTryStmt(*TryStmt);
359 LValueOrRValue Res;
360 // We are not supposed to obtain the value from init suspend await_resume().
361 Res.RV = RValue::getIgnored();
362 return Res;
363 }
364
365 LValueOrRValue Res;
366 if (forLValue)
367 Res.LV = CGF.EmitLValue(S.getResumeExpr());
368 else
369 Res.RV = CGF.EmitAnyExpr(S.getResumeExpr(), aggSlot, ignoreResult);
370
371 return Res;
372 }
373
RValue CodeGenFunction::EmitCoawaitExpr(const CoawaitExpr &E,
375 AggValueSlot aggSlot,
376 bool ignoreResult) {
377 return emitSuspendExpression(*this, *CurCoro.Data, E,
378 CurCoro.Data->CurrentAwaitKind, aggSlot,
379 ignoreResult, /*forLValue*/false).RV;
380 }
RValue CodeGenFunction::EmitCoyieldExpr(const CoyieldExpr &E,
382 AggValueSlot aggSlot,
383 bool ignoreResult) {
384 return emitSuspendExpression(*this, *CurCoro.Data, E, AwaitKind::Yield,
385 aggSlot, ignoreResult, /*forLValue*/false).RV;
386 }
387
void CodeGenFunction::EmitCoreturnStmt(CoreturnStmt const &S) {
389 ++CurCoro.Data->CoreturnCount;
390 const Expr *RV = S.getOperand();
391 if (RV && RV->getType()->isVoidType() && !isa<InitListExpr>(RV)) {
    // Make sure to evaluate a non-init-list, void-typed operand of a co_return
    // for its side effects.
394 RunCleanupsScope cleanupScope(*this);
395 EmitIgnoredExpr(RV);
396 }
397 EmitStmt(S.getPromiseCall());
398 EmitBranchThroughCleanup(CurCoro.Data->FinalJD);
399 }
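// For illustration (hypothetical user code): `co_return make_result();` is
// emitted as the Sema-built promise call (roughly
// __promise.return_value(make_result())) followed by a branch to the
// final-suspend block, while a plain `co_return;` calls
// __promise.return_void(). A `co_return g();` where g() returns void first
// evaluates g() for its side effects via EmitIgnoredExpr above.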
400
401
402 #ifndef NDEBUG
static QualType getCoroutineSuspendExprReturnType(const ASTContext &Ctx,
404 const CoroutineSuspendExpr *E) {
405 const auto *RE = E->getResumeExpr();
406 // Is it possible for RE to be a CXXBindTemporaryExpr wrapping
407 // a MemberCallExpr?
408 assert(isa<CallExpr>(RE) && "unexpected suspend expression type");
409 return cast<CallExpr>(RE)->getCallReturnType(Ctx);
410 }
411 #endif
412
413 llvm::Function *
CodeGenFunction::generateAwaitSuspendWrapper(Twine const &CoroName,
415 Twine const &SuspendPointName,
416 CoroutineSuspendExpr const &S) {
417 std::string FuncName =
418 (CoroName + ".__await_suspend_wrapper__" + SuspendPointName).str();
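  // For example (illustrative name only): a coroutine "_Z1fv" with suspend
  // point prefix "await2" gets a wrapper named
  // "_Z1fv.__await_suspend_wrapper__await2".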
419
420 ASTContext &C = getContext();
421
422 FunctionArgList args;
423
424 ImplicitParamDecl AwaiterDecl(C, C.VoidPtrTy, ImplicitParamKind::Other);
425 ImplicitParamDecl FrameDecl(C, C.VoidPtrTy, ImplicitParamKind::Other);
426 QualType ReturnTy = S.getSuspendExpr()->getType();
427
428 args.push_back(&AwaiterDecl);
429 args.push_back(&FrameDecl);
430
431 const CGFunctionInfo &FI =
432 CGM.getTypes().arrangeBuiltinFunctionDeclaration(ReturnTy, args);
433
434 llvm::FunctionType *LTy = CGM.getTypes().GetFunctionType(FI);
435
436 llvm::Function *Fn = llvm::Function::Create(
437 LTy, llvm::GlobalValue::PrivateLinkage, FuncName, &CGM.getModule());
438
439 Fn->addParamAttr(0, llvm::Attribute::AttrKind::NonNull);
440 Fn->addParamAttr(0, llvm::Attribute::AttrKind::NoUndef);
441
442 Fn->addParamAttr(1, llvm::Attribute::AttrKind::NoUndef);
443
444 Fn->setMustProgress();
445 Fn->addFnAttr(llvm::Attribute::AttrKind::AlwaysInline);
446
447 StartFunction(GlobalDecl(), ReturnTy, Fn, FI, args);
448
449 // FIXME: add TBAA metadata to the loads
450 llvm::Value *AwaiterPtr = Builder.CreateLoad(GetAddrOfLocalVar(&AwaiterDecl));
451 auto AwaiterLValue =
452 MakeNaturalAlignAddrLValue(AwaiterPtr, AwaiterDecl.getType());
453
454 CurAwaitSuspendWrapper.FramePtr =
455 Builder.CreateLoad(GetAddrOfLocalVar(&FrameDecl));
456
457 auto AwaiterBinder = CodeGenFunction::OpaqueValueMappingData::bind(
458 *this, S.getOpaqueValue(), AwaiterLValue);
459
460 auto *SuspendRet = EmitScalarExpr(S.getSuspendExpr());
461
462 auto UnbindCommonOnExit =
463 llvm::make_scope_exit([&] { AwaiterBinder.unbind(*this); });
464 if (SuspendRet != nullptr) {
465 Fn->addRetAttr(llvm::Attribute::AttrKind::NoUndef);
466 Builder.CreateStore(SuspendRet, ReturnValue);
467 }
468
469 CurAwaitSuspendWrapper.FramePtr = nullptr;
470 FinishFunction();
471 return Fn;
472 }
473
474 LValue
CodeGenFunction::EmitCoawaitLValue(const CoawaitExpr *E) {
476 assert(getCoroutineSuspendExprReturnType(getContext(), E)->isReferenceType() &&
477 "Can't have a scalar return unless the return type is a "
478 "reference type!");
479 return emitSuspendExpression(*this, *CurCoro.Data, *E,
480 CurCoro.Data->CurrentAwaitKind, AggValueSlot::ignored(),
481 /*ignoreResult*/false, /*forLValue*/true).LV;
482 }
483
484 LValue
CodeGenFunction::EmitCoyieldLValue(const CoyieldExpr *E) {
486 assert(getCoroutineSuspendExprReturnType(getContext(), E)->isReferenceType() &&
487 "Can't have a scalar return unless the return type is a "
488 "reference type!");
489 return emitSuspendExpression(*this, *CurCoro.Data, *E,
490 AwaitKind::Yield, AggValueSlot::ignored(),
491 /*ignoreResult*/false, /*forLValue*/true).LV;
492 }
493
494 // Hunts for the parameter reference in the parameter copy/move declaration.
495 namespace {
496 struct GetParamRef : public StmtVisitor<GetParamRef> {
497 public:
498 DeclRefExpr *Expr = nullptr;
  GetParamRef() {}
  void VisitDeclRefExpr(DeclRefExpr *E) {
    assert(Expr == nullptr && "multiple declrefs in param move");
502 Expr = E;
503 }
  void VisitStmt(Stmt *S) {
505 for (auto *C : S->children()) {
506 if (C)
507 Visit(C);
508 }
509 }
510 };
511 }
512
// This class redirects references to parameters to their copies by changing
// the addresses in CGF.LocalDeclMap and restores the original values in
// its destructor.
516
517 namespace {
518 struct ParamReferenceReplacerRAII {
519 CodeGenFunction::DeclMapTy SavedLocals;
520 CodeGenFunction::DeclMapTy& LocalDeclMap;
521
  ParamReferenceReplacerRAII(CodeGenFunction::DeclMapTy &LocalDeclMap)
523 : LocalDeclMap(LocalDeclMap) {}
524
  void addCopy(DeclStmt const *PM) {
526 // Figure out what param it refers to.
527
528 assert(PM->isSingleDecl());
529 VarDecl const*VD = static_cast<VarDecl const*>(PM->getSingleDecl());
530 Expr const *InitExpr = VD->getInit();
531 GetParamRef Visitor;
532 Visitor.Visit(const_cast<Expr*>(InitExpr));
533 assert(Visitor.Expr);
534 DeclRefExpr *DREOrig = Visitor.Expr;
535 auto *PD = DREOrig->getDecl();
536
537 auto it = LocalDeclMap.find(PD);
538 assert(it != LocalDeclMap.end() && "parameter is not found");
539 SavedLocals.insert({ PD, it->second });
540
541 auto copyIt = LocalDeclMap.find(VD);
542 assert(copyIt != LocalDeclMap.end() && "parameter copy is not found");
543 it->second = copyIt->getSecond();
544 }
545
  ~ParamReferenceReplacerRAII() {
547 for (auto&& SavedLocal : SavedLocals) {
548 LocalDeclMap.insert({SavedLocal.first, SavedLocal.second});
549 }
550 }
551 };
552 }
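// For illustration (hypothetical user code, names invented here): for
//
//   task f(std::string s) { co_return; }
//
// Sema synthesizes a parameter move along the lines of
// `std::string __s_copy = std::move(s);`. addCopy() locates the DeclRefExpr
// for `s` inside that initializer and repoints LocalDeclMap[s] at the copy's
// address, so the user-authored body reads the frame-resident copy rather
// than the original parameter.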
553
// For the WinEH exception representation, the backend needs to know which
// funclet a coro.end belongs to. That information is passed in a funclet bundle.
556 static SmallVector<llvm::OperandBundleDef, 1>
getBundlesForCoroEnd(CodeGenFunction &CGF) {
558 SmallVector<llvm::OperandBundleDef, 1> BundleList;
559
560 if (llvm::Instruction *EHPad = CGF.CurrentFuncletPad)
561 BundleList.emplace_back("funclet", EHPad);
562
563 return BundleList;
564 }
565
566 namespace {
567 // We will insert coro.end to cut any of the destructors for objects that
568 // do not need to be destroyed once the coroutine is resumed.
569 // See llvm/docs/Coroutines.rst for more details about coro.end.
570 struct CallCoroEnd final : public EHScopeStack::Cleanup {
  void Emit(CodeGenFunction &CGF, Flags flags) override {
572 auto &CGM = CGF.CGM;
573 auto *NullPtr = llvm::ConstantPointerNull::get(CGF.Int8PtrTy);
574 llvm::Function *CoroEndFn = CGM.getIntrinsic(llvm::Intrinsic::coro_end);
575 // See if we have a funclet bundle to associate coro.end with. (WinEH)
576 auto Bundles = getBundlesForCoroEnd(CGF);
577 auto *CoroEnd =
578 CGF.Builder.CreateCall(CoroEndFn,
579 {NullPtr, CGF.Builder.getTrue(),
580 llvm::ConstantTokenNone::get(CoroEndFn->getContext())},
581 Bundles);
582 if (Bundles.empty()) {
583 // Otherwise, (landingpad model), create a conditional branch that leads
584 // either to a cleanup block or a block with EH resume instruction.
585 auto *ResumeBB = CGF.getEHResumeBlock(/*isCleanup=*/true);
586 auto *CleanupContBB = CGF.createBasicBlock("cleanup.cont");
587 CGF.Builder.CreateCondBr(CoroEnd, ResumeBB, CleanupContBB);
588 CGF.EmitBlock(CleanupContBB);
589 }
590 }
591 };
592 }
593
594 namespace {
595 // Make sure to call coro.delete on scope exit.
596 struct CallCoroDelete final : public EHScopeStack::Cleanup {
597 Stmt *Deallocate;
598
599 // Emit "if (coro.free(CoroId, CoroBegin)) Deallocate;"
600
601 // Note: That deallocation will be emitted twice: once for a normal exit and
602 // once for exceptional exit. This usage is safe because Deallocate does not
603 // contain any declarations. The SubStmtBuilder::makeNewAndDeleteExpr()
604 // builds a single call to a deallocation function which is safe to emit
605 // multiple times.
  void Emit(CodeGenFunction &CGF, Flags) override {
607 // Remember the current point, as we are going to emit deallocation code
608 // first to get to coro.free instruction that is an argument to a delete
609 // call.
610 BasicBlock *SaveInsertBlock = CGF.Builder.GetInsertBlock();
611
612 auto *FreeBB = CGF.createBasicBlock("coro.free");
613 CGF.EmitBlock(FreeBB);
614 CGF.EmitStmt(Deallocate);
615
616 auto *AfterFreeBB = CGF.createBasicBlock("after.coro.free");
617 CGF.EmitBlock(AfterFreeBB);
618
619 // We should have captured coro.free from the emission of deallocate.
620 auto *CoroFree = CGF.CurCoro.Data->LastCoroFree;
621 if (!CoroFree) {
622 CGF.CGM.Error(Deallocate->getBeginLoc(),
623 "Deallocation expressoin does not refer to coro.free");
624 return;
625 }
626
627 // Get back to the block we were originally and move coro.free there.
628 auto *InsertPt = SaveInsertBlock->getTerminator();
629 CoroFree->moveBefore(InsertPt);
630 CGF.Builder.SetInsertPoint(InsertPt);
631
632 // Add if (auto *mem = coro.free) Deallocate;
633 auto *NullPtr = llvm::ConstantPointerNull::get(CGF.Int8PtrTy);
634 auto *Cond = CGF.Builder.CreateICmpNE(CoroFree, NullPtr);
635 CGF.Builder.CreateCondBr(Cond, FreeBB, AfterFreeBB);
636
637 // No longer need old terminator.
638 InsertPt->eraseFromParent();
639 CGF.Builder.SetInsertPoint(AfterFreeBB);
640 }
  explicit CallCoroDelete(Stmt *DeallocStmt) : Deallocate(DeallocStmt) {}
642 };
643 }
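// The net effect of CallCoroDelete at each exit path is roughly (illustrative,
// not verbatim IR):
//
//   %mem = call ptr @llvm.coro.free(token %id, ptr %frame)
//   %do.free = icmp ne ptr %mem, null
//   br i1 %do.free, label %coro.free, label %after.coro.free
// coro.free:
//   ; the Deallocate statement, e.g. a call to operator delete(%mem)
//   br label %after.coro.free
// after.coro.free: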
644
645 namespace {
646 struct GetReturnObjectManager {
647 CodeGenFunction &CGF;
648 CGBuilderTy &Builder;
649 const CoroutineBodyStmt &S;
650 // When true, performs RVO for the return object.
651 bool DirectEmit = false;
652
653 Address GroActiveFlag;
654 CodeGenFunction::AutoVarEmission GroEmission;
655
  GetReturnObjectManager(CodeGenFunction &CGF, const CoroutineBodyStmt &S)
657 : CGF(CGF), Builder(CGF.Builder), S(S), GroActiveFlag(Address::invalid()),
658 GroEmission(CodeGenFunction::AutoVarEmission::invalid()) {
659 // The call to get_return_object is sequenced before the call to
660 // initial_suspend and is invoked at most once, but there are caveats
    // regarding whether the prvalue result object may be initialized
    // directly/eagerly or in a delayed fashion, depending on the types involved.
663 //
664 // More info at https://github.com/cplusplus/papers/issues/1414
665 //
666 // The general cases:
667 // 1. Same type of get_return_object and coroutine return type (direct
668 // emission):
669 // - Constructed in the return slot.
670 // 2. Different types (delayed emission):
    // - A temporary object is constructed prior to the initial suspend and
    //   initialized with a call to get_return_object().
    // - When the coroutine needs to return to the caller, the coroutine's
    //   return value is initialized with the expiring value of the temporary
    //   obtained above.
676 //
677 // Direct emission for void returning coroutines or GROs.
678 DirectEmit = [&]() {
679 auto *RVI = S.getReturnValueInit();
680 assert(RVI && "expected RVI");
681 auto GroType = RVI->getType();
682 return CGF.getContext().hasSameType(GroType, CGF.FnRetTy);
683 }();
684 }
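  // For illustration (hypothetical user code): if promise_type::get_return_object()
  // already returns the coroutine's declared return type (say, `task`),
  // DirectEmit is true and the result is constructed straight into the return
  // slot. If get_return_object() instead returns a distinct convertible proxy
  // type, emission is delayed: a temporary (the "gro") is created before the
  // initial suspend and converted to the coroutine's return type only when
  // control returns to the caller.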
685
  // The gro variable has to outlive the coroutine frame and the coroutine
  // promise, but it can only be initialized after the coroutine promise has
  // been created; thus, we split its emission into two parts. EmitGroAlloca
  // emits an alloca and sets up cleanups. Later, when the coroutine promise is
  // available, we initialize the gro and set the flag that the cleanup is now
  // active.
  void EmitGroAlloca() {
692 if (DirectEmit)
693 return;
694
695 auto *GroDeclStmt = dyn_cast_or_null<DeclStmt>(S.getResultDecl());
696 if (!GroDeclStmt) {
697 // If get_return_object returns void, no need to do an alloca.
698 return;
699 }
700
701 auto *GroVarDecl = cast<VarDecl>(GroDeclStmt->getSingleDecl());
702
703 // Set GRO flag that it is not initialized yet
704 GroActiveFlag = CGF.CreateTempAlloca(Builder.getInt1Ty(), CharUnits::One(),
705 "gro.active");
706 Builder.CreateStore(Builder.getFalse(), GroActiveFlag);
707
708 GroEmission = CGF.EmitAutoVarAlloca(*GroVarDecl);
709 auto *GroAlloca = dyn_cast_or_null<llvm::AllocaInst>(
710 GroEmission.getOriginalAllocatedAddress().getPointer());
711 assert(GroAlloca && "expected alloca to be emitted");
712 GroAlloca->setMetadata(llvm::LLVMContext::MD_coro_outside_frame,
713 llvm::MDNode::get(CGF.CGM.getLLVMContext(), {}));
714
715 // Remember the top of EHStack before emitting the cleanup.
716 auto old_top = CGF.EHStack.stable_begin();
717 CGF.EmitAutoVarCleanups(GroEmission);
718 auto top = CGF.EHStack.stable_begin();
719
720 // Make the cleanup conditional on gro.active
721 for (auto b = CGF.EHStack.find(top), e = CGF.EHStack.find(old_top); b != e;
722 b++) {
723 if (auto *Cleanup = dyn_cast<EHCleanupScope>(&*b)) {
724 assert(!Cleanup->hasActiveFlag() && "cleanup already has active flag?");
725 Cleanup->setActiveFlag(GroActiveFlag);
726 Cleanup->setTestFlagInEHCleanup();
727 Cleanup->setTestFlagInNormalCleanup();
728 }
729 }
730 }
731
  void EmitGroInit() {
733 if (DirectEmit) {
734 // ReturnValue should be valid as long as the coroutine's return type
735 // is not void. The assertion could help us to reduce the check later.
736 assert(CGF.ReturnValue.isValid() == (bool)S.getReturnStmt());
737 // Now we have the promise, initialize the GRO.
738 // We need to emit `get_return_object` first. According to:
739 // [dcl.fct.def.coroutine]p7
740 // The call to get_return_object is sequenced before the call to
741 // initial_suspend and is invoked at most once.
742 //
      // So we can't emit the return value when we emit the return statement;
      // otherwise the call to get_return_object wouldn't come before
      // initial_suspend.
746 if (CGF.ReturnValue.isValid()) {
747 CGF.EmitAnyExprToMem(S.getReturnValue(), CGF.ReturnValue,
748 S.getReturnValue()->getType().getQualifiers(),
749 /*IsInit*/ true);
750 }
751 return;
752 }
753
754 if (!GroActiveFlag.isValid()) {
755 // No Gro variable was allocated. Simply emit the call to
756 // get_return_object.
757 CGF.EmitStmt(S.getResultDecl());
758 return;
759 }
760
761 CGF.EmitAutoVarInit(GroEmission);
762 Builder.CreateStore(Builder.getTrue(), GroActiveFlag);
763 }
764 };
765 } // namespace
766
static void emitBodyAndFallthrough(CodeGenFunction &CGF,
768 const CoroutineBodyStmt &S, Stmt *Body) {
769 CGF.EmitStmt(Body);
770 const bool CanFallthrough = CGF.Builder.GetInsertBlock();
771 if (CanFallthrough)
772 if (Stmt *OnFallthrough = S.getFallthroughHandler())
773 CGF.EmitStmt(OnFallthrough);
774 }
775
void CodeGenFunction::EmitCoroutineBody(const CoroutineBodyStmt &S) {
777 auto *NullPtr = llvm::ConstantPointerNull::get(Builder.getPtrTy());
778 auto &TI = CGM.getContext().getTargetInfo();
779 unsigned NewAlign = TI.getNewAlign() / TI.getCharWidth();
780
781 auto *EntryBB = Builder.GetInsertBlock();
782 auto *AllocBB = createBasicBlock("coro.alloc");
783 auto *InitBB = createBasicBlock("coro.init");
784 auto *FinalBB = createBasicBlock("coro.final");
785 auto *RetBB = createBasicBlock("coro.ret");
786
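  // The emitted allocation skeleton looks roughly like this (illustrative, not
  // verbatim IR; the promise operand of coro.id starts out null and is patched
  // later, once the promise variable has been emitted):
  //
  //   %id   = call token @llvm.coro.id(i32 <new-align>, ptr null, ptr null, ptr null)
  //   %need = call i1 @llvm.coro.alloc(token %id)
  //   br i1 %need, label %coro.alloc, label %coro.init
  // coro.alloc:                        ; operator new via S.getAllocate()
  //   ...
  // coro.init:
  //   %frame = call ptr @llvm.coro.begin(token %id, ptr %mem)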
787 auto *CoroId = Builder.CreateCall(
788 CGM.getIntrinsic(llvm::Intrinsic::coro_id),
789 {Builder.getInt32(NewAlign), NullPtr, NullPtr, NullPtr});
790 createCoroData(*this, CurCoro, CoroId);
791 CurCoro.Data->SuspendBB = RetBB;
792 assert(ShouldEmitLifetimeMarkers &&
793 "Must emit lifetime intrinsics for coroutines");
794
  // The backend is allowed to elide memory allocations; to help it, emit
  // auto mem = coro.alloc() ? 0 : ... allocation code ...;
797 auto *CoroAlloc = Builder.CreateCall(
798 CGM.getIntrinsic(llvm::Intrinsic::coro_alloc), {CoroId});
799
800 Builder.CreateCondBr(CoroAlloc, AllocBB, InitBB);
801
802 EmitBlock(AllocBB);
803 auto *AllocateCall = EmitScalarExpr(S.getAllocate());
804 auto *AllocOrInvokeContBB = Builder.GetInsertBlock();
805
806 // Handle allocation failure if 'ReturnStmtOnAllocFailure' was provided.
807 if (auto *RetOnAllocFailure = S.getReturnStmtOnAllocFailure()) {
808 auto *RetOnFailureBB = createBasicBlock("coro.ret.on.failure");
809
810 // See if allocation was successful.
811 auto *NullPtr = llvm::ConstantPointerNull::get(Int8PtrTy);
812 auto *Cond = Builder.CreateICmpNE(AllocateCall, NullPtr);
813 // Expect the allocation to be successful.
814 emitCondLikelihoodViaExpectIntrinsic(Cond, Stmt::LH_Likely);
815 Builder.CreateCondBr(Cond, InitBB, RetOnFailureBB);
816
817 // If not, return OnAllocFailure object.
818 EmitBlock(RetOnFailureBB);
819 EmitStmt(RetOnAllocFailure);
820 }
821 else {
822 Builder.CreateBr(InitBB);
823 }
824
825 EmitBlock(InitBB);
826
827 // Pass the result of the allocation to coro.begin.
828 auto *Phi = Builder.CreatePHI(VoidPtrTy, 2);
829 Phi->addIncoming(NullPtr, EntryBB);
830 Phi->addIncoming(AllocateCall, AllocOrInvokeContBB);
831 auto *CoroBegin = Builder.CreateCall(
832 CGM.getIntrinsic(llvm::Intrinsic::coro_begin), {CoroId, Phi});
833 CurCoro.Data->CoroBegin = CoroBegin;
834
835 GetReturnObjectManager GroManager(*this, S);
836 GroManager.EmitGroAlloca();
837
838 CurCoro.Data->CleanupJD = getJumpDestInCurrentScope(RetBB);
839 {
840 CGDebugInfo *DI = getDebugInfo();
841 ParamReferenceReplacerRAII ParamReplacer(LocalDeclMap);
842 CodeGenFunction::RunCleanupsScope ResumeScope(*this);
843 EHStack.pushCleanup<CallCoroDelete>(NormalAndEHCleanup, S.getDeallocate());
844
845 // Create mapping between parameters and copy-params for coroutine function.
846 llvm::ArrayRef<const Stmt *> ParamMoves = S.getParamMoves();
847 assert(
848 (ParamMoves.size() == 0 || (ParamMoves.size() == FnArgs.size())) &&
849 "ParamMoves and FnArgs should be the same size for coroutine function");
850 if (ParamMoves.size() == FnArgs.size() && DI)
851 for (const auto Pair : llvm::zip(FnArgs, ParamMoves))
852 DI->getCoroutineParameterMappings().insert(
853 {std::get<0>(Pair), std::get<1>(Pair)});
854
855 // Create parameter copies. We do it before creating a promise, since an
856 // evolution of coroutine TS may allow promise constructor to observe
857 // parameter copies.
858 for (auto *PM : S.getParamMoves()) {
859 EmitStmt(PM);
860 ParamReplacer.addCopy(cast<DeclStmt>(PM));
861 // TODO: if(CoroParam(...)) need to surround ctor and dtor
862 // for the copy, so that llvm can elide it if the copy is
863 // not needed.
864 }
865
866 EmitStmt(S.getPromiseDeclStmt());
867
868 Address PromiseAddr = GetAddrOfLocalVar(S.getPromiseDecl());
869 auto *PromiseAddrVoidPtr = new llvm::BitCastInst(
870 PromiseAddr.emitRawPointer(*this), VoidPtrTy, "", CoroId);
871 // Update CoroId to refer to the promise. We could not do it earlier because
872 // promise local variable was not emitted yet.
873 CoroId->setArgOperand(1, PromiseAddrVoidPtr);
874
875 // Now we have the promise, initialize the GRO
876 GroManager.EmitGroInit();
877
878 EHStack.pushCleanup<CallCoroEnd>(EHCleanup);
879
880 CurCoro.Data->CurrentAwaitKind = AwaitKind::Init;
881 CurCoro.Data->ExceptionHandler = S.getExceptionHandler();
882 EmitStmt(S.getInitSuspendStmt());
883 CurCoro.Data->FinalJD = getJumpDestInCurrentScope(FinalBB);
884
885 CurCoro.Data->CurrentAwaitKind = AwaitKind::Normal;
886
887 if (CurCoro.Data->ExceptionHandler) {
888 // If we generated IR to record whether an exception was thrown from
889 // 'await_resume', then use that IR to determine whether the coroutine
890 // body should be skipped.
891 // If we didn't generate the IR (perhaps because 'await_resume' was marked
892 // as 'noexcept'), then we skip this check.
893 BasicBlock *ContBB = nullptr;
894 if (CurCoro.Data->ResumeEHVar) {
895 BasicBlock *BodyBB = createBasicBlock("coro.resumed.body");
896 ContBB = createBasicBlock("coro.resumed.cont");
897 Value *SkipBody = Builder.CreateFlagLoad(CurCoro.Data->ResumeEHVar,
898 "coro.resumed.eh");
899 Builder.CreateCondBr(SkipBody, ContBB, BodyBB);
900 EmitBlock(BodyBB);
901 }
902
903 auto Loc = S.getBeginLoc();
904 CXXCatchStmt Catch(Loc, /*exDecl=*/nullptr,
905 CurCoro.Data->ExceptionHandler);
906 auto *TryStmt =
907 CXXTryStmt::Create(getContext(), Loc, S.getBody(), &Catch);
908
909 EnterCXXTryStmt(*TryStmt);
910 emitBodyAndFallthrough(*this, S, TryStmt->getTryBlock());
911 ExitCXXTryStmt(*TryStmt);
912
913 if (ContBB)
914 EmitBlock(ContBB);
915 }
916 else {
917 emitBodyAndFallthrough(*this, S, S.getBody());
918 }
919
920 // See if we need to generate final suspend.
921 const bool CanFallthrough = Builder.GetInsertBlock();
922 const bool HasCoreturns = CurCoro.Data->CoreturnCount > 0;
923 if (CanFallthrough || HasCoreturns) {
924 EmitBlock(FinalBB);
925 CurCoro.Data->CurrentAwaitKind = AwaitKind::Final;
926 EmitStmt(S.getFinalSuspendStmt());
927 } else {
928 // We don't need FinalBB. Emit it to make sure the block is deleted.
929 EmitBlock(FinalBB, /*IsFinished=*/true);
930 }
931 }
932
933 EmitBlock(RetBB);
934 // Emit coro.end before getReturnStmt (and parameter destructors), since
935 // resume and destroy parts of the coroutine should not include them.
936 llvm::Function *CoroEnd = CGM.getIntrinsic(llvm::Intrinsic::coro_end);
937 Builder.CreateCall(CoroEnd,
938 {NullPtr, Builder.getFalse(),
939 llvm::ConstantTokenNone::get(CoroEnd->getContext())});
940
941 if (Stmt *Ret = S.getReturnStmt()) {
    // Since we already emitted the return value above, we shouldn't
    // emit it again here.
944 if (GroManager.DirectEmit)
945 cast<ReturnStmt>(Ret)->setRetValue(nullptr);
946 EmitStmt(Ret);
947 }
948
  // LLVM requires the frontend to mark the coroutine.
950 CurFn->setPresplitCoroutine();
951
952 if (CXXRecordDecl *RD = FnRetTy->getAsCXXRecordDecl();
953 RD && RD->hasAttr<CoroOnlyDestroyWhenCompleteAttr>())
954 CurFn->setCoroDestroyOnlyWhenComplete();
955 }
956
957 // Emit coroutine intrinsic and patch up arguments of the token type.
RValue CodeGenFunction::EmitCoroutineIntrinsic(const CallExpr *E,
959 unsigned int IID) {
960 SmallVector<llvm::Value *, 8> Args;
961 switch (IID) {
962 default:
963 break;
964 // The coro.frame builtin is replaced with an SSA value of the coro.begin
965 // intrinsic.
966 case llvm::Intrinsic::coro_frame: {
967 if (CurCoro.Data && CurCoro.Data->CoroBegin) {
968 return RValue::get(CurCoro.Data->CoroBegin);
969 }
970
971 if (CurAwaitSuspendWrapper.FramePtr) {
972 return RValue::get(CurAwaitSuspendWrapper.FramePtr);
973 }
974
    CGM.Error(E->getBeginLoc(), "this builtin expects that __builtin_coro_begin "
976 "has been used earlier in this function");
977 auto *NullPtr = llvm::ConstantPointerNull::get(Builder.getPtrTy());
978 return RValue::get(NullPtr);
979 }
980 case llvm::Intrinsic::coro_size: {
981 auto &Context = getContext();
982 CanQualType SizeTy = Context.getSizeType();
983 llvm::IntegerType *T = Builder.getIntNTy(Context.getTypeSize(SizeTy));
984 llvm::Function *F = CGM.getIntrinsic(llvm::Intrinsic::coro_size, T);
985 return RValue::get(Builder.CreateCall(F));
986 }
987 case llvm::Intrinsic::coro_align: {
988 auto &Context = getContext();
989 CanQualType SizeTy = Context.getSizeType();
990 llvm::IntegerType *T = Builder.getIntNTy(Context.getTypeSize(SizeTy));
991 llvm::Function *F = CGM.getIntrinsic(llvm::Intrinsic::coro_align, T);
992 return RValue::get(Builder.CreateCall(F));
993 }
994 // The following three intrinsics take a token parameter referring to a token
995 // returned by earlier call to @llvm.coro.id. Since we cannot represent it in
996 // builtins, we patch it up here.
997 case llvm::Intrinsic::coro_alloc:
998 case llvm::Intrinsic::coro_begin:
999 case llvm::Intrinsic::coro_free: {
1000 if (CurCoro.Data && CurCoro.Data->CoroId) {
1001 Args.push_back(CurCoro.Data->CoroId);
1002 break;
1003 }
    CGM.Error(E->getBeginLoc(), "this builtin expects that __builtin_coro_id has"
1005 " been used earlier in this function");
1006 // Fallthrough to the next case to add TokenNone as the first argument.
1007 [[fallthrough]];
1008 }
1009 // @llvm.coro.suspend takes a token parameter. Add token 'none' as the first
1010 // argument.
1011 case llvm::Intrinsic::coro_suspend:
1012 Args.push_back(llvm::ConstantTokenNone::get(getLLVMContext()));
1013 break;
1014 }
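  // For illustration (hypothetical C-mode usage): __builtin_coro_alloc() takes
  // no token argument at the source level, so the token produced by an earlier
  // __builtin_coro_id(...) is prepended here and the emitted call becomes
  // @llvm.coro.alloc(token %id).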
1015 for (const Expr *Arg : E->arguments())
1016 Args.push_back(EmitScalarExpr(Arg));
1017 // @llvm.coro.end takes a token parameter. Add token 'none' as the last
1018 // argument.
1019 if (IID == llvm::Intrinsic::coro_end)
1020 Args.push_back(llvm::ConstantTokenNone::get(getLLVMContext()));
1021
1022 llvm::Function *F = CGM.getIntrinsic(IID);
1023 llvm::CallInst *Call = Builder.CreateCall(F, Args);
1024
  // Note: The following code enables emitting coro.id and coro.begin by
  // hand, to experiment with coroutines in C.
1027 // If we see @llvm.coro.id remember it in the CoroData. We will update
1028 // coro.alloc, coro.begin and coro.free intrinsics to refer to it.
1029 if (IID == llvm::Intrinsic::coro_id) {
1030 createCoroData(*this, CurCoro, Call, E);
1031 }
1032 else if (IID == llvm::Intrinsic::coro_begin) {
1033 if (CurCoro.Data)
1034 CurCoro.Data->CoroBegin = Call;
1035 }
1036 else if (IID == llvm::Intrinsic::coro_free) {
1037 // Remember the last coro_free as we need it to build the conditional
1038 // deletion of the coroutine frame.
1039 if (CurCoro.Data)
1040 CurCoro.Data->LastCoroFree = Call;
1041 }
1042 return RValue::get(Call);
1043 }
1044