73#include <initializer_list>
78#define DEBUG_TYPE "coro-split"
110 Value *NewFramePtr =
nullptr;
122 : OrigF(OrigF), NewF(
nullptr), Suffix(Suffix), Shape(Shape), FKind(FKind),
131 : OrigF(OrigF), NewF(NewF), Suffix(Suffix), Shape(Shape),
132 FKind(Shape.
ABI == coro::ABI::Async ? Kind::Async : Kind::Continuation),
135 Shape.
ABI == coro::ABI::RetconOnce || Shape.
ABI == coro::ABI::Async);
136 assert(NewF &&
"need existing function for continuation");
137 assert(ActiveSuspend &&
"need active suspend point for continuation");
141 assert(NewF !=
nullptr &&
"declaration not yet set");
148 bool isSwitchDestroyFunction() {
151 case Kind::Continuation:
152 case Kind::SwitchResume:
154 case Kind::SwitchUnwind:
155 case Kind::SwitchCleanup:
161 void replaceEntryBlock();
162 Value *deriveNewFramePointer();
163 void replaceRetconOrAsyncSuspendUses();
164 void replaceCoroSuspends();
165 void replaceCoroEnds();
168 void handleFinalSuspend();
192 if (
auto Invoke = dyn_cast<InvokeInst>(CB)) {
195 Invoke->getUnwindDest(), {Awaiter, FramePtr});
198 std::copy(Invoke->bundle_op_info_begin(), Invoke->bundle_op_info_end(),
199 WrapperInvoke->bundle_op_info_begin());
200 WrapperInvoke->setAttributes(NewAttributes);
201 WrapperInvoke->setDebugLoc(Invoke->getDebugLoc());
202 NewCall = WrapperInvoke;
203 }
else if (
auto Call = dyn_cast<CallInst>(CB)) {
207 WrapperCall->setDebugLoc(Call->getDebugLoc());
208 NewCall = WrapperCall;
214 Intrinsic::coro_await_suspend_handle) {
217 if (
auto *Invoke = dyn_cast<InvokeInst>(CB)) {
219 Builder.
SetInsertPoint(Invoke->getNormalDest()->getFirstInsertionPt());
229 auto *ResumeCall = Builder.
CreateCall(ResumeTy, ResumeAddr, {NewCall});
235 NewCall = ResumeCall;
251 assert(Shape.
ABI == coro::ABI::Retcon || Shape.
ABI == coro::ABI::RetconOnce);
264 auto *EndAsync = dyn_cast<CoroAsyncEndInst>(
End);
270 auto *MustTailCallFunc = EndAsync->getMustTailCallFunction();
271 if (!MustTailCallFunc) {
277 auto *CoroEndBlock =
End->getParent();
278 auto *MustTailCallFuncBlock = CoroEndBlock->getSinglePredecessor();
279 assert(MustTailCallFuncBlock &&
"Must have a single predecessor block");
280 auto It = MustTailCallFuncBlock->getTerminator()->getIterator();
281 auto *MustTailCall = cast<CallInst>(&*std::prev(It));
282 CoroEndBlock->splice(
End->getIterator(), MustTailCallFuncBlock,
283 MustTailCall->getIterator());
291 auto *BB =
End->getParent();
292 BB->splitBasicBlock(
End);
293 BB->getTerminator()->eraseFromParent();
296 assert(InlineRes.isSuccess() &&
"Expected inlining to succeed");
313 case coro::ABI::Switch:
314 assert(!cast<CoroEndInst>(
End)->hasResults() &&
315 "switch coroutine should not return any values");
324 case coro::ABI::Async: {
326 if (!CoroEndBlockNeedsCleanup)
333 case coro::ABI::RetconOnce: {
335 auto *CoroEnd = cast<CoroEndInst>(
End);
338 if (!CoroEnd->hasResults()) {
344 auto *CoroResults = CoroEnd->getResults();
345 unsigned NumReturns = CoroResults->numReturns();
347 if (
auto *RetStructTy = dyn_cast<StructType>(
RetTy)) {
348 assert(RetStructTy->getNumElements() == NumReturns &&
349 "numbers of returns should match resume function singature");
352 for (
Value *RetValEl : CoroResults->return_values())
355 }
else if (NumReturns == 0) {
360 Builder.
CreateRet(*CoroResults->retval_begin());
364 CoroResults->eraseFromParent();
370 case coro::ABI::Retcon: {
371 assert(!cast<CoroEndInst>(
End)->hasResults() &&
372 "retcon coroutine should not return any values");
375 auto RetStructTy = dyn_cast<StructType>(
RetTy);
377 cast<PointerType>(RetStructTy ? RetStructTy->getElementType(0) :
RetTy);
390 auto *BB =
End->getParent();
391 BB->splitBasicBlock(
End);
392 BB->getTerminator()->eraseFromParent();
406 Shape.
ABI == coro::ABI::Switch &&
407 "markCoroutineAsDone is only supported for Switch-Resumed ABI for now.");
426 "The final suspend should only live in the last position of "
444 case coro::ABI::Switch: {
457 case coro::ABI::Async:
460 case coro::ABI::Retcon:
461 case coro::ABI::RetconOnce:
468 auto *FromPad = cast<CleanupPadInst>(Bundle->Inputs[0]);
470 End->getParent()->splitBasicBlock(
End);
471 CleanupRet->getParent()->getTerminator()->eraseFromParent();
485 End->eraseFromParent();
497void CoroCloner::handleFinalSuspend() {
498 assert(Shape.ABI == coro::ABI::Switch &&
499 Shape.SwitchLowering.HasFinalSuspend);
501 if (isSwitchDestroyFunction() && Shape.SwitchLowering.HasUnwindCoroEnd)
504 auto *
Switch = cast<SwitchInst>(VMap[Shape.SwitchLowering.ResumeSwitch]);
505 auto FinalCaseIt = std::prev(
Switch->case_end());
506 BasicBlock *ResumeBB = FinalCaseIt->getCaseSuccessor();
507 Switch->removeCase(FinalCaseIt);
508 if (isSwitchDestroyFunction()) {
513 if (NewF->isCoroOnlyDestroyWhenComplete()) {
516 Builder.CreateBr(ResumeBB);
518 auto *GepIndex = Builder.CreateStructGEP(
522 Builder.CreateLoad(Shape.getSwitchResumePointerType(), GepIndex);
523 auto *
Cond = Builder.CreateIsNull(Load);
524 Builder.CreateCondBr(
Cond, ResumeBB, NewSwitchBB);
532 auto *AsyncSuspend = cast<CoroSuspendAsyncInst>(Suspend);
533 auto *StructTy = cast<StructType>(AsyncSuspend->getType());
536 return FunctionType::get(VoidTy, StructTy->elements(),
false);
544 auto *FnTy = (Shape.
ABI != coro::ABI::Async)
552 M->getFunctionList().insert(InsertBefore, NewF);
561void CoroCloner::replaceRetconOrAsyncSuspendUses() {
562 assert(Shape.ABI == coro::ABI::Retcon || Shape.ABI == coro::ABI::RetconOnce ||
563 Shape.ABI == coro::ABI::Async);
565 auto NewS = VMap[ActiveSuspend];
566 if (NewS->use_empty())
573 bool IsAsyncABI = Shape.ABI == coro::ABI::Async;
574 for (
auto I = IsAsyncABI ? NewF->arg_begin() : std::next(NewF->arg_begin()),
581 if (!isa<StructType>(NewS->getType())) {
583 NewS->replaceAllUsesWith(
Args.front());
589 auto *EVI = dyn_cast<ExtractValueInst>(
U.getUser());
590 if (!EVI || EVI->getNumIndices() != 1)
593 EVI->replaceAllUsesWith(Args[EVI->getIndices().front()]);
594 EVI->eraseFromParent();
598 if (NewS->use_empty())
603 for (
size_t I = 0, E =
Args.size();
I != E; ++
I)
604 Agg = Builder.CreateInsertValue(Agg, Args[
I],
I);
609void CoroCloner::replaceCoroSuspends() {
610 Value *SuspendResult;
619 case coro::ABI::Switch:
620 SuspendResult = Builder.getInt8(isSwitchDestroyFunction() ? 1 : 0);
624 case coro::ABI::Async:
630 case coro::ABI::RetconOnce:
631 case coro::ABI::Retcon:
637 if (CS == ActiveSuspend)
640 auto *MappedCS = cast<AnyCoroSuspendInst>(VMap[CS]);
641 MappedCS->replaceAllUsesWith(SuspendResult);
642 MappedCS->eraseFromParent();
646void CoroCloner::replaceCoroEnds() {
650 auto *NewCE = cast<AnyCoroEndInst>(VMap[CE]);
659 Value *CachedSlot =
nullptr;
660 auto getSwiftErrorSlot = [&](
Type *ValueTy) ->
Value * {
665 for (
auto &Arg :
F.args()) {
666 if (Arg.isSwiftError()) {
673 IRBuilder<> Builder(
F.getEntryBlock().getFirstNonPHIOrDbg());
682 auto MappedOp = VMap ? cast<CallInst>((*VMap)[
Op]) :
Op;
687 if (
Op->arg_empty()) {
688 auto ValueTy =
Op->getType();
689 auto Slot = getSwiftErrorSlot(ValueTy);
690 MappedResult = Builder.
CreateLoad(ValueTy, Slot);
693 auto Value = MappedOp->getArgOperand(0);
695 auto Slot = getSwiftErrorSlot(ValueTy);
700 MappedOp->replaceAllUsesWith(MappedResult);
701 MappedOp->eraseFromParent();
705 if (VMap ==
nullptr) {
711static std::pair<SmallVector<DbgVariableIntrinsic *, 8>,
719 if (
auto *DVI = dyn_cast<DbgVariableIntrinsic>(&
I))
722 return {Intrinsics, DbgVariableRecords};
725void CoroCloner::replaceSwiftErrorOps() {
729void CoroCloner::salvageDebugInfo() {
746 auto IsUnreachableBlock = [&](
BasicBlock *BB) {
750 auto RemoveOne = [&](
auto *DVI) {
751 if (IsUnreachableBlock(DVI->getParent()))
752 DVI->eraseFromParent();
753 else if (isa_and_nonnull<AllocaInst>(DVI->getVariableLocationOp(0))) {
756 for (
auto *
User : DVI->getVariableLocationOp(0)->
users())
757 if (
auto *
I = dyn_cast<Instruction>(
User))
758 if (!isa<AllocaInst>(
I) && !IsUnreachableBlock(
I->getParent()))
761 DVI->eraseFromParent();
765 for_each(DbgVariableRecords, RemoveOne);
768void CoroCloner::replaceEntryBlock() {
774 auto *Entry = cast<BasicBlock>(VMap[Shape.AllocaSpillBlock]);
775 auto *OldEntry = &NewF->getEntryBlock();
776 Entry->setName(
"entry" + Suffix);
777 Entry->moveBefore(OldEntry);
778 Entry->getTerminator()->eraseFromParent();
783 assert(Entry->hasOneUse());
784 auto BranchToEntry = cast<BranchInst>(Entry->user_back());
785 assert(BranchToEntry->isUnconditional());
786 Builder.SetInsertPoint(BranchToEntry);
787 Builder.CreateUnreachable();
788 BranchToEntry->eraseFromParent();
791 Builder.SetInsertPoint(Entry);
793 case coro::ABI::Switch: {
797 cast<BasicBlock>(VMap[Shape.SwitchLowering.ResumeEntryBlock]);
798 Builder.CreateBr(SwitchBB);
801 case coro::ABI::Async:
802 case coro::ABI::Retcon:
803 case coro::ABI::RetconOnce: {
807 assert((Shape.ABI == coro::ABI::Async &&
808 isa<CoroSuspendAsyncInst>(ActiveSuspend)) ||
809 ((Shape.ABI == coro::ABI::Retcon ||
810 Shape.ABI == coro::ABI::RetconOnce) &&
811 isa<CoroSuspendRetconInst>(ActiveSuspend)));
812 auto *MappedCS = cast<AnyCoroSuspendInst>(VMap[ActiveSuspend]);
813 auto Branch = cast<BranchInst>(MappedCS->getNextNode());
815 Builder.CreateBr(
Branch->getSuccessor(0));
825 auto *Alloca = dyn_cast<AllocaInst>(&
I);
826 if (!Alloca ||
I.use_empty())
828 if (DT.isReachableFromEntry(
I.getParent()) ||
829 !isa<ConstantInt>(Alloca->getArraySize()))
831 I.moveBefore(*Entry, Entry->getFirstInsertionPt());
836Value *CoroCloner::deriveNewFramePointer() {
841 case coro::ABI::Switch:
842 return &*NewF->arg_begin();
848 case coro::ABI::Async: {
849 auto *ActiveAsyncSuspend = cast<CoroSuspendAsyncInst>(ActiveSuspend);
850 auto ContextIdx = ActiveAsyncSuspend->getStorageArgumentIndex() & 0xff;
851 auto *CalleeContext = NewF->getArg(ContextIdx);
852 auto *ProjectionFunc =
853 ActiveAsyncSuspend->getAsyncContextProjectionFunction();
855 cast<CoroSuspendAsyncInst>(VMap[ActiveSuspend])->getDebugLoc();
857 auto *CallerContext = Builder.CreateCall(ProjectionFunc->getFunctionType(),
858 ProjectionFunc, CalleeContext);
859 CallerContext->setCallingConv(ProjectionFunc->getCallingConv());
860 CallerContext->setDebugLoc(DbgLoc);
862 auto &
Context = Builder.getContext();
863 auto *FramePtrAddr = Builder.CreateConstInBoundsGEP1_32(
865 Shape.AsyncLowering.FrameOffset,
"async.ctx.frameptr");
869 assert(InlineRes.isSuccess());
874 case coro::ABI::Retcon:
875 case coro::ABI::RetconOnce: {
876 Argument *NewStorage = &*NewF->arg_begin();
877 auto FramePtrTy = PointerType::getUnqual(Shape.FrameTy->getContext());
880 if (Shape.RetconLowering.IsFrameInlineInStorage)
884 return Builder.CreateLoad(FramePtrTy, NewStorage);
892 Align Alignment,
bool NoAlias) {
902 Attrs = Attrs.addParamAttributes(
Context, ParamIndex, ParamAttrs);
906 unsigned ParamIndex) {
909 Attrs = Attrs.addParamAttributes(
Context, ParamIndex, ParamAttrs);
913 unsigned ParamIndex) {
916 Attrs = Attrs.addParamAttributes(
Context, ParamIndex, ParamAttrs);
921void CoroCloner::create() {
925 OrigF.getParent()->end(), ActiveSuspend);
936 VMap[&
A] = DummyArgs.
back();
943 auto savedVisibility = NewF->getVisibility();
944 auto savedUnnamedAddr = NewF->getUnnamedAddr();
945 auto savedDLLStorageClass = NewF->getDLLStorageClass();
950 auto savedLinkage = NewF->getLinkage();
954 CloneFunctionChangeType::LocalChangesOnly, Returns);
956 auto &
Context = NewF->getContext();
965 assert(SP != OrigF.getSubprogram() && SP->isDistinct());
967 if (
auto DL = ActiveSuspend->getDebugLoc())
968 if (SP->getFile() ==
DL->getFile())
969 SP->setScopeLine(
DL->getLine());
979 SP->getUnit()->getSourceLanguage() == dwarf::DW_LANG_Swift) {
980 SP->replaceLinkageName(
MDString::get(Context, NewF->getName()));
981 if (
auto *Decl = SP->getDeclaration()) {
982 auto *NewDecl = DISubprogram::get(
983 Decl->getContext(), Decl->getScope(), Decl->getName(),
984 NewF->getName(), Decl->getFile(), Decl->getLine(), Decl->getType(),
985 Decl->getScopeLine(), Decl->getContainingType(),
986 Decl->getVirtualIndex(), Decl->getThisAdjustment(),
987 Decl->getFlags(), Decl->getSPFlags(), Decl->getUnit(),
988 Decl->getTemplateParams(),
nullptr, Decl->getRetainedNodes(),
989 Decl->getThrownTypes(), Decl->getAnnotations(),
990 Decl->getTargetFuncName());
991 SP->replaceDeclaration(NewDecl);
996 NewF->setLinkage(savedLinkage);
997 NewF->setVisibility(savedVisibility);
998 NewF->setUnnamedAddr(savedUnnamedAddr);
999 NewF->setDLLStorageClass(savedDLLStorageClass);
1003 if (Shape.ABI == coro::ABI::Switch &&
1004 NewF->hasMetadata(LLVMContext::MD_func_sanitize))
1005 NewF->eraseMetadata(LLVMContext::MD_func_sanitize);
1008 auto OrigAttrs = NewF->getAttributes();
1011 switch (Shape.ABI) {
1012 case coro::ABI::Switch:
1015 NewAttrs = NewAttrs.addFnAttributes(
1016 Context,
AttrBuilder(Context, OrigAttrs.getFnAttrs()));
1019 Shape.FrameAlign,
false);
1021 case coro::ABI::Async: {
1022 auto *ActiveAsyncSuspend = cast<CoroSuspendAsyncInst>(ActiveSuspend);
1023 if (OrigF.hasParamAttribute(Shape.AsyncLowering.ContextArgNo,
1024 Attribute::SwiftAsync)) {
1026 ActiveAsyncSuspend->getStorageArgumentIndex();
1027 auto ContextArgIndex = ArgAttributeIndices & 0xff;
1032 auto SwiftSelfIndex = ArgAttributeIndices >> 8;
1038 auto FnAttrs = OrigF.getAttributes().getFnAttrs();
1039 NewAttrs = NewAttrs.addFnAttributes(Context,
AttrBuilder(Context, FnAttrs));
1042 case coro::ABI::Retcon:
1043 case coro::ABI::RetconOnce:
1046 NewAttrs = Shape.RetconLowering.ResumePrototype->getAttributes();
1050 Shape.getRetconCoroId()->getStorageSize(),
1051 Shape.getRetconCoroId()->getStorageAlignment(),
1057 switch (Shape.ABI) {
1062 case coro::ABI::Switch:
1063 case coro::ABI::RetconOnce:
1072 case coro::ABI::Retcon:
1078 case coro::ABI::Async:
1082 NewF->setAttributes(NewAttrs);
1083 NewF->setCallingConv(Shape.getResumeFunctionCC());
1086 replaceEntryBlock();
1089 for (
CallInst *ResumeCall : Shape.SymmetricTransfers) {
1090 ResumeCall = cast<CallInst>(VMap[ResumeCall]);
1103 Builder.CreateRetVoid();
1107 Builder.SetInsertPoint(&NewF->getEntryBlock().front());
1108 NewFramePtr = deriveNewFramePointer();
1111 Value *OldFramePtr = VMap[Shape.FramePtr];
1112 NewFramePtr->
takeName(OldFramePtr);
1116 auto *NewVFrame = Builder.CreateBitCast(
1117 NewFramePtr, PointerType::getUnqual(Builder.getContext()),
"vFrame");
1118 Value *OldVFrame = cast<Value>(VMap[Shape.CoroBegin]);
1119 if (OldVFrame != NewVFrame)
1126 DummyArg->deleteValue();
1129 switch (Shape.ABI) {
1130 case coro::ABI::Switch:
1134 if (Shape.SwitchLowering.HasFinalSuspend)
1135 handleFinalSuspend();
1137 case coro::ABI::Async:
1138 case coro::ABI::Retcon:
1139 case coro::ABI::RetconOnce:
1142 assert(ActiveSuspend !=
nullptr &&
1143 "no active suspend when lowering a continuation-style coroutine");
1144 replaceRetconOrAsyncSuspendUses();
1149 replaceCoroSuspends();
1162 if (Shape.ABI == coro::ABI::Switch)
1164 FKind == CoroCloner::Kind::SwitchCleanup);
1170 auto *FuncPtrStruct = cast<ConstantStruct>(
1172 auto *OrigRelativeFunOffset = FuncPtrStruct->getOperand(0);
1173 auto *OrigContextSize = FuncPtrStruct->getOperand(1);
1174 auto *NewContextSize = ConstantInt::get(OrigContextSize->getType(),
1177 FuncPtrStruct->getType(), OrigRelativeFunOffset, NewContextSize);
1183 if (Shape.
ABI == coro::ABI::Async)
1196 auto *SizeIntrin = Shape.
CoroSizes.back();
1197 Module *M = SizeIntrin->getModule();
1200 auto *SizeConstant = ConstantInt::get(SizeIntrin->getType(),
Size);
1224 auto *CoroId = CoroBegin->
getId();
1226 switch (Shape.
ABI) {
1227 case coro::ABI::Switch: {
1228 auto SwitchId = cast<CoroIdInst>(CoroId);
1234 AllocInst->replaceAllUsesWith(Builder.
getFalse());
1235 AllocInst->eraseFromParent();
1236 CoroBegin->replaceAllUsesWith(Frame);
1238 CoroBegin->replaceAllUsesWith(CoroBegin->getMem());
1243 case coro::ABI::Async:
1244 case coro::ABI::Retcon:
1245 case coro::ABI::RetconOnce:
1250 CoroBegin->eraseFromParent();
1259 if (isa<IntrinsicInst>(
I))
1262 if (isa<CallBase>(
I))
1278 while (!Worklist.
empty()) {
1288 Set.
erase(ResDesBB);
1290 for (
auto *BB : Set)
1299 auto *ResumeOrDestroyBB = ResumeOrDestroy->
getParent();
1301 if (SaveBB == ResumeOrDestroyBB)
1329 Prev = Pred->getTerminator();
1332 CallBase *CB = dyn_cast<CallBase>(Prev);
1339 auto *SubFn = dyn_cast<CoroSubFnInst>(Callee);
1344 if (SubFn->getFrame() != CoroBegin)
1358 Save->eraseFromParent();
1361 if (
auto *Invoke = dyn_cast<InvokeInst>(CB)) {
1370 if (CalledValue != SubFn && CalledValue->user_empty())
1371 if (
auto *
I = dyn_cast<Instruction>(CalledValue))
1372 I->eraseFromParent();
1375 if (SubFn->user_empty())
1376 SubFn->eraseFromParent();
1384 if (Shape.
ABI != coro::ABI::Switch)
1388 size_t I = 0,
N = S.size();
1392 size_t ChangedFinalIndex = std::numeric_limits<size_t>::max();
1394 auto SI = cast<CoroSuspendInst>(S[
I]);
1403 if (cast<CoroSuspendInst>(S[
I])->isFinal()) {
1405 ChangedFinalIndex =
I;
1417 if (ChangedFinalIndex <
N) {
1418 assert(cast<CoroSuspendInst>(S[ChangedFinalIndex])->isFinal());
1419 std::swap(S[ChangedFinalIndex], S.back());
1425struct SwitchCoroutineSplitter {
1431 createResumeEntryBlock(
F, Shape);
1433 createClone(
F,
".resume", Shape, CoroCloner::Kind::SwitchResume,
TTI);
1434 auto *DestroyClone =
1435 createClone(
F,
".destroy", Shape, CoroCloner::Kind::SwitchUnwind,
TTI);
1436 auto *CleanupClone =
1437 createClone(
F,
".cleanup", Shape, CoroCloner::Kind::SwitchCleanup,
TTI);
1444 updateCoroFrame(Shape, ResumeClone, DestroyClone, CleanupClone);
1454 setCoroInfo(
F, Shape, Clones);
1464 CoroCloner Cloner(
F, Suffix, Shape, FKind,
TTI);
1466 return Cloner.getFunction();
1488 auto *FrameTy = Shape.
FrameTy;
1489 auto *GepIndex = Builder.CreateStructGEP(
1496 size_t SuspendIndex = 0;
1498 auto *S = cast<CoroSuspendInst>(AnyS);
1504 auto *Save = S->getCoroSave();
1505 Builder.SetInsertPoint(Save);
1511 auto *GepIndex = Builder.CreateStructGEP(
1513 Builder.CreateStore(IndexVal, GepIndex);
1517 Save->eraseFromParent();
1542 auto *SuspendBB = S->getParent();
1546 S->getNextNode(), ResumeBB->
getName() +
Twine(
".landing"));
1547 Switch->addCase(IndexVal, ResumeBB);
1549 cast<BranchInst>(SuspendBB->getTerminator())->setSuccessor(0, LandingBB);
1551 PN->insertBefore(LandingBB->begin());
1552 S->replaceAllUsesWith(PN);
1553 PN->addIncoming(Builder.getInt8(-1), SuspendBB);
1554 PN->addIncoming(S, ResumeBB);
1559 Builder.SetInsertPoint(UnreachBB);
1560 Builder.CreateUnreachable();
1570 auto *ResumeAddr = Builder.CreateStructGEP(
1573 Builder.CreateStore(ResumeFn, ResumeAddr);
1575 Value *DestroyOrCleanupFn = DestroyFn;
1581 DestroyOrCleanupFn = Builder.CreateSelect(CA, DestroyFn, CleanupFn);
1584 auto *DestroyAddr = Builder.CreateStructGEP(
1587 Builder.CreateStore(DestroyOrCleanupFn, DestroyAddr);
1611 auto *ArrTy = ArrayType::get(Part->
getType(),
Args.size());
1615 GlobalVariable::PrivateLinkage, ConstVal,
1616 F.getName() +
Twine(
".resumers"));
1628 Value *Continuation) {
1631 auto *Int8PtrTy = PointerType::getUnqual(
Context);
1636 ResumeIntrinsic->eraseFromParent();
1646 for (
auto *paramTy : FnTy->params()) {
1648 if (paramTy != FnArgs[ArgIdx]->
getType())
1667 auto *TailCall = Builder.
CreateCall(FnTy, MustTailCallFn, CallArgs);
1672 TailCall->setDebugLoc(Loc);
1684 F.removeFnAttr(Attribute::NoReturn);
1685 F.removeRetAttr(Attribute::NoAlias);
1686 F.removeRetAttr(Attribute::NonNull);
1689 auto *Int8PtrTy = PointerType::getUnqual(
Context);
1698 "async.ctx.frameptr");
1709 auto NextF = std::next(
F.getIterator());
1717 auto ResumeNameSuffix =
".resume.";
1718 auto ProjectionFunctionName =
1719 Suspend->getAsyncContextProjectionFunction()->getName();
1720 bool UseSwiftMangling =
false;
1721 if (ProjectionFunctionName ==
"__swift_async_resume_project_context") {
1722 ResumeNameSuffix =
"TQ";
1723 UseSwiftMangling =
true;
1724 }
else if (ProjectionFunctionName ==
"__swift_async_resume_get_context") {
1725 ResumeNameSuffix =
"TY";
1726 UseSwiftMangling =
true;
1730 UseSwiftMangling ? ResumeNameSuffix +
Twine(
Idx) +
"_"
1737 auto *SuspendBB = Suspend->getParent();
1738 auto *NewSuspendBB = SuspendBB->splitBasicBlock(Suspend);
1739 auto *Branch = cast<BranchInst>(SuspendBB->getTerminator());
1744 Branch->setSuccessor(0, ReturnBB);
1749 auto *Fn = Suspend->getMustTailCallFunction();
1766 auto *Clone = Clones[
Idx];
1768 CoroCloner(
F,
"resume." +
Twine(
Idx), Shape, Clone, Suspend,
TTI).create();
1775 assert(Shape.
ABI == coro::ABI::Retcon || Shape.
ABI == coro::ABI::RetconOnce);
1780 F.removeFnAttr(Attribute::NoReturn);
1781 F.removeRetAttr(Attribute::NoAlias);
1782 F.removeRetAttr(Attribute::NonNull);
1788 RawFramePtr = Id->getStorage();
1804 Builder.
CreateStore(RawFramePtr, Id->getStorage());
1820 auto NextF = std::next(
F.getIterator());
1824 for (
size_t i = 0, e = Shape.
CoroSuspends.size(); i != e; ++i) {
1825 auto Suspend = cast<CoroSuspendRetconInst>(Shape.
CoroSuspends[i]);
1834 auto SuspendBB = Suspend->getParent();
1835 auto NewSuspendBB = SuspendBB->splitBasicBlock(Suspend);
1836 auto Branch = cast<BranchInst>(SuspendBB->getTerminator());
1860 auto RetTy =
F.getReturnType();
1865 auto CastedContinuationTy =
1866 (ReturnPHIs.
size() == 1 ?
RetTy :
RetTy->getStructElementType(0));
1867 auto *CastedContinuation =
1871 if (ReturnPHIs.
size() == 1) {
1872 RetV = CastedContinuation;
1876 for (
size_t I = 1, E = ReturnPHIs.
size();
I != E; ++
I)
1884 Branch->setSuccessor(0, ReturnBB);
1885 ReturnPHIs[0]->addIncoming(Continuation, SuspendBB);
1886 size_t NextPHIIndex = 1;
1887 for (
auto &VUse : Suspend->value_operands())
1888 ReturnPHIs[NextPHIIndex++]->addIncoming(&*VUse, SuspendBB);
1893 for (
size_t i = 0, e = Shape.
CoroSuspends.size(); i != e; ++i) {
1895 auto Clone = Clones[i];
1897 CoroCloner(
F,
"resume." +
Twine(i), Shape, Clone, Suspend,
TTI).create();
1906 PrettyStackTraceFunction(
Function &F) :
F(
F) {}
1908 OS <<
"While splitting coroutine ";
1909 F.printAsOperand(
OS,
false,
F.getParent());
1918 std::function<
bool(
Instruction &)> MaterializableCallback) {
1919 PrettyStackTraceFunction prettyStackTrace(
F);
1932 buildCoroutineFrame(
F, Shape,
TTI, MaterializableCallback);
1940 switch (Shape.
ABI) {
1941 case coro::ABI::Switch:
1942 SwitchCoroutineSplitter::split(
F, Shape, Clones,
TTI);
1944 case coro::ABI::Async:
1947 case coro::ABI::Retcon:
1948 case coro::ABI::RetconOnce:
1963 for (
auto *DDI : DbgInsts)
1987 if (Shape.
ABI != coro::ABI::Switch)
1993 End->eraseFromParent();
1997 if (!Clones.
empty()) {
1998 switch (Shape.
ABI) {
1999 case coro::ABI::Switch:
2005 case coro::ABI::Async:
2006 case coro::ABI::Retcon:
2007 case coro::ABI::RetconOnce:
2010 if (!Clones.empty())
2039 auto *Cast = dyn_cast<BitCastInst>(U.getUser());
2040 if (!Cast || Cast->getType() != Fn->getType())
2044 Cast->replaceAllUsesWith(Fn);
2045 Cast->eraseFromParent();
2054 while (
auto *Cast = dyn_cast<BitCastInst>(CastFn)) {
2055 if (!Cast->use_empty())
2057 CastFn = Cast->getOperand(0);
2058 Cast->eraseFromParent();
2064 bool Changed =
false;
2067 auto *Prepare = cast<CallInst>(
P.getUser());
2078 auto *PrepareFn = M.getFunction(
Name);
2079 if (PrepareFn && !PrepareFn->use_empty())
2084 : MaterializableCallback(coro::defaultMaterializable),
2085 OptimizeFrame(OptimizeFrame) {}
2093 Module &M = *
C.begin()->getFunction().getParent();
2105 if (
N.getFunction().isPresplitCoroutine())
2108 if (Coroutines.
empty() && PrepareFns.
empty())
2111 if (Coroutines.
empty()) {
2112 for (
auto *PrepareFn : PrepareFns) {
2120 LLVM_DEBUG(
dbgs() <<
"CoroSplit: Processing coroutine '" <<
F.getName()
2122 F.setSplittedCoroutine();
2133 <<
"Split '" <<
ore::NV(
"function",
F.getName())
2134 <<
"' (frame_size=" <<
ore::NV(
"frame_size", Shape.FrameSize)
2135 <<
", align=" <<
ore::NV(
"align", Shape.FrameAlign.value()) <<
")";
2138 if (!Shape.CoroSuspends.empty()) {
2146 if (!PrepareFns.
empty()) {
2147 for (
auto *PrepareFn : PrepareFns) {
MachineBasicBlock MachineBasicBlock::iterator DebugLoc DL
amdgpu aa AMDGPU Address space based Alias Analysis Wrapper
AMDGPU Lower Kernel Arguments
Expand Atomic instructions
This file contains the simple types necessary to represent the attributes associated with functions a...
BlockVerifier::State From
static GCRegistry::Add< ErlangGC > A("erlang", "erlang-compatible garbage collector")
This file provides interfaces used to manipulate a call graph, regardless if it is a "old style" Call...
This file provides interfaces used to build and manipulate a call graph, which is a very useful tool ...
This file contains the declarations for the subclasses of Constant, which represent the different fla...
Remove calls to llvm coro end in the original static function void removeCoroEnds(const coro::Shape &Shape)
static void addSwiftSelfAttrs(AttributeList &Attrs, LLVMContext &Context, unsigned ParamIndex)
static bool hasCallsBetween(Instruction *Save, Instruction *ResumeOrDestroy)
static std::pair< SmallVector< DbgVariableIntrinsic *, 8 >, SmallVector< DbgVariableRecord * > > collectDbgVariableIntrinsics(Function &F)
Returns all DbgVariableIntrinsic in F.
static void replaceSwiftErrorOps(Function &F, coro::Shape &Shape, ValueToValueMapTy *VMap)
static void addAsyncContextAttrs(AttributeList &Attrs, LLVMContext &Context, unsigned ParamIndex)
static void maybeFreeRetconStorage(IRBuilder<> &Builder, const coro::Shape &Shape, Value *FramePtr, CallGraph *CG)
static bool hasCallsInBlocksBetween(BasicBlock *SaveBB, BasicBlock *ResDesBB)
static Function * createCloneDeclaration(Function &OrigF, coro::Shape &Shape, const Twine &Suffix, Module::iterator InsertBefore, AnyCoroSuspendInst *ActiveSuspend)
static FunctionType * getFunctionTypeFromAsyncSuspend(AnyCoroSuspendInst *Suspend)
static void addPrepareFunction(const Module &M, SmallVectorImpl< Function * > &Fns, StringRef Name)
static void updateCallGraphAfterCoroutineSplit(LazyCallGraph::Node &N, const coro::Shape &Shape, const SmallVectorImpl< Function * > &Clones, LazyCallGraph::SCC &C, LazyCallGraph &CG, CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR, FunctionAnalysisManager &FAM)
static void simplifySuspendPoints(coro::Shape &Shape)
static void addFramePointerAttrs(AttributeList &Attrs, LLVMContext &Context, unsigned ParamIndex, uint64_t Size, Align Alignment, bool NoAlias)
static bool replaceAllPrepares(Function *PrepareFn, LazyCallGraph &CG, LazyCallGraph::SCC &C)
static void replaceFallthroughCoroEnd(AnyCoroEndInst *End, const coro::Shape &Shape, Value *FramePtr, bool InResume, CallGraph *CG)
Replace a non-unwind call to llvm.coro.end.
static void replaceFrameSizeAndAlignment(coro::Shape &Shape)
static bool replaceCoroEndAsync(AnyCoroEndInst *End)
Replace an llvm.coro.end.async.
static void splitRetconCoroutine(Function &F, coro::Shape &Shape, SmallVectorImpl< Function * > &Clones, TargetTransformInfo &TTI)
Replace a call to llvm coro prepare static retcon void replacePrepare(CallInst *Prepare, LazyCallGraph &CG, LazyCallGraph::SCC &C)
static void replaceUnwindCoroEnd(AnyCoroEndInst *End, const coro::Shape &Shape, Value *FramePtr, bool InResume, CallGraph *CG)
Replace an unwind call to llvm.coro.end.
static bool simplifySuspendPoint(CoroSuspendInst *Suspend, CoroBeginInst *CoroBegin)
static bool hasCallsInBlockBetween(Instruction *From, Instruction *To)
static void markCoroutineAsDone(IRBuilder<> &Builder, const coro::Shape &Shape, Value *FramePtr)
static void updateAsyncFuncPointerContextSize(coro::Shape &Shape)
static void replaceCoroEnd(AnyCoroEndInst *End, const coro::Shape &Shape, Value *FramePtr, bool InResume, CallGraph *CG)
static void lowerAwaitSuspend(IRBuilder<> &Builder, CoroAwaitSuspendInst *CB, coro::Shape &Shape)
static void lowerAwaitSuspends(Function &F, coro::Shape &Shape)
static void handleNoSuspendCoroutine(coro::Shape &Shape)
static coro::Shape splitCoroutine(Function &F, SmallVectorImpl< Function * > &Clones, TargetTransformInfo &TTI, bool OptimizeFrame, std::function< bool(Instruction &)> MaterializableCallback)
static void postSplitCleanup(Function &F)
static void splitAsyncCoroutine(Function &F, coro::Shape &Shape, SmallVectorImpl< Function * > &Clones, TargetTransformInfo &TTI)
Coerce the arguments in p FnArgs according to p FnTy in p static CallArgs void coerceArguments(IRBuilder<> &Builder, FunctionType *FnTy, ArrayRef< Value * > FnArgs, SmallVectorImpl< Value * > &CallArgs)
static void replaceAsyncResumeFunction(CoroSuspendAsyncInst *Suspend, Value *Continuation)
static Error split(StringRef Str, char Separator, std::pair< StringRef, StringRef > &Split)
Checked version of split, to ensure mandatory subparts.
Returns the sub type a function will return at a given Idx Should correspond to the result type of an ExtractValue instruction executed with just that one unsigned Idx
This file defines the DenseMap class.
This file contains constants used for implementing Dwarf debug support.
static Function * getFunction(Constant *C)
Rewrite Partial Register Uses
This file provides various utilities for inspecting and working with the control flow graph in LLVM I...
Implements a lazy call graph analysis and related passes for the new pass manager.
Module.h This file contains the declarations for the Module class.
FunctionAnalysisManager FAM
This file provides a priority worklist.
const SmallVectorImpl< MachineOperand > & Cond
assert(ImpDefSCC.getReg()==AMDGPU::SCC &&ImpDefSCC.isDef())
This file defines the SmallPtrSet class.
This file defines the SmallVector class.
static SymbolRef::Type getType(const Symbol *Sym)
static const unsigned FramePtr
void setSwiftError(bool V)
Specify whether this alloca is used to represent a swifterror.
void setAlignment(Align Align)
A container for analyses that lazily runs them and caches their results.
PassT::Result & getResult(IRUnitT &IR, ExtraArgTs... ExtraArgs)
Get the result of an analysis pass for a given IR unit.
CoroAllocInst * getCoroAlloc()
This class represents an incoming formal argument to a Function.
ArrayRef - Represent a constant reference to an array (0 or more elements consecutively in memory),...
ArrayRef< T > drop_front(size_t N=1) const
Drop the first N elements of the array.
size_t size() const
size - Get the array size.
AttrBuilder & addAlignmentAttr(MaybeAlign Align)
This turns an alignment into the form used internally in Attribute.
AttrBuilder & addAttribute(Attribute::AttrKind Val)
Add an attribute to the builder.
AttrBuilder & addDereferenceableAttr(uint64_t Bytes)
This turns the number of dereferenceable bytes into the form used internally in Attribute.
AttributeList removeParamAttributes(LLVMContext &C, unsigned ArgNo, const AttributeMask &AttrsToRemove) const
Remove the specified attribute at the specified arg index from this attribute list.
LLVM Basic Block Representation.
const Instruction * getFirstNonPHI() const
Returns a pointer to the first instruction in this block that is not a PHINode instruction.
static BasicBlock * Create(LLVMContext &Context, const Twine &Name="", Function *Parent=nullptr, BasicBlock *InsertBefore=nullptr)
Creates a new BasicBlock.
BasicBlock * splitBasicBlock(iterator I, const Twine &BBName="", bool Before=false)
Split the basic block into two basic blocks at the specified instruction.
const BasicBlock * getSinglePredecessor() const
Return the predecessor of this block if it has a single predecessor block.
const Function * getParent() const
Return the enclosing method, or null if none.
const Instruction * getTerminator() const LLVM_READONLY
Returns the terminator instruction if the block is well formed or null if the block is not well forme...
static BranchInst * Create(BasicBlock *IfTrue, BasicBlock::iterator InsertBefore)
Base class for all callable instructions (InvokeInst and CallInst) Holds everything related to callin...
void setCallingConv(CallingConv::ID CC)
Function * getCalledFunction() const
Returns the function called, or null if this is an indirect function invocation or the function signa...
Value * getCalledOperand() const
void setAttributes(AttributeList A)
Set the parameter attributes for this call.
Value * getArgOperand(unsigned i) const
AttributeList getAttributes() const
Return the parameter attributes for this call.
The basic data container for the call graph of a Module of IR.
This class represents a function call, abstracting a target machine's calling convention.
void setTailCallKind(TailCallKind TCK)
static Constant * get(ArrayType *T, ArrayRef< Constant * > V)
static Constant * getPointerCast(Constant *C, Type *Ty)
Create a BitCast, AddrSpaceCast, or a PtrToInt cast constant expression.
This is the shared class of boolean and integer constants.
static ConstantInt * getTrue(LLVMContext &Context)
static ConstantInt * getFalse(LLVMContext &Context)
static ConstantPointerNull * get(PointerType *T)
Static factory methods - Return objects of the specified value.
static Constant * get(StructType *T, ArrayRef< Constant * > V)
static ConstantTokenNone * get(LLVMContext &Context)
Return the ConstantTokenNone.
This represents the llvm.coro.align instruction.
This represents the llvm.coro.alloc instruction.
This represents the llvm.coro.await.suspend.{void,bool,handle} instructions.
Value * getAwaiter() const
Function * getWrapperFunction() const
This class represents the llvm.coro.begin instruction.
AnyCoroIdInst * getId() const
This represents the llvm.coro.id instruction.
void setInfo(Constant *C)
This represents the llvm.coro.size instruction.
This represents the llvm.coro.suspend.async instruction.
CoroAsyncResumeInst * getResumeFunction() const
This represents the llvm.coro.suspend instruction.
CoroSaveInst * getCoroSave() const
DISubprogram * getSubprogram() const
Get the subprogram for this scope.
This class represents an Operation in the Expression.
A parsed version of the target data layout string in and methods for querying it.
This is the common base class for debug info intrinsics for variables.
Record of a variable value-assignment, aka a non-instruction representation of the dbg.value intrinsic.
Concrete subclass of DominatorTreeBase that is used to compute a normal dominator tree.
This class represents a freeze function that returns random concrete value if an operand is either an undef value or a poison value.
A proxy from a FunctionAnalysisManager to an SCC.
Type * getReturnType() const
static Function * Create(FunctionType *Ty, LinkageTypes Linkage, unsigned AddrSpace, const Twine &N="", Module *M=nullptr)
FunctionType * getFunctionType() const
Returns the FunctionType for me.
Intrinsic::ID getIntrinsicID() const LLVM_READONLY
getIntrinsicID - This method returns the ID number of the specified function, or Intrinsic::not_intrinsic if the function is not an intrinsic.
CallingConv::ID getCallingConv() const
getCallingConv()/setCallingConv(CC) - These method get and set the calling convention of this function.
LLVMContext & getContext() const
getContext - Return a reference to the LLVMContext associated with this function.
Module * getParent()
Get the module that this global value is contained inside of...
PointerType * getType() const
Global values are always pointers.
@ ExternalLinkage
Externally visible function.
const Constant * getInitializer() const
getInitializer - Return the initializer for this global variable.
void setInitializer(Constant *InitVal)
setInitializer - Sets the initializer for this global variable, removing any existing initializer if ...
AllocaInst * CreateAlloca(Type *Ty, unsigned AddrSpace, Value *ArraySize=nullptr, const Twine &Name="")
Value * CreateInsertValue(Value *Agg, Value *Val, ArrayRef< unsigned > Idxs, const Twine &Name="")
InvokeInst * CreateInvoke(FunctionType *Ty, Value *Callee, BasicBlock *NormalDest, BasicBlock *UnwindDest, ArrayRef< Value * > Args, ArrayRef< OperandBundleDef > OpBundles, const Twine &Name="")
Create an invoke instruction.
BasicBlock::iterator GetInsertPoint() const
Value * CreateStructGEP(Type *Ty, Value *Ptr, unsigned Idx, const Twine &Name="")
Value * CreateConstInBoundsGEP1_32(Type *Ty, Value *Ptr, unsigned Idx0, const Twine &Name="")
CleanupReturnInst * CreateCleanupRet(CleanupPadInst *CleanupPad, BasicBlock *UnwindBB=nullptr)
ReturnInst * CreateRet(Value *V)
Create a 'ret <val>' instruction.
ConstantInt * getInt64(uint64_t C)
Get a constant 64-bit value.
Value * CreateBitOrPointerCast(Value *V, Type *DestTy, const Twine &Name="")
PHINode * CreatePHI(Type *Ty, unsigned NumReservedValues, const Twine &Name="")
Value * CreateBitCast(Value *V, Type *DestTy, const Twine &Name="")
LoadInst * CreateLoad(Type *Ty, Value *Ptr, const char *Name)
Provided to resolve 'CreateLoad(Ty, Ptr, "...")' correctly, instead of converting the string to 'bool' for the isVolatile parameter.
LLVMContext & getContext() const
ReturnInst * CreateRetVoid()
Create a 'ret void' instruction.
StoreInst * CreateStore(Value *Val, Value *Ptr, bool isVolatile=false)
ConstantInt * getFalse()
Get the constant value for i1 false.
void SetInsertPoint(BasicBlock *TheBB)
This specifies that created instructions should be appended to the end of the specified block.
CallInst * CreateCall(FunctionType *FTy, Value *Callee, ArrayRef< Value * > Args=std::nullopt, const Twine &Name="", MDNode *FPMathTag=nullptr)
This provides a uniform API for creating instructions and inserting them into a basic block: either at the end of a BasicBlock, or at a specific iterator location in a block.
This class captures the data input to the InlineFunction call, and records the auxiliary results prod...
const BasicBlock * getParent() const
InstListType::iterator eraseFromParent()
This method unlinks 'this' from the containing basic block and deletes it.
This is an important class for using LLVM in a threaded context.
A node in the call graph.
An SCC of the call graph.
A lazily constructed view of the call graph of a module.
void addSplitFunction(Function &OriginalFunction, Function &NewFunction)
Add a new function split/outlined from an existing function.
void addSplitRefRecursiveFunctions(Function &OriginalFunction, ArrayRef< Function * > NewFunctions)
Add new ref-recursive functions split/outlined from an existing function.
Node & get(Function &F)
Get a graph node for a given function, scanning it to populate the graph data as necessary.
SCC * lookupSCC(Node &N) const
Lookup a function's SCC in the graph.
static MDString * get(LLVMContext &Context, StringRef Str)
A Module instance is used to store all the information related to an LLVM module.
FunctionListType::iterator iterator
The Function iterators.
static PHINode * Create(Type *Ty, unsigned NumReservedValues, const Twine &NameStr, BasicBlock::iterator InsertBefore)
Constructors - NumReservedValues is a hint for the number of incoming edges that this phi node will have (use 0 if you really have no idea).
static PoisonValue * get(Type *T)
Static factory methods - Return an 'poison' object of the specified type.
A set of analyses that are preserved following a run of a transformation pass.
static PreservedAnalyses none()
Convenience factory function for the empty preserved set.
static PreservedAnalyses all()
Construct a special preserved set that preserves all passes.
PrettyStackTraceEntry - This class is used to represent a frame of the "pretty" stack trace that is dumped when a crash occurs.
virtual void print(raw_ostream &OS) const =0
print - Emit information about this stack frame to OS.
Return a value (possibly void), from a function.
bool erase(PtrType Ptr)
erase - If the set contains the specified pointer, remove it and return true, otherwise return false.
std::pair< iterator, bool > insert(PtrType Ptr)
Inserts Ptr if and only if there is no element in the container equal to Ptr.
bool contains(ConstPtrType Ptr) const
SmallPtrSet - This class implements a set which is optimized for holding SmallSize or less elements.
This class consists of common code factored out of the SmallVector class to reduce code duplication b...
void reserve(size_type N)
void push_back(const T &Elt)
This is a 'vector' (really, a variable-sized array), optimized for the case when the array is small.
StringRef - Represent a constant reference to a string, i.e.
Type * getTypeAtIndex(const Value *V) const
Given an index value into the type, return the type of the element.
Analysis pass providing the TargetTransformInfo.
Value handle that tracks a Value across RAUW.
ValueTy * getValPtr() const
Triple - Helper class for working with autoconf configuration names.
bool isArch64Bit() const
Test whether the architecture is 64-bit.
Twine - A lightweight data structure for efficiently representing the concatenation of temporary values as strings.
The instances of the Type class are immutable: once they are created, they are never changed.
static Type * getVoidTy(LLVMContext &C)
static IntegerType * getInt8Ty(LLVMContext &C)
static UndefValue * get(Type *T)
Static factory methods - Return an 'undef' object of the specified type.
A Use represents the edge between a Value definition and its users.
void setOperand(unsigned i, Value *Val)
LLVM Value Representation.
Type * getType() const
All values are typed, get the type of this value.
void replaceAllUsesWith(Value *V)
Change all uses of this to point to a new Value.
iterator_range< user_iterator > users()
const Value * stripPointerCasts() const
Strip off pointer casts, all-zero GEPs and address space casts.
LLVMContext & getContext() const
All values hold a context through their type.
iterator_range< use_iterator > uses()
StringRef getName() const
Return a constant reference to the value's name.
void takeName(Value *V)
Transfer the name from V to this value.
NodeTy * getNextNode()
Get the next node, or nullptr for the list tail.
This class implements an extremely fast bulk output stream that can only output to a stream.
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
constexpr char Args[]
Key for Kernel::Metadata::mArgs.
@ C
The default llvm calling convention, compatible with C.
void salvageDebugInfo(SmallDenseMap< Argument *, AllocaInst *, 4 > &ArgToAllocaMap, DbgVariableIntrinsic &DVI, bool OptimizeFrame, bool IsEntryPoint)
Attempts to rewrite the location operand of debug intrinsics in terms of the coroutine frame pointer,...
@ Switch
The "resume-switch" lowering, where there are separate resume and destroy functions that are shared between all suspend points.
CallInst * createMustTailCall(DebugLoc Loc, Function *MustTailCallFn, TargetTransformInfo &TTI, ArrayRef< Value * > Arguments, IRBuilder<> &)
void replaceCoroFree(CoroIdInst *CoroId, bool Elide)
DiagnosticInfoOptimizationBase::Argument NV
This is an optimization pass for GlobalISel generic memory operations.
UnaryFunction for_each(R &&Range, UnaryFunction F)
Provide wrappers to std::for_each which take ranges instead of having to pass begin/end explicitly.
bool verifyFunction(const Function &F, raw_ostream *OS=nullptr)
Check a function for errors, useful for use when debugging a pass.
void salvageDebugInfo(const MachineRegisterInfo &MRI, MachineInstr &MI)
Assuming the instruction MI is going to be deleted, attempt to salvage debug users of MI by writing t...
LazyCallGraph::SCC & updateCGAndAnalysisManagerForFunctionPass(LazyCallGraph &G, LazyCallGraph::SCC &C, LazyCallGraph::Node &N, CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR, FunctionAnalysisManager &FAM)
Helper to update the call graph after running a function pass.
LazyCallGraph::SCC & updateCGAndAnalysisManagerForCGSCCPass(LazyCallGraph &G, LazyCallGraph::SCC &C, LazyCallGraph::Node &N, CGSCCAnalysisManager &AM, CGSCCUpdateResult &UR, FunctionAnalysisManager &FAM)
Helper to update the call graph after running a CGSCC pass.
iterator_range< early_inc_iterator_impl< detail::IterOfRange< RangeT > > > make_early_inc_range(RangeT &&Range)
Make a range that does early increment to allow mutation of the underlying range without disrupting iteration.
@ Async
"Asynchronous" unwind tables (instr precise)
raw_ostream & dbgs()
dbgs() - This returns a reference to a raw_ostream for debugging messages.
void report_fatal_error(Error Err, bool gen_crash_diag=true)
Report a serious error, calling any installed error handler.
unsigned changeToUnreachable(Instruction *I, bool PreserveLCSSA=false, DomTreeUpdater *DTU=nullptr, MemorySSAUpdater *MSSAU=nullptr)
Insert an unreachable instruction before the specified instruction, making it and the rest of the code in the block dead.
raw_fd_ostream & errs()
This returns a reference to a raw_ostream for standard error.
DWARFExpression::Operation Op
InlineResult InlineFunction(CallBase &CB, InlineFunctionInfo &IFI, bool MergeAttributes=false, AAResults *CalleeAAR=nullptr, bool InsertLifetime=true, Function *ForwardVarArgsTo=nullptr)
This function inlines the called function into the basic block of the caller.
void CloneFunctionInto(Function *NewFunc, const Function *OldFunc, ValueToValueMapTy &VMap, CloneFunctionChangeType Changes, SmallVectorImpl< ReturnInst * > &Returns, const char *NameSuffix="", ClonedCodeInfo *CodeInfo=nullptr, ValueMapTypeRemapper *TypeMapper=nullptr, ValueMaterializer *Materializer=nullptr)
Clone OldFunc into NewFunc, transforming the old arguments into references to VMap values.
auto predecessors(const MachineBasicBlock *BB)
static auto filterDbgVars(iterator_range< simple_ilist< DbgRecord >::iterator > R)
Filter the DbgRecord range to DbgVariableRecord types only and downcast.
bool removeUnreachableBlocks(Function &F, DomTreeUpdater *DTU=nullptr, MemorySSAUpdater *MSSAU=nullptr)
Remove all blocks that can not be reached from the function's entry.
bool isPotentiallyReachable(const Instruction *From, const Instruction *To, const SmallPtrSetImpl< BasicBlock * > *ExclusionSet=nullptr, const DominatorTree *DT=nullptr, const LoopInfo *LI=nullptr)
Determine whether instruction 'To' is reachable from 'From', without passing through any blocks in ExclusionSet, returning true if uncertain.
void swap(llvm::BitVector &LHS, llvm::BitVector &RHS)
Implement std::swap in terms of BitVector swap.
This struct is a compact representation of a valid (non-zero power of two) alignment.
uint64_t value() const
This is a hole in the type system and should not be abused.
Support structure for SCC passes to communicate updates the call graph back to the CGSCC pass manager...
SmallPriorityWorklist< LazyCallGraph::SCC *, 1 > & CWorklist
Worklist of the SCCs queued for processing.
const std::function< bool(Instruction &)> MaterializableCallback
PreservedAnalyses run(LazyCallGraph::SCC &C, CGSCCAnalysisManager &AM, LazyCallGraph &CG, CGSCCUpdateResult &UR)
CoroSplitPass(bool OptimizeFrame=false)
CallInst * makeSubFnCall(Value *Arg, int Index, Instruction *InsertPt)
GlobalVariable * AsyncFuncPointer
bool IsFrameInlineInStorage
SwitchInst * ResumeSwitch
BasicBlock * ResumeEntryBlock
SmallVector< CallInst *, 2 > SymmetricTransfers
SmallVector< CoroAwaitSuspendInst *, 4 > CoroAwaitSuspends
AsyncLoweringStorage AsyncLowering
FunctionType * getResumeFunctionType() const
IntegerType * getIndexType() const
CoroIdInst * getSwitchCoroId() const
SmallVector< CoroSizeInst *, 2 > CoroSizes
SmallVector< AnyCoroSuspendInst *, 4 > CoroSuspends
Value * emitAlloc(IRBuilder<> &Builder, Value *Size, CallGraph *CG) const
Allocate memory according to the rules of the active lowering.
SmallVector< CallInst *, 2 > SwiftErrorOps
ConstantInt * getIndex(uint64_t Value) const
bool OptimizeFrame
This would only be true if optimization are enabled.
SwitchLoweringStorage SwitchLowering
CoroBeginInst * CoroBegin
BasicBlock::iterator getInsertPtAfterFramePtr() const
ArrayRef< Type * > getRetconResultTypes() const
void emitDealloc(IRBuilder<> &Builder, Value *Ptr, CallGraph *CG) const
Deallocate memory according to the rules of the active lowering.
RetconLoweringStorage RetconLowering
SmallVector< CoroAlignInst *, 2 > CoroAligns
SmallVector< AnyCoroEndInst *, 4 > CoroEnds
unsigned getSwitchIndexField() const