#include <initializer_list>

#define DEBUG_TYPE "coro-split"

  Value *NewFramePtr = nullptr;

      : OrigF(OrigF), NewF(nullptr), Suffix(Suffix), Shape(Shape), FKind(FKind),

      : OrigF(OrigF), NewF(NewF), Suffix(Suffix), Shape(Shape),
        FKind(Shape.ABI == coro::ABI::Async ? Kind::Async : Kind::Continuation),
           Shape.ABI == coro::ABI::RetconOnce || Shape.ABI == coro::ABI::Async);
    assert(NewF && "need existing function for continuation");
    assert(ActiveSuspend && "need active suspend point for continuation");

    assert(NewF != nullptr && "declaration not yet set");
  bool isSwitchDestroyFunction() {
    case Kind::Continuation:
    case Kind::SwitchResume:
    case Kind::SwitchUnwind:
    case Kind::SwitchCleanup:

  void replaceEntryBlock();
  Value *deriveNewFramePointer();
  void replaceRetconOrAsyncSuspendUses();
  void replaceCoroSuspends();
  void replaceCoroEnds();
  void handleFinalSuspend();
  if (auto Invoke = dyn_cast<InvokeInst>(CB)) {
        Invoke->getUnwindDest(), {Awaiter, FramePtr});
    std::copy(Invoke->bundle_op_info_begin(), Invoke->bundle_op_info_end(),
              WrapperInvoke->bundle_op_info_begin());
    WrapperInvoke->setAttributes(NewAttributes);
    WrapperInvoke->setDebugLoc(Invoke->getDebugLoc());
    NewCall = WrapperInvoke;
  } else if (auto Call = dyn_cast<CallInst>(CB)) {
    WrapperCall->setDebugLoc(Call->getDebugLoc());
    NewCall = WrapperCall;

      Intrinsic::coro_await_suspend_handle) {
    if (auto *Invoke = dyn_cast<InvokeInst>(CB)) {
      Builder.SetInsertPoint(Invoke->getNormalDest()->getFirstInsertionPt());
    auto *ResumeCall = Builder.CreateCall(ResumeTy, ResumeAddr, {NewCall});
    NewCall = ResumeCall;
  assert(Shape.ABI == coro::ABI::Retcon || Shape.ABI == coro::ABI::RetconOnce);

  auto *EndAsync = dyn_cast<CoroAsyncEndInst>(End);
  auto *MustTailCallFunc = EndAsync->getMustTailCallFunction();
  if (!MustTailCallFunc) {
  auto *CoroEndBlock = End->getParent();
  auto *MustTailCallFuncBlock = CoroEndBlock->getSinglePredecessor();
  assert(MustTailCallFuncBlock && "Must have a single predecessor block");
  auto It = MustTailCallFuncBlock->getTerminator()->getIterator();
  auto *MustTailCall = cast<CallInst>(&*std::prev(It));
  CoroEndBlock->splice(End->getIterator(), MustTailCallFuncBlock,
                       MustTailCall->getIterator());
  auto *BB = End->getParent();
  BB->splitBasicBlock(End);
  BB->getTerminator()->eraseFromParent();
  assert(InlineRes.isSuccess() && "Expected inlining to succeed");
  case coro::ABI::Switch:
    assert(!cast<CoroEndInst>(End)->hasResults() &&
           "switch coroutine should not return any values");
  case coro::ABI::Async: {
    if (!CoroEndBlockNeedsCleanup)
  case coro::ABI::RetconOnce: {
    auto *CoroEnd = cast<CoroEndInst>(End);
    if (!CoroEnd->hasResults()) {
    auto *CoroResults = CoroEnd->getResults();
    unsigned NumReturns = CoroResults->numReturns();
    if (auto *RetStructTy = dyn_cast<StructType>(RetTy)) {
      assert(RetStructTy->getNumElements() == NumReturns &&
             "numbers of returns should match resume function signature");
      for (Value *RetValEl : CoroResults->return_values())
    } else if (NumReturns == 0) {
      Builder.CreateRet(*CoroResults->retval_begin());
    CoroResults->eraseFromParent();
  case coro::ABI::Retcon: {
    assert(!cast<CoroEndInst>(End)->hasResults() &&
           "retcon coroutine should not return any values");
    auto RetStructTy = dyn_cast<StructType>(RetTy);
        cast<PointerType>(RetStructTy ? RetStructTy->getElementType(0) : RetTy);

  auto *BB = End->getParent();
  BB->splitBasicBlock(End);
  BB->getTerminator()->eraseFromParent();
  assert(
      Shape.ABI == coro::ABI::Switch &&
      "markCoroutineAsDone is only supported for Switch-Resumed ABI for now.");
427 "The final suspend should only live in the last position of "
445 case coro::ABI::Switch: {
458 case coro::ABI::Async:
461 case coro::ABI::Retcon:
462 case coro::ABI::RetconOnce:
469 auto *FromPad = cast<CleanupPadInst>(Bundle->Inputs[0]);
471 End->getParent()->splitBasicBlock(
End);
472 CleanupRet->getParent()->getTerminator()->eraseFromParent();
483 auto &Context =
End->getContext();
486 End->eraseFromParent();
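// In the switch lowering, the final suspend point is handled specially: its
// case is removed from the resume switch, and in the destroy/cleanup clones a
// null resume-function slot (written by markCoroutineAsDone) is used to branch
// to the final-suspend block instead.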
void CoroCloner::handleFinalSuspend() {
  assert(Shape.ABI == coro::ABI::Switch &&
         Shape.SwitchLowering.HasFinalSuspend);

  if (isSwitchDestroyFunction() && Shape.SwitchLowering.HasUnwindCoroEnd)
  auto *Switch = cast<SwitchInst>(VMap[Shape.SwitchLowering.ResumeSwitch]);
  auto FinalCaseIt = std::prev(Switch->case_end());
  BasicBlock *ResumeBB = FinalCaseIt->getCaseSuccessor();
  Switch->removeCase(FinalCaseIt);
  if (isSwitchDestroyFunction()) {
    if (NewF->isCoroOnlyDestroyWhenComplete()) {
      Builder.CreateBr(ResumeBB);
    auto *GepIndex = Builder.CreateStructGEP(
        Builder.CreateLoad(Shape.getSwitchResumePointerType(), GepIndex);
    auto *Cond = Builder.CreateIsNull(Load);
    Builder.CreateCondBr(Cond, ResumeBB, NewSwitchBB);
  auto *AsyncSuspend = cast<CoroSuspendAsyncInst>(Suspend);
  auto *StructTy = cast<StructType>(AsyncSuspend->getType());
  auto &Context = Suspend->getParent()->getParent()->getContext();
  return FunctionType::get(VoidTy, StructTy->elements(), false);

  auto *FnTy = (Shape.ABI != coro::ABI::Async)
  M->getFunctionList().insert(InsertBefore, NewF);
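// Replace uses of the active llvm.coro.suspend.retcon/async call with the
// arguments to the continuation function: the continuation ABIs pass the
// suspend's "results" in as arguments of the resume clone.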
void CoroCloner::replaceRetconOrAsyncSuspendUses() {
  assert(Shape.ABI == coro::ABI::Retcon || Shape.ABI == coro::ABI::RetconOnce ||
         Shape.ABI == coro::ABI::Async);

  auto NewS = VMap[ActiveSuspend];
  if (NewS->use_empty())

  bool IsAsyncABI = Shape.ABI == coro::ABI::Async;
  for (auto I = IsAsyncABI ? NewF->arg_begin() : std::next(NewF->arg_begin()),

  if (!isa<StructType>(NewS->getType())) {
    NewS->replaceAllUsesWith(Args.front());

    auto *EVI = dyn_cast<ExtractValueInst>(U.getUser());
    if (!EVI || EVI->getNumIndices() != 1)
    EVI->replaceAllUsesWith(Args[EVI->getIndices().front()]);
    EVI->eraseFromParent();

  if (NewS->use_empty())

  for (size_t I = 0, E = Args.size(); I != E; ++I)
    Agg = Builder.CreateInsertValue(Agg, Args[I], I);
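// Replace every remaining (non-active) llvm.coro.suspend in the clone with a
// constant result; for the switch ABI this is i8 0 ("resumed") in the resume
// clone and i8 1 ("destroyed") in the destroy/cleanup clones.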
void CoroCloner::replaceCoroSuspends() {
  Value *SuspendResult;

  case coro::ABI::Switch:
    SuspendResult = Builder.getInt8(isSwitchDestroyFunction() ? 1 : 0);
  case coro::ABI::Async:
  case coro::ABI::RetconOnce:
  case coro::ABI::Retcon:

    if (CS == ActiveSuspend)
    auto *MappedCS = cast<AnyCoroSuspendInst>(VMap[CS]);
    MappedCS->replaceAllUsesWith(SuspendResult);
    MappedCS->eraseFromParent();
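// Lower the cloned llvm.coro.end markers in the new function; in a resume
// function the fall-through coro.end is (roughly) turned into a return rather
// than falling back into the ramp.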
void CoroCloner::replaceCoroEnds() {
    auto *NewCE = cast<AnyCoroEndInst>(VMap[CE]);

  Value *CachedSlot = nullptr;
  auto getSwiftErrorSlot = [&](Type *ValueTy) -> Value * {
    for (auto &Arg : F.args()) {
      if (Arg.isSwiftError()) {
    IRBuilder<> Builder(F.getEntryBlock().getFirstNonPHIOrDbg());

    auto MappedOp = VMap ? cast<CallInst>((*VMap)[Op]) : Op;
    if (Op->arg_empty()) {
      auto ValueTy = Op->getType();
      auto Slot = getSwiftErrorSlot(ValueTy);
      MappedResult = Builder.CreateLoad(ValueTy, Slot);
      auto Value = MappedOp->getArgOperand(0);
      auto Slot = getSwiftErrorSlot(ValueTy);
    MappedOp->replaceAllUsesWith(MappedResult);
    MappedOp->eraseFromParent();

  if (VMap == nullptr) {

static std::pair<SmallVector<DbgVariableIntrinsic *, 8>,
    if (auto *DVI = dyn_cast<DbgVariableIntrinsic>(&I))
  return {Intrinsics, DbgVariableRecords};
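// Rewrite debug intrinsics/records in the clone so their locations refer to
// the new frame pointer, and drop the ones that ended up in unreachable blocks
// or that still describe allocas with no remaining reachable uses.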
void CoroCloner::replaceSwiftErrorOps() {

void CoroCloner::salvageDebugInfo() {
  auto IsUnreachableBlock = [&](BasicBlock *BB) {
  auto RemoveOne = [&](auto *DVI) {
    if (IsUnreachableBlock(DVI->getParent()))
      DVI->eraseFromParent();
    else if (isa_and_nonnull<AllocaInst>(DVI->getVariableLocationOp(0))) {
      for (auto *User : DVI->getVariableLocationOp(0)->users())
        if (auto *I = dyn_cast<Instruction>(User))
          if (!isa<AllocaInst>(I) && !IsUnreachableBlock(I->getParent()))
        DVI->eraseFromParent();
  for_each(DbgVariableRecords, RemoveOne);
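// Move the cloned alloca-spill block in front of the old entry block, make the
// old entry unreachable, and branch to the resume point appropriate for this
// clone (the resume-switch entry for the switch ABI, or the block after the
// active suspend for continuation ABIs).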
void CoroCloner::replaceEntryBlock() {
  auto *Entry = cast<BasicBlock>(VMap[Shape.AllocaSpillBlock]);
  auto *OldEntry = &NewF->getEntryBlock();
  Entry->setName("entry" + Suffix);
  Entry->moveBefore(OldEntry);
  Entry->getTerminator()->eraseFromParent();

  auto BranchToEntry = cast<BranchInst>(Entry->user_back());
  assert(BranchToEntry->isUnconditional());
  Builder.SetInsertPoint(BranchToEntry);
  Builder.CreateUnreachable();
  BranchToEntry->eraseFromParent();

  Builder.SetInsertPoint(Entry);
  case coro::ABI::Switch: {
        cast<BasicBlock>(VMap[Shape.SwitchLowering.ResumeEntryBlock]);
    Builder.CreateBr(SwitchBB);
  case coro::ABI::Async:
  case coro::ABI::Retcon:
  case coro::ABI::RetconOnce: {
    assert((Shape.ABI == coro::ABI::Async &&
            isa<CoroSuspendAsyncInst>(ActiveSuspend)) ||
           ((Shape.ABI == coro::ABI::Retcon ||
             Shape.ABI == coro::ABI::RetconOnce) &&
            isa<CoroSuspendRetconInst>(ActiveSuspend)));
    auto *MappedCS = cast<AnyCoroSuspendInst>(VMap[ActiveSuspend]);
    auto Branch = cast<BranchInst>(MappedCS->getNextNode());
    Builder.CreateBr(Branch->getSuccessor(0));

    auto *Alloca = dyn_cast<AllocaInst>(&I);
    if (!Alloca || I.use_empty())
    if (DT.isReachableFromEntry(I.getParent()) ||
        !isa<ConstantInt>(Alloca->getArraySize()))
    I.moveBefore(*Entry, Entry->getFirstInsertionPt());
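// Derive the value of the new frame pointer for this clone: the first argument
// for the switch ABI, a pointer obtained from the callee's async context via
// the projection function for the async ABI, and the caller-provided storage
// (or a pointer loaded from it) for the retcon ABIs.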
Value *CoroCloner::deriveNewFramePointer() {
  case coro::ABI::Switch:
    return &*NewF->arg_begin();
  case coro::ABI::Async: {
    auto *ActiveAsyncSuspend = cast<CoroSuspendAsyncInst>(ActiveSuspend);
    auto ContextIdx = ActiveAsyncSuspend->getStorageArgumentIndex() & 0xff;
    auto *CalleeContext = NewF->getArg(ContextIdx);
    auto *ProjectionFunc =
        ActiveAsyncSuspend->getAsyncContextProjectionFunction();
        cast<CoroSuspendAsyncInst>(VMap[ActiveSuspend])->getDebugLoc();
    auto *CallerContext = Builder.CreateCall(ProjectionFunc->getFunctionType(),
                                             ProjectionFunc, CalleeContext);
    CallerContext->setCallingConv(ProjectionFunc->getCallingConv());
    CallerContext->setDebugLoc(DbgLoc);
    auto &Context = Builder.getContext();
    auto *FramePtrAddr = Builder.CreateConstInBoundsGEP1_32(
        Shape.AsyncLowering.FrameOffset, "async.ctx.frameptr");
    assert(InlineRes.isSuccess());
  case coro::ABI::Retcon:
  case coro::ABI::RetconOnce: {
    Argument *NewStorage = &*NewF->arg_begin();
    auto FramePtrTy = PointerType::getUnqual(Shape.FrameTy->getContext());
    if (Shape.RetconLowering.IsFrameInlineInStorage)
    return Builder.CreateLoad(FramePtrTy, NewStorage);

                                 Align Alignment, bool NoAlias) {
  Attrs = Attrs.addParamAttributes(Context, ParamIndex, ParamAttrs);
                              unsigned ParamIndex) {
  Attrs = Attrs.addParamAttributes(Context, ParamIndex, ParamAttrs);
                               unsigned ParamIndex) {
  Attrs = Attrs.addParamAttributes(Context, ParamIndex, ParamAttrs);
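// Clone the body of the original coroutine into this resume function: clone
// the IR, fix up the subprogram/debug info, rebuild attributes and calling
// convention for the target ABI, replace the entry block, derive the frame
// pointer, and lower the remaining suspends and coro.ends.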
void CoroCloner::create() {
                          OrigF.getParent()->end(), ActiveSuspend);

    VMap[&A] = DummyArgs.back();

  auto savedVisibility = NewF->getVisibility();
  auto savedUnnamedAddr = NewF->getUnnamedAddr();
  auto savedDLLStorageClass = NewF->getDLLStorageClass();
  auto savedLinkage = NewF->getLinkage();

                    CloneFunctionChangeType::LocalChangesOnly, Returns);

  auto &Context = NewF->getContext();
    assert(SP != OrigF.getSubprogram() && SP->isDistinct());
    if (auto DL = ActiveSuspend->getDebugLoc())
      if (SP->getFile() == DL->getFile())
        SP->setScopeLine(DL->getLine());
        SP->getUnit()->getSourceLanguage() == dwarf::DW_LANG_Swift) {
      SP->replaceLinkageName(MDString::get(Context, NewF->getName()));
      if (auto *Decl = SP->getDeclaration()) {
        auto *NewDecl = DISubprogram::get(
            Decl->getContext(), Decl->getScope(), Decl->getName(),
            NewF->getName(), Decl->getFile(), Decl->getLine(), Decl->getType(),
            Decl->getScopeLine(), Decl->getContainingType(),
            Decl->getVirtualIndex(), Decl->getThisAdjustment(),
            Decl->getFlags(), Decl->getSPFlags(), Decl->getUnit(),
            Decl->getTemplateParams(), nullptr, Decl->getRetainedNodes(),
            Decl->getThrownTypes(), Decl->getAnnotations(),
            Decl->getTargetFuncName());
        SP->replaceDeclaration(NewDecl);

  NewF->setLinkage(savedLinkage);
  NewF->setVisibility(savedVisibility);
  NewF->setUnnamedAddr(savedUnnamedAddr);
  NewF->setDLLStorageClass(savedDLLStorageClass);

  if (Shape.ABI == coro::ABI::Switch &&
      NewF->hasMetadata(LLVMContext::MD_func_sanitize))
    NewF->eraseMetadata(LLVMContext::MD_func_sanitize);
  auto OrigAttrs = NewF->getAttributes();

  switch (Shape.ABI) {
  case coro::ABI::Switch:
    NewAttrs = NewAttrs.addFnAttributes(
        Context, AttrBuilder(Context, OrigAttrs.getFnAttrs()));
                         Shape.FrameAlign, false);
  case coro::ABI::Async: {
    auto *ActiveAsyncSuspend = cast<CoroSuspendAsyncInst>(ActiveSuspend);
    if (OrigF.hasParamAttribute(Shape.AsyncLowering.ContextArgNo,
                                Attribute::SwiftAsync)) {
          ActiveAsyncSuspend->getStorageArgumentIndex();
      auto ContextArgIndex = ArgAttributeIndices & 0xff;
      auto SwiftSelfIndex = ArgAttributeIndices >> 8;
    auto FnAttrs = OrigF.getAttributes().getFnAttrs();
    NewAttrs = NewAttrs.addFnAttributes(Context, AttrBuilder(Context, FnAttrs));
  case coro::ABI::Retcon:
  case coro::ABI::RetconOnce:
    NewAttrs = Shape.RetconLowering.ResumePrototype->getAttributes();
                          Shape.getRetconCoroId()->getStorageSize(),
                          Shape.getRetconCoroId()->getStorageAlignment(),

  switch (Shape.ABI) {
  case coro::ABI::Switch:
  case coro::ABI::RetconOnce:
  case coro::ABI::Retcon:
  case coro::ABI::Async:

  NewF->setAttributes(NewAttrs);
  NewF->setCallingConv(Shape.getResumeFunctionCC());
  replaceEntryBlock();

  for (CallInst *ResumeCall : Shape.SymmetricTransfers) {
    ResumeCall = cast<CallInst>(VMap[ResumeCall]);
    Builder.CreateRetVoid();

  Builder.SetInsertPoint(&NewF->getEntryBlock().front());
  NewFramePtr = deriveNewFramePointer();

  Value *OldFramePtr = VMap[Shape.FramePtr];
  NewFramePtr->takeName(OldFramePtr);

  auto *NewVFrame = Builder.CreateBitCast(
      NewFramePtr, PointerType::getUnqual(Builder.getContext()), "vFrame");
  Value *OldVFrame = cast<Value>(VMap[Shape.CoroBegin]);
  if (OldVFrame != NewVFrame)

    DummyArg->deleteValue();

  switch (Shape.ABI) {
  case coro::ABI::Switch:
    if (Shape.SwitchLowering.HasFinalSuspend)
      handleFinalSuspend();
  case coro::ABI::Async:
  case coro::ABI::Retcon:
  case coro::ABI::RetconOnce:
    assert(ActiveSuspend != nullptr &&
           "no active suspend when lowering a continuation-style coroutine");
    replaceRetconOrAsyncSuspendUses();

  replaceCoroSuspends();

  if (Shape.ABI == coro::ABI::Switch)
                      FKind == CoroCloner::Kind::SwitchCleanup);
  auto *FuncPtrStruct = cast<ConstantStruct>(
  auto *OrigRelativeFunOffset = FuncPtrStruct->getOperand(0);
  auto *OrigContextSize = FuncPtrStruct->getOperand(1);
  auto *NewContextSize = ConstantInt::get(OrigContextSize->getType(),
      FuncPtrStruct->getType(), OrigRelativeFunOffset, NewContextSize);

  if (Shape.ABI == coro::ABI::Async)

  auto *SizeIntrin = Shape.CoroSizes.back();
  Module *M = SizeIntrin->getModule();
  auto *SizeConstant = ConstantInt::get(SizeIntrin->getType(), Size);
  auto *CoroId = CoroBegin->getId();

  switch (Shape.ABI) {
  case coro::ABI::Switch: {
    auto SwitchId = cast<CoroIdInst>(CoroId);
      AllocInst->replaceAllUsesWith(Builder.getFalse());
      AllocInst->eraseFromParent();
      CoroBegin->replaceAllUsesWith(Frame);
      CoroBegin->replaceAllUsesWith(CoroBegin->getMem());
  case coro::ABI::Async:
  case coro::ABI::Retcon:
  case coro::ABI::RetconOnce:

  CoroBegin->eraseFromParent();
    if (isa<IntrinsicInst>(I))
    if (isa<CallBase>(I))

  while (!Worklist.empty()) {
      if (!Set.contains(Pred))
  Set.erase(ResDesBB);

  for (auto *BB : Set)

  auto *ResumeOrDestroyBB = ResumeOrDestroy->getParent();
  if (SaveBB == ResumeOrDestroyBB)

  auto *Pred = Suspend->getParent()->getSinglePredecessor();
    Prev = Pred->getTerminator();

  CallBase *CB = dyn_cast<CallBase>(Prev);
  auto *SubFn = dyn_cast<CoroSubFnInst>(Callee);
  if (SubFn->getFrame() != CoroBegin)

  Save->eraseFromParent();

  if (auto *Invoke = dyn_cast<InvokeInst>(CB)) {

  if (CalledValue != SubFn && CalledValue->user_empty())
    if (auto *I = dyn_cast<Instruction>(CalledValue))
      I->eraseFromParent();

  if (SubFn->user_empty())
    SubFn->eraseFromParent();
  if (Shape.ABI != coro::ABI::Switch)

  size_t I = 0, N = S.size();
  size_t ChangedFinalIndex = std::numeric_limits<size_t>::max();
    auto SI = cast<CoroSuspendInst>(S[I]);
    if (cast<CoroSuspendInst>(S[I])->isFinal()) {
      ChangedFinalIndex = I;

  if (ChangedFinalIndex < N) {
    assert(cast<CoroSuspendInst>(S[ChangedFinalIndex])->isFinal());
    std::swap(S[ChangedFinalIndex], S.back());
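// The switch ("resume-switch") lowering produces three clones that share the
// coroutine frame: f.resume, f.destroy (unwind path) and f.cleanup, with the
// resume-entry block dispatching on an index field stored in the frame.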
struct SwitchCoroutineSplitter {
    createResumeEntryBlock(F, Shape);
        createClone(F, ".resume", Shape, CoroCloner::Kind::SwitchResume, TTI);
    auto *DestroyClone =
        createClone(F, ".destroy", Shape, CoroCloner::Kind::SwitchUnwind, TTI);
    auto *CleanupClone =
        createClone(F, ".cleanup", Shape, CoroCloner::Kind::SwitchCleanup, TTI);

    updateCoroFrame(Shape, ResumeClone, DestroyClone, CleanupClone);
    setCoroInfo(F, Shape, Clones);

    CoroCloner Cloner(F, Suffix, Shape, FKind, TTI);
    return Cloner.getFunction();
    auto *FrameTy = Shape.FrameTy;
    auto *GepIndex = Builder.CreateStructGEP(

    size_t SuspendIndex = 0;
      auto *S = cast<CoroSuspendInst>(AnyS);
      auto *Save = S->getCoroSave();
      Builder.SetInsertPoint(Save);
        auto *GepIndex = Builder.CreateStructGEP(
        Builder.CreateStore(IndexVal, GepIndex);
      Save->eraseFromParent();

      auto *SuspendBB = S->getParent();
          S->getNextNode(), ResumeBB->getName() + Twine(".landing"));
      Switch->addCase(IndexVal, ResumeBB);
      cast<BranchInst>(SuspendBB->getTerminator())->setSuccessor(0, LandingBB);
      PN->insertBefore(LandingBB->begin());
      S->replaceAllUsesWith(PN);
      PN->addIncoming(Builder.getInt8(-1), SuspendBB);
      PN->addIncoming(S, ResumeBB);

    Builder.SetInsertPoint(UnreachBB);
    Builder.CreateUnreachable();
    auto *ResumeAddr = Builder.CreateStructGEP(
    Builder.CreateStore(ResumeFn, ResumeAddr);

    Value *DestroyOrCleanupFn = DestroyFn;
      DestroyOrCleanupFn = Builder.CreateSelect(CA, DestroyFn, CleanupFn);
    auto *DestroyAddr = Builder.CreateStructGEP(
    Builder.CreateStore(DestroyOrCleanupFn, DestroyAddr);

    auto *ArrTy = ArrayType::get(Part->getType(), Args.size());
                                    GlobalVariable::PrivateLinkage, ConstVal,
                                    F.getName() + Twine(".resumers"));
                                       Value *Continuation) {
  auto &Context = Suspend->getParent()->getParent()->getContext();
  auto *Int8PtrTy = PointerType::getUnqual(Context);
  ResumeIntrinsic->eraseFromParent();

  for (auto *paramTy : FnTy->params()) {
    if (paramTy != FnArgs[ArgIdx]->getType())

  auto *TailCall = Builder.CreateCall(FnTy, MustTailCallFn, CallArgs);
  TailCall->setDebugLoc(Loc);
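// Split an async coroutine: materialize the frame pointer inside the async
// context in the ramp, then, for each suspend point, split off a return block
// and create a resume clone that takes the async context as its argument and
// ends in a (must-tail) call to the next function in the chain.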
  F.removeFnAttr(Attribute::NoReturn);
  F.removeRetAttr(Attribute::NoAlias);
  F.removeRetAttr(Attribute::NonNull);

  auto &Context = F.getContext();
  auto *Int8PtrTy = PointerType::getUnqual(Context);
                                  "async.ctx.frameptr");

  auto NextF = std::next(F.getIterator());

    auto ResumeNameSuffix = ".resume.";
    auto ProjectionFunctionName =
        Suspend->getAsyncContextProjectionFunction()->getName();
    bool UseSwiftMangling = false;
    if (ProjectionFunctionName == "__swift_async_resume_project_context") {
      ResumeNameSuffix = "TQ";
      UseSwiftMangling = true;
    } else if (ProjectionFunctionName == "__swift_async_resume_get_context") {
      ResumeNameSuffix = "TY";
      UseSwiftMangling = true;
        UseSwiftMangling ? ResumeNameSuffix + Twine(Idx) + "_"

    auto *SuspendBB = Suspend->getParent();
    auto *NewSuspendBB = SuspendBB->splitBasicBlock(Suspend);
    auto *Branch = cast<BranchInst>(SuspendBB->getTerminator());
    Branch->setSuccessor(0, ReturnBB);

    auto *Fn = Suspend->getMustTailCallFunction();

    auto *Clone = Clones[Idx];
    CoroCloner(F, "resume." + Twine(Idx), Shape, Clone, Suspend, TTI).create();
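// Split a returned-continuation (retcon/retcon.once) coroutine: the frame
// lives in caller-provided storage (or is allocated when it does not fit), and
// each suspend point is rewritten to return the next continuation function
// together with the suspend's yielded values; one resume clone is created per
// suspend point.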
  assert(Shape.ABI == coro::ABI::Retcon || Shape.ABI == coro::ABI::RetconOnce);

  F.removeFnAttr(Attribute::NoReturn);
  F.removeRetAttr(Attribute::NoAlias);
  F.removeRetAttr(Attribute::NonNull);

    RawFramePtr = Id->getStorage();
    Builder.CreateStore(RawFramePtr, Id->getStorage());

  auto NextF = std::next(F.getIterator());

  for (size_t i = 0, e = Shape.CoroSuspends.size(); i != e; ++i) {
    auto Suspend = cast<CoroSuspendRetconInst>(Shape.CoroSuspends[i]);

    auto SuspendBB = Suspend->getParent();
    auto NewSuspendBB = SuspendBB->splitBasicBlock(Suspend);
    auto Branch = cast<BranchInst>(SuspendBB->getTerminator());

      auto RetTy = F.getReturnType();
      auto CastedContinuationTy =
          (ReturnPHIs.size() == 1 ? RetTy : RetTy->getStructElementType(0));
      auto *CastedContinuation =

      if (ReturnPHIs.size() == 1) {
        RetV = CastedContinuation;
        for (size_t I = 1, E = ReturnPHIs.size(); I != E; ++I)

    Branch->setSuccessor(0, ReturnBB);
    ReturnPHIs[0]->addIncoming(Continuation, SuspendBB);
    size_t NextPHIIndex = 1;
    for (auto &VUse : Suspend->value_operands())
      ReturnPHIs[NextPHIIndex++]->addIncoming(&*VUse, SuspendBB);

  for (size_t i = 0, e = Shape.CoroSuspends.size(); i != e; ++i) {
    auto Clone = Clones[i];
    CoroCloner(F, "resume." + Twine(i), Shape, Clone, Suspend, TTI).create();
  PrettyStackTraceFunction(Function &F) : F(F) {}
    OS << "While splitting coroutine ";
    F.printAsOperand(OS, false, F.getParent());
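// splitCoroutine: build the coroutine frame for F, then dispatch to the
// ABI-specific splitter (switch, async, or retcon/retcon.once) to produce the
// resume clones recorded in Clones.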
    std::function<bool(Instruction &)> MaterializableCallback) {
  PrettyStackTraceFunction prettyStackTrace(F);

  buildCoroutineFrame(F, Shape, TTI, MaterializableCallback);

  switch (Shape.ABI) {
  case coro::ABI::Switch:
    SwitchCoroutineSplitter::split(F, Shape, Clones, TTI);
  case coro::ABI::Async:
  case coro::ABI::Retcon:
  case coro::ABI::RetconOnce:

  for (auto *DDI : DbgInsts)

  if (Shape.ABI != coro::ABI::Switch) {
      auto &Context = End->getContext();
      End->eraseFromParent();
  if (!Clones.empty()) {
    switch (Shape.ABI) {
    case coro::ABI::Switch:
    case coro::ABI::Async:
    case coro::ABI::Retcon:
    case coro::ABI::RetconOnce:
  if (!Clones.empty())

    auto *Cast = dyn_cast<BitCastInst>(U.getUser());
    if (!Cast || Cast->getType() != Fn->getType())
    Cast->replaceAllUsesWith(Fn);
    Cast->eraseFromParent();

  while (auto *Cast = dyn_cast<BitCastInst>(CastFn)) {
    if (!Cast->use_empty())
    CastFn = Cast->getOperand(0);
    Cast->eraseFromParent();

  bool Changed = false;
    auto *Prepare = cast<CallInst>(P.getUser());

  auto *PrepareFn = M.getFunction(Name);
  if (PrepareFn && !PrepareFn->use_empty())
    : MaterializableCallback(coro::defaultMaterializable),
      OptimizeFrame(OptimizeFrame) {}

  Module &M = *C.begin()->getFunction().getParent();
    if (N.getFunction().isPresplitCoroutine())
  if (Coroutines.empty() && PrepareFns.empty())

    LLVM_DEBUG(dbgs() << "CoroSplit: Processing coroutine '" << F.getName()
    F.setSplittedCoroutine();

             << "Split '" << ore::NV("function", F.getName())
             << "' (frame_size=" << ore::NV("frame_size", Shape.FrameSize)
             << ", align=" << ore::NV("align", Shape.FrameAlign.value()) << ")";

    if (!Shape.CoroSuspends.empty()) {

  for (auto *PrepareFn : PrepareFns) {