typedef SmallPtrSet<BasicBlock *, 8> VisitedBlocksSet;

// Structural coroutine intrinsics whose results never belong in the frame.
static bool isCoroutineStructureIntrinsic(Instruction &I) {
  return isa<CoroIdInst>(&I) || isa<CoroSaveInst>(&I) ||
         isa<CoroSuspendInst>(&I);
}
static bool isSuspendReachableFrom(BasicBlock *From,
                                   VisitedBlocksSet &VisitedOrFreeBBs) {
  // Stop recursing on blocks we have already visited or that contain a free.
  if (!VisitedOrFreeBBs.insert(From).second)
    return false;
  if (coro::isSuspendBlock(From))
    return true;
  for (auto *Succ : successors(From))
    if (isSuspendReachableFrom(Succ, VisitedOrFreeBBs))
      return true;
  return false;
}
// Is the given coro.alloca.alloc local, i.e. freed on every path before any
// suspend point is reached?
static bool isLocalAlloca(CoroAllocaAllocInst *AI) {
  // Seed the visited set with the blocks containing a free so that the walk
  // never continues past them.
  VisitedBlocksSet VisitedOrFreeBBs;
  for (auto *User : AI->users()) {
    if (auto FI = dyn_cast<CoroAllocaFreeInst>(User))
      VisitedOrFreeBBs.insert(FI->getParent());
  }

  return !isSuspendReachableFrom(AI->getParent(), VisitedOrFreeBBs);
}
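// Illustrative sketch, not part of the original file; the intrinsic
// signatures below are abbreviated. Schematically, for IR of this shape:
//
//   %save = call token @llvm.coro.alloca.alloc(...)
//   %ptr  = call ptr @llvm.coro.alloca.get(token %save)
//   ...                                    ; uses of %ptr, no suspend
//   call void @llvm.coro.alloca.free(token %save)
//   %sp   = call i8 @llvm.coro.suspend(...)
//
// every path from the allocation reaches the free before any suspend, so
// isLocalAlloca() returns true and the allocation may stay on the stack. If a
// suspend were reachable first, the allocation has to be rewritten by
// lowerNonLocalAlloca() below.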
/// Turn the given coro.alloca.alloc call into a dynamic allocation. This runs
/// while the caller is still iterating over all instructions, so the dead
/// intrinsic calls are only queued in DeadInsts rather than erased here.
static Instruction *
lowerNonLocalAlloca(CoroAllocaAllocInst *AI, const coro::Shape &Shape,
                    SmallVectorImpl<Instruction *> &DeadInsts) {
  IRBuilder<> Builder(AI);
  auto Alloc = Shape.emitAlloc(Builder, AI->getSize(), nullptr);

  for (User *U : AI->users()) {
    if (isa<CoroAllocaGetInst>(U)) {
      U->replaceAllUsesWith(Alloc);
    } else {
      auto FI = cast<CoroAllocaFreeInst>(U);
      Builder.SetInsertPoint(FI);
      Shape.emitDealloc(Builder, Alloc, nullptr);
    }
    DeadInsts.push_back(cast<Instruction>(U));
  }

  // Push the alloc itself last so that it is deleted after all of its users.
  DeadInsts.push_back(AI);

  // Return the new allocation so the caller can check it for needed spills.
  return cast<Instruction>(Alloc);
}
static Instruction *splitBeforeCatchSwitch(CatchSwitchInst *CatchSwitch) {
  BasicBlock *CurrentBlock = CatchSwitch->getParent();
  BasicBlock *NewBlock = CurrentBlock->splitBasicBlock(CatchSwitch);
  CurrentBlock->getTerminator()->eraseFromParent();
  auto *CleanupPad =
      CleanupPadInst::Create(CatchSwitch->getParentPad(), {}, "", CurrentBlock);
  return CleanupReturnInst::Create(CleanupPad, NewBlock, CurrentBlock);
}
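// getSpillInsertionPt() below relies on this helper when a spilled value is
// defined by a PHI in a block terminated by a catchswitch: no ordinary
// instruction can be inserted in front of a catchswitch, so the block is
// split and the spill goes into the newly created cleanup pad instead.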
struct AllocaUseVisitor : PtrUseVisitor<AllocaUseVisitor> {
  using Base = PtrUseVisitor<AllocaUseVisitor>;
  AllocaUseVisitor(const DataLayout &DL, const DominatorTree &DT,
                   const coro::Shape &CoroShape,
                   const SuspendCrossingInfo &Checker,
                   bool ShouldUseLifetimeStartInfo)
      : PtrUseVisitor(DL), DT(DT), CoroShape(CoroShape), Checker(Checker),
        ShouldUseLifetimeStartInfo(ShouldUseLifetimeStartInfo) {
    for (AnyCoroSuspendInst *SuspendInst : CoroShape.CoroSuspends)
      CoroSuspendBBs.insert(SuspendInst->getParent());
  }
  void visit(Instruction &I) {
    Users.insert(&I);
    Base::visit(I);
    // If the pointer escaped before CoroBegin, conservatively assume it may
    // also have been written to before CoroBegin.
    if (PI.isEscaped() &&
        !DT.dominates(CoroShape.CoroBegin, PI.getEscapingInst())) {
      MayWriteBeforeCoroBegin = true;
    }
  }
  void visitPHINode(PHINode &I) {
    enqueueUsers(I);
    handleAlias(I);
  }
  void visitSelectInst(SelectInst &I) {
    enqueueUsers(I);
    handleAlias(I);
  }
  void visitStoreInst(StoreInst &SI) {
    // Regardless of whether the alias of the alloca is the value operand or
    // the pointer operand, we have to assume the alloca may be written to.
    handleMayWrite(SI);

    if (SI.getValueOperand() != U->get())
      return;

    // We are storing the pointer into a memory location, potentially escaping
    // it. As an optimization, detect the simple pattern where the location is
    // only ever loaded back:
    //   %ptr = alloca ..
    //   %addr = alloca ..
    //   store %ptr, %addr
    //   %x = load %addr
    // Here %x is just another alias of %ptr, and %ptr has not really escaped.
    auto IsSimpleStoreThenLoad = [&]() {
      auto *AI = dyn_cast<AllocaInst>(SI.getPointerOperand());
      // If the store target is not an alloca, it may alias other memory and
      // is too hard to analyze; treat the pointer as escaped.
      if (!AI)
        return false;
      // StoreAliases contains aliases of the memory location stored into.
      SmallVector<Instruction *, 4> StoreAliases = {AI};
      while (!StoreAliases.empty()) {
        Instruction *I = StoreAliases.pop_back_val();
        for (User *U : I->users()) {
          // Loading from the location creates another alias of the pointer.
          if (auto *LI = dyn_cast<LoadInst>(U)) {
            enqueueUsers(*LI);
            handleAlias(*LI);
            continue;
          }
          // Overwriting the location cannot cause the pointer to escape.
          if (auto *S = dyn_cast<StoreInst>(U))
            if (S->getPointerOperand() == I)
              continue;
          // Lifetime markers are harmless.
          if (auto *II = dyn_cast<IntrinsicInst>(U))
            if (II->isLifetimeStartOrEnd())
              continue;
          // Bitcasts create further aliases of the stored-into location.
          if (auto *BI = dyn_cast<BitCastInst>(U)) {
            StoreAliases.push_back(BI);
            continue;
          }
          return false;
        }
      }
      return true;
    };

    if (!IsSimpleStoreThenLoad())
      PI.setEscaped(&SI);
  }
  void visitMemIntrinsic(MemIntrinsic &MI) { handleMayWrite(MI); }

  void visitBitCastInst(BitCastInst &BC) {
    Base::visitBitCastInst(BC);
    handleAlias(BC);
  }

  void visitAddrSpaceCastInst(AddrSpaceCastInst &ASC) {
    Base::visitAddrSpaceCastInst(ASC);
    handleAlias(ASC);
  }

  void visitGetElementPtrInst(GetElementPtrInst &GEPI) {
    // The base visitor updates Offset for constant-index GEPs.
    Base::visitGetElementPtrInst(GEPI);
    handleAlias(GEPI);
  }
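  // PHIs, selects, casts and GEPs reaching handleAlias() (defined below) are
  // recorded together with their constant offset into the alloca, when known.
  // If the alloca ends up on the coroutine frame, aliases that were created
  // before coro.begin but used after it are later recreated from the frame
  // address using exactly that offset.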
  void visitIntrinsicInst(IntrinsicInst &II) {
    // When the lifetime markers refer only to a subrange of the original
    // alloca, ignore them to avoid misleading the analysis.
    if (!IsOffsetKnown || !Offset.isZero())
      return Base::visitIntrinsicInst(II);
    switch (II.getIntrinsicID()) {
    default:
      return Base::visitIntrinsicInst(II);
    case Intrinsic::lifetime_start:
      LifetimeStarts.insert(&II);
      LifetimeStartBBs.push_back(II.getParent());
      break;
    case Intrinsic::lifetime_end:
      LifetimeEndBBs.insert(II.getParent());
      break;
    }
  }
  void visitCallBase(CallBase &CB) {
    for (unsigned Op = 0, OpCount = CB.arg_size(); Op < OpCount; ++Op)
      if (U->get() == CB.getArgOperand(Op) && !CB.doesNotCapture(Op))
        PI.setEscaped(&CB);
    handleMayWrite(CB);
  }
  bool getShouldLiveOnFrame() const {
    if (!ShouldLiveOnFrame)
      ShouldLiveOnFrame = computeShouldLiveOnFrame();
    return *ShouldLiveOnFrame;
  }

  bool getMayWriteBeforeCoroBegin() const { return MayWriteBeforeCoroBegin; }
  DenseMap<Instruction *, std::optional<APInt>> getAliasesCopy() const {
    assert(getShouldLiveOnFrame() && "This method should only be called if the "
                                     "alloca needs to live on the frame.");
    for (const auto &P : AliasOffetMap)
      if (!P.second)
        report_fatal_error("Unable to handle an alias with unknown offset "
                           "created before CoroBegin.");
    return AliasOffetMap;
  }
private:
  const DominatorTree &DT;
  const coro::Shape &CoroShape;
  const SuspendCrossingInfo &Checker;
  // Aliases created before CoroBegin but used after it, together with their
  // known offset into the alloca; they must be recreated off the frame.
  DenseMap<Instruction *, std::optional<APInt>> AliasOffetMap{};
  SmallPtrSet<Instruction *, 4> Users{};
  SmallPtrSet<IntrinsicInst *, 2> LifetimeStarts{};
  SmallVector<BasicBlock *> LifetimeStartBBs{};
  SmallPtrSet<BasicBlock *, 2> LifetimeEndBBs{};
  SmallPtrSet<const BasicBlock *, 2> CoroSuspendBBs{};
  bool MayWriteBeforeCoroBegin{false};
  bool ShouldUseLifetimeStartInfo{true};

  mutable std::optional<bool> ShouldLiveOnFrame{};
  bool computeShouldLiveOnFrame() const {
    // If lifetime information is available, use it first since it is more
    // precise: check whether any lifetime.start can reach a suspend point
    // without passing a lifetime.end.
    if (ShouldUseLifetimeStartInfo && !LifetimeStarts.empty()) {
      // With no explicit lifetime.end, assume the address may cross a suspend.
      if (LifetimeEndBBs.empty())
        return true;

      SmallVector<BasicBlock *> Worklist(LifetimeStartBBs);
      if (isManyPotentiallyReachableFromMany(Worklist, CoroSuspendBBs,
                                             &LifetimeEndBBs, &DT))
        return true;

      // If the address escaped, two lifetime.starts separated by a suspend
      // point also force the alloca onto the frame.
      if (PI.isEscaped()) {
        for (auto *A : LifetimeStarts)
          for (auto *B : LifetimeStarts)
            if (Checker.hasPathOrLoopCrossingSuspendPoint(A->getParent(),
                                                          B->getParent()))
              return true;
      }
      return false;
    }
    // Without lifetime information, conservatively treat an escaped alloca as
    // living on the frame, and otherwise check every pair of uses for a
    // crossing suspend point.
    if (PI.isEscaped())
      return true;

    for (auto *U1 : Users)
      for (auto *U2 : Users)
        if (Checker.isDefinitionAcrossSuspend(*U1, U2))
          return true;

    return false;
  }
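  // For example, a local whose lifetime.start precedes a co_await and whose
  // lifetime.end only follows it has a path from the start marker to a
  // suspend block that never passes an end marker, so computeShouldLiveOnFrame()
  // puts it on the frame; a temporary whose markers both sit between two
  // suspend points stays on the stack.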
  void handleMayWrite(const Instruction &I) {
    if (!DT.dominates(CoroShape.CoroBegin, &I))
      MayWriteBeforeCoroBegin = true;
  }
  bool usedAfterCoroBegin(Instruction &I) {
    for (auto &U : I.uses())
      if (DT.dominates(CoroShape.CoroBegin, U))
        return true;
    return false;
  }
  void handleAlias(Instruction &I) {
    // We track all aliases created prior to CoroBegin but used after it.
    // They may need to be recreated after CoroBegin if the alloca ends up
    // living on the frame.
    if (DT.dominates(CoroShape.CoroBegin, &I) || !usedAfterCoroBegin(I))
      return;

    if (!IsOffsetKnown) {
      AliasOffetMap[&I].reset();
      return;
    }

    auto [Itr, Inserted] = AliasOffetMap.try_emplace(&I, Offset);
    if (!Inserted && Itr->second && *Itr->second != Offset) {
      // Two different offsets were seen for this alias: record it as unknown.
      AliasOffetMap[&I].reset();
    }
  }
};
static void collectFrameAlloca(AllocaInst *AI, const coro::Shape &Shape,
                               const SuspendCrossingInfo &Checker,
                               SmallVectorImpl<AllocaInfo> &Allocas,
                               const DominatorTree &DT) {
  // Nothing can cross a suspend point if there are no suspend points.
  if (Shape.CoroSuspends.empty())
    return;

  // The PromiseAlloca is handled separately: it needs a fixed position in the
  // frame.
  if (AI == Shape.SwitchLowering.PromiseAlloca)
    return;

  // Allocas explicitly marked to stay outside the frame are skipped.
  if (AI->hasMetadata(LLVMContext::MD_coro_outside_frame))
    return;

  // The lifetime.start-based analysis is not valid for every ABI; fall back
  // to the pairwise use check for the continuation lowerings.
  bool ShouldUseLifetimeStartInfo =
      (Shape.ABI != coro::ABI::Async && Shape.ABI != coro::ABI::Retcon &&
       Shape.ABI != coro::ABI::RetconOnce);
  AllocaUseVisitor Visitor{AI->getDataLayout(), DT, Shape, Checker,
                           ShouldUseLifetimeStartInfo};
  Visitor.visitPtr(*AI);
  if (!Visitor.getShouldLiveOnFrame())
    return;
  Allocas.emplace_back(AI, Visitor.getAliasesCopy(),
                       Visitor.getMayWriteBeforeCoroBegin());
}
void collectSpillsFromArgs(SpillInfo &Spills, Function &F,
                           const SuspendCrossingInfo &Checker) {
  // Collect the spills for arguments.
  for (Argument &A : F.args())
    for (User *U : A.users())
      if (Checker.isDefinitionAcrossSuspend(A, U))
        Spills[&A].push_back(cast<Instruction>(U));
}
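// For example, a coroutine parameter that is still referenced after the first
// co_await is recorded here: the ramp function's incoming register or stack
// slot is gone once the coroutine suspends, so the argument's value has to be
// copied into the frame and the later uses rewritten to reload it from there.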
void collectSpillsAndAllocasFromInsts(
    SpillInfo &Spills, SmallVector<AllocaInfo, 8> &Allocas,
    SmallVector<Instruction *, 4> &DeadInstructions,
    SmallVector<CoroAllocaAllocInst *, 4> &LocalAllocas, Function &F,
    const SuspendCrossingInfo &Checker, const DominatorTree &DT,
    const coro::Shape &Shape) {
  for (Instruction &I : instructions(F)) {
    // Values returned from coroutine structure intrinsics should not be part
    // of the coroutine frame.
    if (isCoroutineStructureIntrinsic(I) || &I == Shape.CoroBegin)
      continue;

    // Handle coro.alloca.alloc specially.
    if (auto AI = dyn_cast<CoroAllocaAllocInst>(&I)) {
      // If the allocation's lifetime never crosses a suspend, keep it local.
      if (isLocalAlloca(AI)) {
        LocalAllocas.push_back(AI);
        continue;
      }

      // Otherwise rewrite it into a dynamic allocation and spill the new
      // value wherever it crosses a suspend point.
      auto Alloc = lowerNonLocalAlloca(AI, Shape, DeadInstructions);
      for (User *U : Alloc->users())
        if (Checker.isDefinitionAcrossSuspend(*Alloc, U))
          Spills[Alloc].push_back(cast<Instruction>(U));
      continue;
    }

    // coro.alloca.get is handled as part of coro.alloca.alloc.
    if (isa<CoroAllocaGetInst>(I))
      continue;

    if (auto *AI = dyn_cast<AllocaInst>(&I)) {
      collectFrameAlloca(AI, Shape, Checker, Allocas, DT);
      continue;
    }

    for (User *U : I.users())
      if (Checker.isDefinitionAcrossSuspend(I, U)) {
        // We cannot spill a token.
        if (I.getType()->isTokenTy())
          report_fatal_error(
              "token definition is separated from the use by a suspend point");
        Spills[&I].push_back(cast<Instruction>(U));
      }
  }
}
void collectSpillsFromDbgInfo(SpillInfo &Spills, Function &F,
                              const SuspendCrossingInfo &Checker) {
  // Only salvage debug records for values that are already spilled to the
  // frame, so that debug information never affects the frame layout.
  for (auto &Iter : Spills) {
    auto *V = Iter.first;
    SmallVector<DbgValueInst *, 16> DVIs;
    SmallVector<DbgVariableRecord *, 16> DVRs;
    findDbgValues(DVIs, V, &DVRs);
    for (DbgValueInst *DVI : DVIs)
      if (Checker.isDefinitionAcrossSuspend(*V, DVI))
        Spills[V].push_back(DVI);
    for (DbgVariableRecord *DVR : DVRs)
      if (Checker.isDefinitionAcrossSuspend(*V, DVR->Marker->MarkedInstr))
        Spills[V].push_back(DVR->Marker->MarkedInstr);
  }
}
/// Async and Retcon{Once} conventions assume that all spill uses can be sunk
/// after coro.begin, since the frame pointer only exists from there on.
void sinkSpillUsesAfterCoroBegin(const DominatorTree &DT,
                                 CoroBeginInst *CoroBegin,
                                 coro::SpillInfo &Spills,
                                 SmallVectorImpl<coro::AllocaInfo> &Allocas) {
  SmallSetVector<Instruction *, 32> ToMove;
  SmallVector<Instruction *, 32> Worklist;

  // Collect all users of the spilled values that precede coro.begin in its
  // block.
  auto collectUsers = [&](Value *Def) {
    for (User *U : Def->users()) {
      auto Inst = cast<Instruction>(U);
      if (Inst->getParent() != CoroBegin->getParent() ||
          DT.dominates(CoroBegin, Inst))
        continue;
      if (ToMove.insert(Inst))
        Worklist.push_back(Inst);
    }
  };
  std::for_each(Spills.begin(), Spills.end(),
                [&](auto &I) { collectUsers(I.first); });
  std::for_each(Allocas.begin(), Allocas.end(),
                [&](auto &I) { collectUsers(I.Alloca); });

  // Recursively collect transitive users that also precede coro.begin.
  while (!Worklist.empty()) {
    auto *Def = Worklist.pop_back_val();
    for (User *U : Def->users()) {
      auto Inst = cast<Instruction>(U);
      if (Inst->getParent() != CoroBegin->getParent() ||
          DT.dominates(CoroBegin, Inst))
        continue;
      if (ToMove.insert(Inst))
        Worklist.push_back(Inst);
    }
  }
  // ...
  // The collected instructions are then ordered by dominance and moved, in
  // that order, to a point after coro.begin.
}
BasicBlock::iterator getSpillInsertionPt(const coro::Shape &Shape, Value *Def,
                                         const DominatorTree &DT) {
  BasicBlock::iterator InsertPt;
  if (auto *Arg = dyn_cast<Argument>(Def)) {
    // For arguments, the store goes right after the coroutine frame pointer
    // instruction, i.e. coro.begin.
    InsertPt = Shape.getInsertPtAfterFramePtr();

    // When spilling an Argument, clear 'nocapture' from the coroutine
    // function.
    Arg->getParent()->removeParamAttr(Arg->getArgNo(), Attribute::NoCapture);
  } else if (auto *CSI = dyn_cast<AnyCoroSuspendInst>(Def)) {
    // Don't spill immediately after a suspend; splitting assumes that the
    // suspend will be followed by a branch.
    InsertPt = CSI->getParent()->getSingleSuccessor()->getFirstNonPHIIt();
  } else {
    auto *I = cast<Instruction>(Def);
    if (!DT.dominates(Shape.CoroBegin, I)) {
      // If the definition is not dominated by coro.begin, the spill goes
      // right after the frame pointer is computed.
      InsertPt = Shape.getInsertPtAfterFramePtr();
    } else if (auto *II = dyn_cast<InvokeInst>(I)) {
      // For the result of an invoke, split the normal edge and put the spill
      // into the new block.
      auto *NewBB = SplitEdge(II->getParent(), II->getNormalDest());
      InsertPt = NewBB->getTerminator()->getIterator();
    } else if (isa<PHINode>(I)) {
      // Skip over PHI nodes and EH pads.
      BasicBlock *DefBlock = I->getParent();
      if (auto *CSI = dyn_cast<CatchSwitchInst>(DefBlock->getTerminator()))
        InsertPt = splitBeforeCatchSwitch(CSI)->getIterator();
      else
        InsertPt = DefBlock->getFirstInsertionPt();
    } else {
      assert(!I->isTerminator() && "unexpected terminator");
      // For all other values, the spill goes immediately after the definition.
      InsertPt = I->getNextNode()->getIterator();
    }
  }

  return InsertPt;
}
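// Illustrative sketch only (not a function from this file): a caller that
// materializes a spill would position an IRBuilder at the returned iterator
// and store the value into its frame slot. `FrameSlot` stands in for the GEP
// into the coroutine frame that the real frame-building code computes.
static void emitSpillStoreSketch(const coro::Shape &Shape, Value *Def,
                                 Value *FrameSlot, const DominatorTree &DT) {
  BasicBlock::iterator InsertPt = getSpillInsertionPt(Shape, Def, DT);
  IRBuilder<> Builder(InsertPt->getParent(), InsertPt);
  Builder.CreateStore(Def, FrameSlot);
}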