1 //===- Attributor.cpp - Module-wide attribute deduction -------------------===//
2 //
3 // Part of the LLVM Project, under the Apache License v2.0 with LLVM Exceptions.
4 // See https://llvm.org/LICENSE.txt for license information.
5 // SPDX-License-Identifier: Apache-2.0 WITH LLVM-exception
6 //
7 //===----------------------------------------------------------------------===//
8 //
9 // This file implements an interprocedural pass that deduces and/or propagates
10 // attributes. This is done in an abstract interpretation style fixpoint
11 // iteration. See the Attributor.h file comment and the class descriptions in
12 // that file for more information.
13 //
14 //===----------------------------------------------------------------------===//
15 
17 
19 #include "llvm/ADT/STLExtras.h"
20 #include "llvm/ADT/SmallPtrSet.h"
21 #include "llvm/ADT/SmallVector.h"
22 #include "llvm/ADT/Statistic.h"
26 #include "llvm/Analysis/Loads.h"
29 #include "llvm/IR/Argument.h"
30 #include "llvm/IR/Attributes.h"
31 #include "llvm/IR/CFG.h"
32 #include "llvm/IR/InstIterator.h"
33 #include "llvm/IR/IntrinsicInst.h"
35 #include "llvm/Support/Debug.h"
39 
40 #include <cassert>
41 
42 using namespace llvm;
43 
44 #define DEBUG_TYPE "attributor"
45 
46 STATISTIC(NumFnWithExactDefinition,
47  "Number of functions with exact definitions");
48 STATISTIC(NumFnWithoutExactDefinition,
49  "Number of functions without exact definitions");
50 STATISTIC(NumAttributesTimedOut,
51  "Number of abstract attributes timed out before fixpoint");
52 STATISTIC(NumAttributesValidFixpoint,
53  "Number of abstract attributes in a valid fixpoint state");
54 STATISTIC(NumAttributesManifested,
55  "Number of abstract attributes manifested in IR");
56 
57 // Some helper macros to deal with statistics tracking.
58 //
59 // Usage:
60 // For simple IR attribute tracking overload trackStatistics in the abstract
61 // attribute and choose the right STATS_DECLTRACK_********* macro,
62 // e.g.,:
63 // void trackStatistics() const override {
64 // STATS_DECLTRACK_ARG_ATTR(returned)
65 // }
66 // If there is a single "increment" site one can use the macro
67 // STATS_DECLTRACK with a custom message. If there are multiple increment
68 // sites, STATS_DECL and STATS_TRACK can also be used separately.
69 //
70 #define BUILD_STAT_MSG_IR_ATTR(TYPE, NAME) \
71  ("Number of " #TYPE " marked '" #NAME "'")
72 #define BUILD_STAT_NAME(NAME, TYPE) NumIR##TYPE##_##NAME
73 #define STATS_DECL_(NAME, MSG) STATISTIC(NAME, MSG);
74 #define STATS_DECL(NAME, TYPE, MSG) \
75  STATS_DECL_(BUILD_STAT_NAME(NAME, TYPE), MSG);
76 #define STATS_TRACK(NAME, TYPE) ++(BUILD_STAT_NAME(NAME, TYPE));
77 #define STATS_DECLTRACK(NAME, TYPE, MSG) \
78  { \
79  STATS_DECL(NAME, TYPE, MSG) \
80  STATS_TRACK(NAME, TYPE) \
81  }
82 #define STATS_DECLTRACK_ARG_ATTR(NAME) \
83  STATS_DECLTRACK(NAME, Arguments, BUILD_STAT_MSG_IR_ATTR(arguments, NAME))
84 #define STATS_DECLTRACK_CSARG_ATTR(NAME) \
85  STATS_DECLTRACK(NAME, CSArguments, \
86  BUILD_STAT_MSG_IR_ATTR(call site arguments, NAME))
87 #define STATS_DECLTRACK_FN_ATTR(NAME) \
88  STATS_DECLTRACK(NAME, Function, BUILD_STAT_MSG_IR_ATTR(functions, NAME))
89 #define STATS_DECLTRACK_CS_ATTR(NAME) \
90  STATS_DECLTRACK(NAME, CS, BUILD_STAT_MSG_IR_ATTR(call site, NAME))
91 #define STATS_DECLTRACK_FNRET_ATTR(NAME) \
92  STATS_DECLTRACK(NAME, FunctionReturn, \
93  BUILD_STAT_MSG_IR_ATTR(function returns, NAME))
94 #define STATS_DECLTRACK_CSRET_ATTR(NAME) \
95  STATS_DECLTRACK(NAME, CSReturn, \
96  BUILD_STAT_MSG_IR_ATTR(call site returns, NAME))
97 #define STATS_DECLTRACK_FLOATING_ATTR(NAME) \
98  STATS_DECLTRACK(NAME, Floating, \
99  ("Number of floating values known to be '" #NAME "'"))
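//
// As a concrete illustration (derived from the macros above, not additional
// code), STATS_DECLTRACK_FN_ATTR(nounwind) expands roughly to:
//   { STATISTIC(NumIRFunction_nounwind,
//               "Number of functions marked 'nounwind'");
//     ++NumIRFunction_nounwind; }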
100 
101 // TODO: Determine a good default value.
102 //
103 // In the LLVM-TS and SPEC2006, 32 seems to not induce compile time overheads
104 // (when run with the first 5 abstract attributes). The results also indicate
105 // that we never reach 32 iterations but always find a fixpoint sooner.
106 //
107 // This will become more involved once we perform two interleaved fixpoint
108 // iterations: bottom-up and top-down.
109 static cl::opt<unsigned>
110  MaxFixpointIterations("attributor-max-iterations", cl::Hidden,
111  cl::desc("Maximal number of fixpoint iterations."),
112  cl::init(32));
114  "attributor-max-iterations-verify", cl::Hidden,
115  cl::desc("Verify that max-iterations is a tight bound for a fixpoint"),
116  cl::init(false));
117 
119  "attributor-disable", cl::Hidden,
120  cl::desc("Disable the attributor inter-procedural deduction pass."),
121  cl::init(true));
122 
124  "attributor-manifest-internal", cl::Hidden,
125  cl::desc("Manifest Attributor internal string attributes."),
126  cl::init(false));
127 
129  "attributor-verify", cl::Hidden,
130  cl::desc("Verify the Attributor deduction and "
131  "manifestation of attributes -- may issue false-positive errors"),
132  cl::init(false));
133 
135  "attributor-dependence-recompute-interval", cl::Hidden,
136  cl::desc("Number of iterations until dependences are recomputed."),
137  cl::init(4));
138 
139 static cl::opt<bool> EnableHeapToStack("enable-heap-to-stack-conversion",
140  cl::init(true), cl::Hidden);
141 
142 static cl::opt<int> MaxHeapToStackSize("max-heap-to-stack-size",
143  cl::init(128), cl::Hidden);
144 
145 /// Logic operators for the change status enum class.
146 ///
147 ///{
148 ChangeStatus llvm::operator|(ChangeStatus l, ChangeStatus r) {
149  return l == ChangeStatus::CHANGED ? l : r;
150 }
151 ChangeStatus llvm::operator&(ChangeStatus l, ChangeStatus r) {
152  return l == ChangeStatus::UNCHANGED ? l : r;
153 }
154 ///}
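//
// For reference, these operators fold change statuses such that CHANGED wins
// for '|' and UNCHANGED wins for '&', e.g.:
//   (ChangeStatus::CHANGED | ChangeStatus::UNCHANGED) == ChangeStatus::CHANGED
//   (ChangeStatus::CHANGED & ChangeStatus::UNCHANGED) == ChangeStatus::UNCHANGED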
155 
156 /// Recursively visit all values that might become \p IRP at some point. This
157 /// will be done by looking through cast instructions, selects, phis, and calls
158 /// with the "returned" attribute. Once we cannot look through the value any
159 /// further, the callback \p VisitValueCB is invoked and passed the current
160 /// value, the \p State, and a flag to indicate if we stripped anything. To
161 /// limit how much effort is invested, we will never visit more values than
162 /// specified by \p MaxValues.
163 template <typename AAType, typename StateTy>
164 static bool genericValueTraversal(
165  Attributor &A, IRPosition IRP, const AAType &QueryingAA, StateTy &State,
166  const function_ref<bool(Value &, StateTy &, bool)> &VisitValueCB,
167  int MaxValues = 8) {
168 
169  const AAIsDead *LivenessAA = nullptr;
170  if (IRP.getAnchorScope())
171  LivenessAA = &A.getAAFor<AAIsDead>(
172  QueryingAA, IRPosition::function(*IRP.getAnchorScope()),
173  /* TrackDependence */ false);
174  bool AnyDead = false;
175 
176  // TODO: Use Positions here to allow context sensitivity in VisitValueCB
177  SmallPtrSet<Value *, 16> Visited;
178  SmallVector<Value *, 16> Worklist;
179  Worklist.push_back(&IRP.getAssociatedValue());
180 
181  int Iteration = 0;
182  do {
183  Value *V = Worklist.pop_back_val();
184 
185  // Check if we should process the current value. To prevent endless
186  // recursion keep a record of the values we followed!
187  if (!Visited.insert(V).second)
188  continue;
189 
190  // Make sure we limit the compile time for complex expressions.
191  if (Iteration++ >= MaxValues)
192  return false;
193 
194  // Explicitly look through calls with a "returned" attribute if we do
195  // not have a pointer, as stripPointerCasts only works on pointers.
196  Value *NewV = nullptr;
197  if (V->getType()->isPointerTy()) {
198  NewV = V->stripPointerCasts();
199  } else {
200  CallSite CS(V);
201  if (CS && CS.getCalledFunction()) {
202  for (Argument &Arg : CS.getCalledFunction()->args())
203  if (Arg.hasReturnedAttr()) {
204  NewV = CS.getArgOperand(Arg.getArgNo());
205  break;
206  }
207  }
208  }
209  if (NewV && NewV != V) {
210  Worklist.push_back(NewV);
211  continue;
212  }
213 
214  // Look through select instructions, visit both potential values.
215  if (auto *SI = dyn_cast<SelectInst>(V)) {
216  Worklist.push_back(SI->getTrueValue());
217  Worklist.push_back(SI->getFalseValue());
218  continue;
219  }
220 
221  // Look through phi nodes, visit all live operands.
222  if (auto *PHI = dyn_cast<PHINode>(V)) {
223  assert(LivenessAA &&
224  "Expected liveness in the presence of instructions!");
225  for (unsigned u = 0, e = PHI->getNumIncomingValues(); u < e; u++) {
226  const BasicBlock *IncomingBB = PHI->getIncomingBlock(u);
227  if (LivenessAA->isAssumedDead(IncomingBB->getTerminator())) {
228  AnyDead = true;
229  continue;
230  }
231  Worklist.push_back(PHI->getIncomingValue(u));
232  }
233  continue;
234  }
235 
236  // Once a leaf is reached we inform the user through the callback.
237  if (!VisitValueCB(*V, State, Iteration > 1))
238  return false;
239  } while (!Worklist.empty());
240 
241  // If we actually used liveness information, record a dependence.
242  if (AnyDead)
243  A.recordDependence(*LivenessAA, QueryingAA);
244 
245  // All values have been visited.
246  return true;
247 }
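// Note: a typical instantiation of this traversal can be seen in
// AAReturnedValuesImpl::updateImpl below, where VisitReturnedValue invokes
// genericValueTraversal<AAReturnedValues, RVState> with a callback that
// records leaf values in the returned-values map.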
248 
249 /// Return true if \p New is equal or worse than \p Old.
250 static bool isEqualOrWorse(const Attribute &New, const Attribute &Old) {
251  if (!Old.isIntAttribute())
252  return true;
253 
254  return Old.getValueAsInt() >= New.getValueAsInt();
255 }
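// Illustration (not part of the original source): for integer attributes such
// as alignment, with Old = align(16) and New = align(8) the old value 16 >= 8,
// so the new attribute is equal or worse and this returns true; for
// New = align(32) it returns false. Non-integer attributes always return true.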
256 
257 /// Return true if the information provided by \p Attr was added to the
258 /// attribute list \p Attrs. This is only the case if it was not already present
259 /// in \p Attrs at the position described by \p PK and \p AttrIdx.
260 static bool addIfNotExistent(LLVMContext &Ctx, const Attribute &Attr,
261  AttributeList &Attrs, int AttrIdx) {
262 
263  if (Attr.isEnumAttribute()) {
264  Attribute::AttrKind Kind = Attr.getKindAsEnum();
265  if (Attrs.hasAttribute(AttrIdx, Kind))
266  if (isEqualOrWorse(Attr, Attrs.getAttribute(AttrIdx, Kind)))
267  return false;
268  Attrs = Attrs.addAttribute(Ctx, AttrIdx, Attr);
269  return true;
270  }
271  if (Attr.isStringAttribute()) {
272  StringRef Kind = Attr.getKindAsString();
273  if (Attrs.hasAttribute(AttrIdx, Kind))
274  if (isEqualOrWorse(Attr, Attrs.getAttribute(AttrIdx, Kind)))
275  return false;
276  Attrs = Attrs.addAttribute(Ctx, AttrIdx, Attr);
277  return true;
278  }
279  if (Attr.isIntAttribute()) {
280  Attribute::AttrKind Kind = Attr.getKindAsEnum();
281  if (Attrs.hasAttribute(AttrIdx, Kind))
282  if (isEqualOrWorse(Attr, Attrs.getAttribute(AttrIdx, Kind)))
283  return false;
284  Attrs = Attrs.removeAttribute(Ctx, AttrIdx, Kind);
285  Attrs = Attrs.addAttribute(Ctx, AttrIdx, Attr);
286  return true;
287  }
288 
289  llvm_unreachable("Expected enum or string attribute!");
290 }
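// Illustration (not part of the original source): if \p Attrs already holds
// dereferenceable(16) at \p AttrIdx, a deduced dereferenceable(8) is equal or
// worse and is not added (the function returns false), while a deduced
// dereferenceable(32) replaces the old attribute and the function returns true.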
291 
292 ChangeStatus AbstractAttribute::update(Attributor &A) {
293  ChangeStatus HasChanged = ChangeStatus::UNCHANGED;
294  if (getState().isAtFixpoint())
295  return HasChanged;
296 
297  LLVM_DEBUG(dbgs() << "[Attributor] Update: " << *this << "\n");
298 
299  HasChanged = updateImpl(A);
300 
301  LLVM_DEBUG(dbgs() << "[Attributor] Update " << HasChanged << " " << *this
302  << "\n");
303 
304  return HasChanged;
305 }
306 
309  const ArrayRef<Attribute> &DeducedAttrs) {
310  Function *ScopeFn = IRP.getAssociatedFunction();
312 
313  // In the following some generic code that will manifest attributes in
314  // DeducedAttrs if they improve the current IR. Due to the different
315  // annotation positions we use the underlying AttributeList interface.
316 
318  switch (PK) {
325  Attrs = ScopeFn->getAttributes();
326  break;
331  break;
332  }
333 
335  LLVMContext &Ctx = IRP.getAnchorValue().getContext();
336  for (const Attribute &Attr : DeducedAttrs) {
337  if (!addIfNotExistent(Ctx, Attr, Attrs, IRP.getAttrIdx()))
338  continue;
339 
340  HasChanged = ChangeStatus::CHANGED;
341  }
342 
343  if (HasChanged == ChangeStatus::UNCHANGED)
344  return HasChanged;
345 
346  switch (PK) {
350  ScopeFn->setAttributes(Attrs);
351  break;
355  CallSite(&IRP.getAnchorValue()).setAttributes(Attrs);
356  break;
359  break;
360  }
361 
362  return HasChanged;
363 }
364 
367 
369  IRPositions.emplace_back(IRP);
370 
371  ImmutableCallSite ICS(&IRP.getAnchorValue());
372  switch (IRP.getPositionKind()) {
376  return;
379  IRPositions.emplace_back(
381  return;
383  assert(ICS && "Expected call site!");
384  // TODO: We need to look at the operand bundles similar to the redirection
385  // in CallBase.
386  if (!ICS.hasOperandBundles())
387  if (const Function *Callee = ICS.getCalledFunction())
388  IRPositions.emplace_back(IRPosition::function(*Callee));
389  return;
391  assert(ICS && "Expected call site!");
392  // TODO: We need to look at the operand bundles similar to the redirection
393  // in CallBase.
394  if (!ICS.hasOperandBundles()) {
395  if (const Function *Callee = ICS.getCalledFunction()) {
396  IRPositions.emplace_back(IRPosition::returned(*Callee));
397  IRPositions.emplace_back(IRPosition::function(*Callee));
398  }
399  }
400  IRPositions.emplace_back(
401  IRPosition::callsite_function(cast<CallBase>(*ICS.getInstruction())));
402  return;
404  int ArgNo = IRP.getArgNo();
405  assert(ICS && ArgNo >= 0 && "Expected call site!");
406  // TODO: We need to look at the operand bundles similar to the redirection
407  // in CallBase.
408  if (!ICS.hasOperandBundles()) {
409  const Function *Callee = ICS.getCalledFunction();
410  if (Callee && Callee->arg_size() > unsigned(ArgNo))
411  IRPositions.emplace_back(IRPosition::argument(*Callee->getArg(ArgNo)));
412  if (Callee)
413  IRPositions.emplace_back(IRPosition::function(*Callee));
414  }
415  IRPositions.emplace_back(IRPosition::value(IRP.getAssociatedValue()));
416  return;
417  }
418  }
419 }
420 
422  for (const IRPosition &EquivIRP : SubsumingPositionIterator(*this))
423  for (Attribute::AttrKind AK : AKs)
424  if (EquivIRP.getAttr(AK).getKindAsEnum() == AK)
425  return true;
426  return false;
427 }
428 
431  for (const IRPosition &EquivIRP : SubsumingPositionIterator(*this))
432  for (Attribute::AttrKind AK : AKs) {
433  const Attribute &Attr = EquivIRP.getAttr(AK);
434  if (Attr.getKindAsEnum() == AK)
435  Attrs.push_back(Attr);
436  }
437 }
438 
439 void IRPosition::verify() {
440  switch (KindOrArgNo) {
441  default:
442  assert(KindOrArgNo >= 0 && "Expected argument or call site argument!");
443  assert((isa<CallBase>(AnchorVal) || isa<Argument>(AnchorVal)) &&
444  "Expected call base or argument for positive attribute index!");
445  if (isa<Argument>(AnchorVal)) {
446  assert(cast<Argument>(AnchorVal)->getArgNo() == unsigned(getArgNo()) &&
447  "Argument number mismatch!");
448  assert(cast<Argument>(AnchorVal) == &getAssociatedValue() &&
449  "Associated value mismatch!");
450  } else {
451  assert(cast<CallBase>(*AnchorVal).arg_size() > unsigned(getArgNo()) &&
452  "Call site argument number mismatch!");
453  assert(cast<CallBase>(*AnchorVal).getArgOperand(getArgNo()) ==
454  &getAssociatedValue() &&
455  "Associated value mismatch!");
456  }
457  break;
458  case IRP_INVALID:
459  assert(!AnchorVal && "Expected no value for an invalid position!");
460  break;
461  case IRP_FLOAT:
462  assert((!isa<CallBase>(&getAssociatedValue()) &&
463  !isa<Argument>(&getAssociatedValue())) &&
464  "Expected specialized kind for call base and argument values!");
465  break;
466  case IRP_RETURNED:
467  assert(isa<Function>(AnchorVal) &&
468  "Expected function for a 'returned' position!");
469  assert(AnchorVal == &getAssociatedValue() && "Associated value mismatch!");
470  break;
471  case IRP_CALL_SITE_RETURNED:
472  assert((isa<CallBase>(AnchorVal)) &&
473  "Expected call base for 'call site returned' position!");
474  assert(AnchorVal == &getAssociatedValue() && "Associated value mismatch!");
475  break;
476  case IRP_CALL_SITE:
477  assert((isa<CallBase>(AnchorVal)) &&
478  "Expected call base for 'call site function' position!");
479  assert(AnchorVal == &getAssociatedValue() && "Associated value mismatch!");
480  break;
481  case IRP_FUNCTION:
482  assert(isa<Function>(AnchorVal) &&
483  "Expected function for a 'function' position!");
484  assert(AnchorVal == &getAssociatedValue() && "Associated value mismatch!");
485  break;
486  }
487 }
488 
489 /// Helper functions to clamp a state \p S of type \p StateType with the
490 /// information in \p R and indicate/return if \p S did change (as-in update is
491 /// required to be run again).
492 ///
493 ///{
494 template <typename StateType>
495 ChangeStatus clampStateAndIndicateChange(StateType &S, const StateType &R);
496 
497 template <>
498 ChangeStatus clampStateAndIndicateChange<IntegerState>(IntegerState &S,
499  const IntegerState &R) {
500  auto Assumed = S.getAssumed();
501  S ^= R;
502  return Assumed == S.getAssumed() ? ChangeStatus::UNCHANGED
503  : ChangeStatus::CHANGED;
504 }
505 
506 template <>
507 ChangeStatus clampStateAndIndicateChange<BooleanState>(BooleanState &S,
508  const BooleanState &R) {
509  return clampStateAndIndicateChange<IntegerState>(S, R);
510 }
511 ///}
512 
513 /// Clamp the information known for all returned values of a function
514 /// (identified by \p QueryingAA) into \p S.
515 template <typename AAType, typename StateType = typename AAType::StateType>
516 static void clampReturnedValueStates(Attributor &A, const AAType &QueryingAA,
517  StateType &S) {
518  LLVM_DEBUG(dbgs() << "[Attributor] Clamp return value states for "
519  << static_cast<const AbstractAttribute &>(QueryingAA)
520  << " into " << S << "\n");
521 
522  assert((QueryingAA.getIRPosition().getPositionKind() ==
524  QueryingAA.getIRPosition().getPositionKind() ==
526  "Can only clamp returned value states for a function returned or call "
527  "site returned position!");
528 
529  // Use an optional state as there might not be any return values and we want
530  // to join (IntegerState::operator&) the state of all there are.
531  Optional<StateType> T;
532 
533  // Callback for each possibly returned value.
534  auto CheckReturnValue = [&](Value &RV) -> bool {
535  const IRPosition &RVPos = IRPosition::value(RV);
536  const AAType &AA = A.getAAFor<AAType>(QueryingAA, RVPos);
537  LLVM_DEBUG(dbgs() << "[Attributor] RV: " << RV << " AA: " << AA.getAsStr()
538  << " @ " << RVPos << "\n");
539  const StateType &AAS = static_cast<const StateType &>(AA.getState());
540  if (T.hasValue())
541  *T &= AAS;
542  else
543  T = AAS;
544  LLVM_DEBUG(dbgs() << "[Attributor] AA State: " << AAS << " RV State: " << T
545  << "\n");
546  return T->isValidState();
547  };
548 
549  if (!A.checkForAllReturnedValues(CheckReturnValue, QueryingAA))
550  S.indicatePessimisticFixpoint();
551  else if (T.hasValue())
552  S ^= *T;
553 }
554 
555 /// Helper class for generic deduction: return value -> returned position.
556 template <typename AAType, typename Base,
557  typename StateType = typename AAType::StateType>
558 struct AAReturnedFromReturnedValues : public Base {
559  AAReturnedFromReturnedValues(const IRPosition &IRP) : Base(IRP) {}
560 
561  /// See AbstractAttribute::updateImpl(...).
563  StateType S;
564  clampReturnedValueStates<AAType, StateType>(A, *this, S);
565  // TODO: If we know we visited all returned values, and none are assumed
566  // dead, we can take the known information from the state T.
567  return clampStateAndIndicateChange<StateType>(this->getState(), S);
568  }
569 };
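// Illustration: concrete attributes instantiate this helper as, e.g.,
// AAReturnedFromReturnedValues<AANonNull, AANonNullImpl> (see AANonNullReturned
// further below), so the returned position simply aggregates the states of all
// potentially returned values.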
570 
571 /// Clamp the information known at all call sites for a given argument
572 /// (identified by \p QueryingAA) into \p S.
573 template <typename AAType, typename StateType = typename AAType::StateType>
574 static void clampCallSiteArgumentStates(Attributor &A, const AAType &QueryingAA,
575  StateType &S) {
576  LLVM_DEBUG(dbgs() << "[Attributor] Clamp call site argument states for "
577  << static_cast<const AbstractAttribute &>(QueryingAA)
578  << " into " << S << "\n");
579 
580  assert(QueryingAA.getIRPosition().getPositionKind() ==
582  "Can only clamp call site argument states for an argument position!");
583 
584  // Use an optional state as there might not be any return values and we want
585  // to join (IntegerState::operator&) the state of all there are.
586  Optional<StateType> T;
587 
588  // The argument number which is also the call site argument number.
589  unsigned ArgNo = QueryingAA.getIRPosition().getArgNo();
590 
591  auto CallSiteCheck = [&](CallSite CS) {
592  const IRPosition &CSArgPos = IRPosition::callsite_argument(CS, ArgNo);
593  const AAType &AA = A.getAAFor<AAType>(QueryingAA, CSArgPos);
594  LLVM_DEBUG(dbgs() << "[Attributor] CS: " << *CS.getInstruction()
595  << " AA: " << AA.getAsStr() << " @" << CSArgPos << "\n");
596  const StateType &AAS = static_cast<const StateType &>(AA.getState());
597  if (T.hasValue())
598  *T &= AAS;
599  else
600  T = AAS;
601  LLVM_DEBUG(dbgs() << "[Attributor] AA State: " << AAS << " CSA State: " << T
602  << "\n");
603  return T->isValidState();
604  };
605 
606  if (!A.checkForAllCallSites(CallSiteCheck, QueryingAA, true))
607  S.indicatePessimisticFixpoint();
608  else if (T.hasValue())
609  S ^= *T;
610 }
611 
612 /// Helper class for generic deduction: call site argument -> argument position.
613 template <typename AAType, typename Base,
614  typename StateType = typename AAType::StateType>
615 struct AAArgumentFromCallSiteArguments : public Base {
616  AAArgumentFromCallSiteArguments(const IRPosition &IRP) : Base(IRP) {}
617 
618  /// See AbstractAttribute::updateImpl(...).
620  StateType S;
621  clampCallSiteArgumentStates<AAType, StateType>(A, *this, S);
622  // TODO: If we know we visited all incoming values, and none are assumed
623  // dead, we can take the known information from the state T.
624  return clampStateAndIndicateChange<StateType>(this->getState(), S);
625  }
626 };
627 
628 /// Helper class for generic replication: function returned -> cs returned.
629 template <typename AAType, typename Base>
630 struct AACallSiteReturnedFromReturned : public Base {
631  AACallSiteReturnedFromReturned(const IRPosition &IRP) : Base(IRP) {}
632 
633  /// See AbstractAttribute::updateImpl(...).
635  assert(this->getIRPosition().getPositionKind() ==
637  "Can only wrap function returned positions for call site returned "
638  "positions!");
639  auto &S = this->getState();
640 
641  const Function *AssociatedFunction =
643  if (!AssociatedFunction)
644  return S.indicatePessimisticFixpoint();
645 
646  IRPosition FnPos = IRPosition::returned(*AssociatedFunction);
647  const AAType &AA = A.getAAFor<AAType>(*this, FnPos);
649  S, static_cast<const typename AAType::StateType &>(AA.getState()));
650  }
651 };
652 
653 /// -----------------------NoUnwind Function Attribute--------------------------
654 
655 struct AANoUnwindImpl : AANoUnwind {
656  AANoUnwindImpl(const IRPosition &IRP) : AANoUnwind(IRP) {}
657 
658  const std::string getAsStr() const override {
659  return getAssumed() ? "nounwind" : "may-unwind";
660  }
661 
662  /// See AbstractAttribute::updateImpl(...).
664  auto Opcodes = {
665  (unsigned)Instruction::Invoke, (unsigned)Instruction::CallBr,
666  (unsigned)Instruction::Call, (unsigned)Instruction::CleanupRet,
667  (unsigned)Instruction::CatchSwitch, (unsigned)Instruction::Resume};
668 
669  auto CheckForNoUnwind = [&](Instruction &I) {
670  if (!I.mayThrow())
671  return true;
672 
673  if (ImmutableCallSite ICS = ImmutableCallSite(&I)) {
674  const auto &NoUnwindAA =
676  return NoUnwindAA.isAssumedNoUnwind();
677  }
678  return false;
679  };
680 
681  if (!A.checkForAllInstructions(CheckForNoUnwind, *this, Opcodes))
682  return indicatePessimisticFixpoint();
683 
685  }
686 };
687 
688 struct AANoUnwindFunction final : public AANoUnwindImpl {
690 
691  /// See AbstractAttribute::trackStatistics()
692  void trackStatistics() const override { STATS_DECLTRACK_FN_ATTR(nounwind) }
693 };
694 
695 /// NoUnwind attribute deduction for a call site.
698 
699  /// See AbstractAttribute::initialize(...).
700  void initialize(Attributor &A) override {
702  Function *F = getAssociatedFunction();
703  if (!F)
704  indicatePessimisticFixpoint();
705  }
706 
707  /// See AbstractAttribute::updateImpl(...).
709  // TODO: Once we have call site specific value information we can provide
710  // call site specific liveness information and then it makes
711  // sense to specialize attributes for call sites arguments instead of
712  // redirecting requests to the callee argument.
713  Function *F = getAssociatedFunction();
714  const IRPosition &FnPos = IRPosition::function(*F);
715  auto &FnAA = A.getAAFor<AANoUnwind>(*this, FnPos);
717  getState(),
718  static_cast<const AANoUnwind::StateType &>(FnAA.getState()));
719  }
720 
721  /// See AbstractAttribute::trackStatistics()
722  void trackStatistics() const override { STATS_DECLTRACK_CS_ATTR(nounwind); }
723 };
724 
725 /// --------------------- Function Return Values -------------------------------
726 
727 /// "Attribute" that collects all potential returned values and the return
728 /// instructions that they arise from.
729 ///
730 /// If there is a unique returned value R, the manifest method will:
731 /// - mark R with the "returned" attribute, if R is an argument.
733 
734  /// Mapping of values potentially returned by the associated function to the
735  /// return instructions that might return them.
737 
738  /// Mapping to remember the number of returned values for a call site such
739  /// that we can avoid updates if nothing changed.
740  DenseMap<const CallBase *, unsigned> NumReturnedValuesPerKnownAA;
741 
742  /// Set of unresolved calls returned by the associated function.
743  SmallSetVector<CallBase *, 4> UnresolvedCalls;
744 
745  /// State flags
746  ///
747  ///{
748  bool IsFixed = false;
749  bool IsValidState = true;
750  ///}
751 
752 public:
754 
755  /// See AbstractAttribute::initialize(...).
756  void initialize(Attributor &A) override {
757  // Reset the state.
758  IsFixed = false;
759  IsValidState = true;
760  ReturnedValues.clear();
761 
762  Function *F = getAssociatedFunction();
763  if (!F) {
764  indicatePessimisticFixpoint();
765  return;
766  }
767 
768  // The map from instruction opcodes to those instructions in the function.
769  auto &OpcodeInstMap = A.getInfoCache().getOpcodeInstMapForFunction(*F);
770 
771  // Look through all arguments, if one is marked as returned we are done.
772  for (Argument &Arg : F->args()) {
773  if (Arg.hasReturnedAttr()) {
774  auto &ReturnInstSet = ReturnedValues[&Arg];
775  for (Instruction *RI : OpcodeInstMap[Instruction::Ret])
776  ReturnInstSet.insert(cast<ReturnInst>(RI));
777 
778  indicateOptimisticFixpoint();
779  return;
780  }
781  }
782 
783  if (!F->hasExactDefinition())
784  indicatePessimisticFixpoint();
785  }
786 
787  /// See AbstractAttribute::manifest(...).
788  ChangeStatus manifest(Attributor &A) override;
789 
790  /// See AbstractAttribute::getState(...).
791  AbstractState &getState() override { return *this; }
792 
793  /// See AbstractAttribute::getState(...).
794  const AbstractState &getState() const override { return *this; }
795 
796  /// See AbstractAttribute::updateImpl(Attributor &A).
797  ChangeStatus updateImpl(Attributor &A) override;
798 
800  return llvm::make_range(ReturnedValues.begin(), ReturnedValues.end());
801  }
802 
804  return llvm::make_range(ReturnedValues.begin(), ReturnedValues.end());
805  }
806 
808  return UnresolvedCalls;
809  }
810 
811  /// Return the number of potential return values, -1 if unknown.
812  size_t getNumReturnValues() const override {
813  return isValidState() ? ReturnedValues.size() : -1;
814  }
815 
816  /// Return an assumed unique return value if a single candidate is found. If
817  /// there cannot be one, return a nullptr. If it is not clear yet, return the
818  /// Optional::NoneType.
819  Optional<Value *> getAssumedUniqueReturnValue(Attributor &A) const;
820 
821  /// See AbstractState::checkForAllReturnedValues(...).
822  bool checkForAllReturnedValuesAndReturnInsts(
823  const function_ref<bool(Value &, const SmallSetVector<ReturnInst *, 4> &)>
824  &Pred) const override;
825 
826  /// Pretty print the attribute similar to the IR representation.
827  const std::string getAsStr() const override;
828 
829  /// See AbstractState::isAtFixpoint().
830  bool isAtFixpoint() const override { return IsFixed; }
831 
832  /// See AbstractState::isValidState().
833  bool isValidState() const override { return IsValidState; }
834 
835  /// See AbstractState::indicateOptimisticFixpoint(...).
837  IsFixed = true;
839  }
840 
842  IsFixed = true;
843  IsValidState = false;
844  return ChangeStatus::CHANGED;
845  }
846 };
847 
850 
851  // Bookkeeping.
852  assert(isValidState());
853  STATS_DECLTRACK(KnownReturnValues, FunctionReturn,
854  "Number of function with known return values");
855 
856  // Check if we have an assumed unique return value that we could manifest.
857  Optional<Value *> UniqueRV = getAssumedUniqueReturnValue(A);
858 
859  if (!UniqueRV.hasValue() || !UniqueRV.getValue())
860  return Changed;
861 
862  // Bookkeeping.
863  STATS_DECLTRACK(UniqueReturnValue, FunctionReturn,
864  "Number of function with unique return");
865 
866  // Callback to replace the uses of CB with the constant C.
867  auto ReplaceCallSiteUsersWith = [](CallBase &CB, Constant &C) {
868  if (CB.getNumUses() == 0)
870  CB.replaceAllUsesWith(&C);
871  return ChangeStatus::CHANGED;
872  };
873 
874  // If the assumed unique return value is an argument, annotate it.
875  if (auto *UniqueRVArg = dyn_cast<Argument>(UniqueRV.getValue())) {
876  getIRPosition() = IRPosition::argument(*UniqueRVArg);
877  Changed = IRAttribute::manifest(A);
878  } else if (auto *RVC = dyn_cast<Constant>(UniqueRV.getValue())) {
879  // We can replace the returned value with the unique returned constant.
880  Value &AnchorValue = getAnchorValue();
881  if (Function *F = dyn_cast<Function>(&AnchorValue)) {
882  for (const Use &U : F->uses())
883  if (CallBase *CB = dyn_cast<CallBase>(U.getUser()))
884  if (CB->isCallee(&U)) {
885  Constant *RVCCast =
887  Changed = ReplaceCallSiteUsersWith(*CB, *RVCCast) | Changed;
888  }
889  } else {
890  assert(isa<CallBase>(AnchorValue) &&
891  "Expected a function or call base anchor!");
892  Constant *RVCCast =
893  ConstantExpr::getTruncOrBitCast(RVC, AnchorValue.getType());
894  Changed = ReplaceCallSiteUsersWith(cast<CallBase>(AnchorValue), *RVCCast);
895  }
896  if (Changed == ChangeStatus::CHANGED)
897  STATS_DECLTRACK(UniqueConstantReturnValue, FunctionReturn,
898  "Number of function returns replaced by constant return");
899  }
900 
901  return Changed;
902 }
903 
904 const std::string AAReturnedValuesImpl::getAsStr() const {
905  return (isAtFixpoint() ? "returns(#" : "may-return(#") +
906  (isValidState() ? std::to_string(getNumReturnValues()) : "?") +
907  ")[#UC: " + std::to_string(UnresolvedCalls.size()) + "]";
908 }
909 
912  // If checkForAllReturnedValues provides a unique value, ignoring potential
913  // undef values that can also be present, it is assumed to be the actual
914  // return value and forwarded to the caller of this method. If there are
915  // multiple, a nullptr is returned indicating there cannot be a unique
916  // returned value.
917  Optional<Value *> UniqueRV;
918 
919  auto Pred = [&](Value &RV) -> bool {
920  // If we found a second returned value and neither the current nor the saved
921  // one is an undef, there is no unique returned value. Undefs are special
922  // since we can pretend they have any value.
923  if (UniqueRV.hasValue() && UniqueRV != &RV &&
924  !(isa<UndefValue>(RV) || isa<UndefValue>(UniqueRV.getValue()))) {
925  UniqueRV = nullptr;
926  return false;
927  }
928 
929  // Do not overwrite a value with an undef.
930  if (!UniqueRV.hasValue() || !isa<UndefValue>(RV))
931  UniqueRV = &RV;
932 
933  return true;
934  };
935 
936  if (!A.checkForAllReturnedValues(Pred, *this))
937  UniqueRV = nullptr;
938 
939  return UniqueRV;
940 }
941 
943  const function_ref<bool(Value &, const SmallSetVector<ReturnInst *, 4> &)>
944  &Pred) const {
945  if (!isValidState())
946  return false;
947 
948  // Check all returned values but ignore call sites as long as we have not
949  // encountered an overdefined one during an update.
950  for (auto &It : ReturnedValues) {
951  Value *RV = It.first;
952 
953  CallBase *CB = dyn_cast<CallBase>(RV);
954  if (CB && !UnresolvedCalls.count(CB))
955  continue;
956 
957  if (!Pred(*RV, It.second))
958  return false;
959  }
960 
961  return true;
962 }
963 
965  size_t NumUnresolvedCalls = UnresolvedCalls.size();
966  bool Changed = false;
967 
968  // State used in the value traversals starting in returned values.
969  struct RVState {
970  // The map in which we collect return values -> return instrs.
971  decltype(ReturnedValues) &RetValsMap;
972  // The flag to indicate a change.
973  bool &Changed;
974  // The return instrs we come from.
976  };
977 
978  // Callback for a leaf value returned by the associated function.
979  auto VisitValueCB = [](Value &Val, RVState &RVS, bool) -> bool {
980  auto Size = RVS.RetValsMap[&Val].size();
981  RVS.RetValsMap[&Val].insert(RVS.RetInsts.begin(), RVS.RetInsts.end());
982  bool Inserted = RVS.RetValsMap[&Val].size() != Size;
983  RVS.Changed |= Inserted;
984  LLVM_DEBUG({
985  if (Inserted)
986  dbgs() << "[AAReturnedValues] 1 Add new returned value " << Val
987  << " => " << RVS.RetInsts.size() << "\n";
988  });
989  return true;
990  };
991 
992  // Helper method to invoke the generic value traversal.
993  auto VisitReturnedValue = [&](Value &RV, RVState &RVS) {
994  IRPosition RetValPos = IRPosition::value(RV);
995  return genericValueTraversal<AAReturnedValues, RVState>(A, RetValPos, *this,
996  RVS, VisitValueCB);
997  };
998 
999  // Callback for all "return instructions" live in the associated function.
1000  auto CheckReturnInst = [this, &VisitReturnedValue, &Changed](Instruction &I) {
1001  ReturnInst &Ret = cast<ReturnInst>(I);
1002  RVState RVS({ReturnedValues, Changed, {}});
1003  RVS.RetInsts.insert(&Ret);
1004  return VisitReturnedValue(*Ret.getReturnValue(), RVS);
1005  };
1006 
1007  // Start by discovering returned values from all live return instructions
1008  // in the associated function.
1009  if (!A.checkForAllInstructions(CheckReturnInst, *this, {Instruction::Ret}))
1010  return indicatePessimisticFixpoint();
1011 
1012  // Once returned values "directly" present in the code are handled we try to
1013  // resolve returned calls.
1014  decltype(ReturnedValues) NewRVsMap;
1015  for (auto &It : ReturnedValues) {
1016  LLVM_DEBUG(dbgs() << "[AAReturnedValues] Returned value: " << *It.first
1017  << " by #" << It.second.size() << " RIs\n");
1018  CallBase *CB = dyn_cast<CallBase>(It.first);
1019  if (!CB || UnresolvedCalls.count(CB))
1020  continue;
1021 
1022  if (!CB->getCalledFunction()) {
1023  LLVM_DEBUG(dbgs() << "[AAReturnedValues] Unresolved call: " << *CB
1024  << "\n");
1025  UnresolvedCalls.insert(CB);
1026  continue;
1027  }
1028 
1029  // TODO: use the function scope once we have call site AAReturnedValues.
1030  const auto &RetValAA = A.getAAFor<AAReturnedValues>(
1031  *this, IRPosition::function(*CB->getCalledFunction()));
1032  LLVM_DEBUG(dbgs() << "[AAReturnedValues] Found another AAReturnedValues: "
1033  << static_cast<const AbstractAttribute &>(RetValAA)
1034  << "\n");
1035 
1036  // Skip dead ends: if we do not know anything about the returned
1037  // call, we mark it as unresolved and it will stay that way.
1038  if (!RetValAA.getState().isValidState()) {
1039  LLVM_DEBUG(dbgs() << "[AAReturnedValues] Unresolved call: " << *CB
1040  << "\n");
1041  UnresolvedCalls.insert(CB);
1042  continue;
1043  }
1044 
1045  // Do not try to learn partial information. If the callee has unresolved
1046  // return values we will treat the call as unresolved/opaque.
1047  auto &RetValAAUnresolvedCalls = RetValAA.getUnresolvedCalls();
1048  if (!RetValAAUnresolvedCalls.empty()) {
1049  UnresolvedCalls.insert(CB);
1050  continue;
1051  }
1052 
1053  // Now check if we can track transitively returned values. If possible, that
1054  // is, if all return values can be represented in the current scope, do so.
1055  bool Unresolved = false;
1056  for (auto &RetValAAIt : RetValAA.returned_values()) {
1057  Value *RetVal = RetValAAIt.first;
1058  if (isa<Argument>(RetVal) || isa<CallBase>(RetVal) ||
1059  isa<Constant>(RetVal))
1060  continue;
1061  // Anything that did not fit in the above categories cannot be resolved,
1062  // mark the call as unresolved.
1063  LLVM_DEBUG(dbgs() << "[AAReturnedValues] transitively returned value "
1064  "cannot be translated: "
1065  << *RetVal << "\n");
1066  UnresolvedCalls.insert(CB);
1067  Unresolved = true;
1068  break;
1069  }
1070 
1071  if (Unresolved)
1072  continue;
1073 
1074  // Now track transitively returned values.
1075  unsigned &NumRetAA = NumReturnedValuesPerKnownAA[CB];
1076  if (NumRetAA == RetValAA.getNumReturnValues()) {
1077  LLVM_DEBUG(dbgs() << "[AAReturnedValues] Skip call as it has not "
1078  "changed since it was seen last\n");
1079  continue;
1080  }
1081  NumRetAA = RetValAA.getNumReturnValues();
1082 
1083  for (auto &RetValAAIt : RetValAA.returned_values()) {
1084  Value *RetVal = RetValAAIt.first;
1085  if (Argument *Arg = dyn_cast<Argument>(RetVal)) {
1086  // Arguments are mapped to call site operands and we begin the traversal
1087  // again.
1088  bool Unused = false;
1089  RVState RVS({NewRVsMap, Unused, RetValAAIt.second});
1090  VisitReturnedValue(*CB->getArgOperand(Arg->getArgNo()), RVS);
1091  continue;
1092  } else if (isa<CallBase>(RetVal)) {
1093  // Call sites are resolved by the callee attribute over time, no need to
1094  // do anything for us.
1095  continue;
1096  } else if (isa<Constant>(RetVal)) {
1097  // Constants are valid everywhere, we can simply take them.
1098  NewRVsMap[RetVal].insert(It.second.begin(), It.second.end());
1099  continue;
1100  }
1101  }
1102  }
1103 
1104  // To avoid modifications to the ReturnedValues map while we iterate over it,
1105  // we kept a record of potential new entries in a copy map, NewRVsMap.
1106  for (auto &It : NewRVsMap) {
1107  assert(!It.second.empty() && "Entry does not add anything.");
1108  auto &ReturnInsts = ReturnedValues[It.first];
1109  for (ReturnInst *RI : It.second)
1110  if (ReturnInsts.insert(RI)) {
1111  LLVM_DEBUG(dbgs() << "[AAReturnedValues] Add new returned value "
1112  << *It.first << " => " << *RI << "\n");
1113  Changed = true;
1114  }
1115  }
1116 
1117  Changed |= (NumUnresolvedCalls != UnresolvedCalls.size());
1119 }
1120 
1123 
1124  /// See AbstractAttribute::trackStatistics()
1125  void trackStatistics() const override { STATS_DECLTRACK_ARG_ATTR(returned) }
1126 };
1127 
1128 /// Returned values information for a call site.
1131 
1132  /// See AbstractAttribute::initialize(...).
1133  void initialize(Attributor &A) override {
1134  // TODO: Once we have call site specific value information we can provide
1135  // call site specific liveness information and then it makes
1136  // sense to specialize attributes for call sites instead of
1137  // redirecting requests to the callee.
1138  llvm_unreachable("Abstract attributes for returned values are not "
1139  "supported for call sites yet!");
1140  }
1141 
1142  /// See AbstractAttribute::updateImpl(...).
1144  return indicatePessimisticFixpoint();
1145  }
1146 
1147  /// See AbstractAttribute::trackStatistics()
1148  void trackStatistics() const override {}
1149 };
1150 
1151 /// ------------------------ NoSync Function Attribute -------------------------
1152 
1153 struct AANoSyncImpl : AANoSync {
1154  AANoSyncImpl(const IRPosition &IRP) : AANoSync(IRP) {}
1155 
1156  const std::string getAsStr() const override {
1157  return getAssumed() ? "nosync" : "may-sync";
1158  }
1159 
1160  /// See AbstractAttribute::updateImpl(...).
1161  ChangeStatus updateImpl(Attributor &A) override;
1162 
1163  /// Helper function used to determine whether an instruction is non-relaxed
1164  /// atomic, i.e., an atomic instruction that does not have unordered or
1165  /// monotonic ordering.
1166  static bool isNonRelaxedAtomic(Instruction *I);
1167 
1168  /// Helper function used to determine whether an instruction is volatile.
1169  static bool isVolatile(Instruction *I);
1170 
1171  /// Helper function used to check if an intrinsic is volatile (memcpy,
1172  /// memmove, memset).
1173  static bool isNoSyncIntrinsic(Instruction *I);
1174 };
1175 
1177  if (!I->isAtomic())
1178  return false;
1179 
1180  AtomicOrdering Ordering;
1181  switch (I->getOpcode()) {
1182  case Instruction::AtomicRMW:
1183  Ordering = cast<AtomicRMWInst>(I)->getOrdering();
1184  break;
1185  case Instruction::Store:
1186  Ordering = cast<StoreInst>(I)->getOrdering();
1187  break;
1188  case Instruction::Load:
1189  Ordering = cast<LoadInst>(I)->getOrdering();
1190  break;
1191  case Instruction::Fence: {
1192  auto *FI = cast<FenceInst>(I);
1193  if (FI->getSyncScopeID() == SyncScope::SingleThread)
1194  return false;
1195  Ordering = FI->getOrdering();
1196  break;
1197  }
1198  case Instruction::AtomicCmpXchg: {
1199  AtomicOrdering Success = cast<AtomicCmpXchgInst>(I)->getSuccessOrdering();
1200  AtomicOrdering Failure = cast<AtomicCmpXchgInst>(I)->getFailureOrdering();
1201  // Only if both are relaxed can it be treated as relaxed.
1202  // Otherwise it is non-relaxed.
1203  if (Success != AtomicOrdering::Unordered &&
1204  Success != AtomicOrdering::Monotonic)
1205  return true;
1206  if (Failure != AtomicOrdering::Unordered &&
1207  Failure != AtomicOrdering::Monotonic)
1208  return true;
1209  return false;
1210  }
1211  default:
1213  "New atomic operations need to be known in the attributor.");
1214  }
1215 
1216  // Relaxed.
1217  if (Ordering == AtomicOrdering::Unordered ||
1218  Ordering == AtomicOrdering::Monotonic)
1219  return false;
1220  return true;
1221 }
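// Illustration (not from the original source): an unordered or monotonic
// atomic load is treated as relaxed (returns false), whereas an acquire load
// or a non-singlethread fence is non-relaxed (returns true).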
1222 
1223 /// Checks if an intrinsic is nosync. Currently only checks mem* intrinsics.
1224 /// FIXME: We should improve the handling of intrinsics.
1226  if (auto *II = dyn_cast<IntrinsicInst>(I)) {
1227  switch (II->getIntrinsicID()) {
1228  /// Element-wise atomic memory intrinsics can only be unordered,
1229  /// therefore nosync.
1230  case Intrinsic::memset_element_unordered_atomic:
1231  case Intrinsic::memmove_element_unordered_atomic:
1232  case Intrinsic::memcpy_element_unordered_atomic:
1233  return true;
1234  case Intrinsic::memset:
1235  case Intrinsic::memmove:
1236  case Intrinsic::memcpy:
1237  if (!cast<MemIntrinsic>(II)->isVolatile())
1238  return true;
1239  return false;
1240  default:
1241  return false;
1242  }
1243  }
1244  return false;
1245 }
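// Illustration (not from the original source): a non-volatile llvm.memcpy is
// considered nosync here (returns true), a volatile one is not (returns
// false), and any intrinsic outside the handled mem* family conservatively
// returns false.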
1246 
1248  assert(!ImmutableCallSite(I) && !isa<CallBase>(I) &&
1249  "Calls should not be checked here");
1250 
1251  switch (I->getOpcode()) {
1252  case Instruction::AtomicRMW:
1253  return cast<AtomicRMWInst>(I)->isVolatile();
1254  case Instruction::Store:
1255  return cast<StoreInst>(I)->isVolatile();
1256  case Instruction::Load:
1257  return cast<LoadInst>(I)->isVolatile();
1258  case Instruction::AtomicCmpXchg:
1259  return cast<AtomicCmpXchgInst>(I)->isVolatile();
1260  default:
1261  return false;
1262  }
1263 }
1264 
1266 
1267  auto CheckRWInstForNoSync = [&](Instruction &I) {
1268  /// We are looking for volatile instructions or Non-Relaxed atomics.
1269  /// FIXME: We should improve the handling of intrinsics.
1270 
1271  if (isa<IntrinsicInst>(&I) && isNoSyncIntrinsic(&I))
1272  return true;
1273 
1274  if (ImmutableCallSite ICS = ImmutableCallSite(&I)) {
1275  if (ICS.hasFnAttr(Attribute::NoSync))
1276  return true;
1277 
1278  const auto &NoSyncAA =
1280  if (NoSyncAA.isAssumedNoSync())
1281  return true;
1282  return false;
1283  }
1284 
1285  if (!isVolatile(&I) && !isNonRelaxedAtomic(&I))
1286  return true;
1287 
1288  return false;
1289  };
1290 
1291  auto CheckForNoSync = [&](Instruction &I) {
1292  // At this point we handled all read/write effects and they are all
1293  // nosync, so they can be skipped.
1294  if (I.mayReadOrWriteMemory())
1295  return true;
1296 
1297  // non-convergent and readnone imply nosync.
1298  return !ImmutableCallSite(&I).isConvergent();
1299  };
1300 
1301  if (!A.checkForAllReadWriteInstructions(CheckRWInstForNoSync, *this) ||
1302  !A.checkForAllCallLikeInstructions(CheckForNoSync, *this))
1303  return indicatePessimisticFixpoint();
1304 
1305  return ChangeStatus::UNCHANGED;
1306 }
1307 
1308 struct AANoSyncFunction final : public AANoSyncImpl {
1310 
1311  /// See AbstractAttribute::trackStatistics()
1312  void trackStatistics() const override { STATS_DECLTRACK_FN_ATTR(nosync) }
1313 };
1314 
1315 /// NoSync attribute deduction for a call site.
1318 
1319  /// See AbstractAttribute::initialize(...).
1320  void initialize(Attributor &A) override {
1322  Function *F = getAssociatedFunction();
1323  if (!F)
1324  indicatePessimisticFixpoint();
1325  }
1326 
1327  /// See AbstractAttribute::updateImpl(...).
1329  // TODO: Once we have call site specific value information we can provide
1330  // call site specific liveness information and then it makes
1331  // sense to specialize attributes for call sites arguments instead of
1332  // redirecting requests to the callee argument.
1333  Function *F = getAssociatedFunction();
1334  const IRPosition &FnPos = IRPosition::function(*F);
1335  auto &FnAA = A.getAAFor<AANoSync>(*this, FnPos);
1337  getState(), static_cast<const AANoSync::StateType &>(FnAA.getState()));
1338  }
1339 
1340  /// See AbstractAttribute::trackStatistics()
1341  void trackStatistics() const override { STATS_DECLTRACK_CS_ATTR(nosync); }
1342 };
1343 
1344 /// ------------------------ No-Free Attributes ----------------------------
1345 
1346 struct AANoFreeImpl : public AANoFree {
1347  AANoFreeImpl(const IRPosition &IRP) : AANoFree(IRP) {}
1348 
1349  /// See AbstractAttribute::updateImpl(...).
1351  auto CheckForNoFree = [&](Instruction &I) {
1352  ImmutableCallSite ICS(&I);
1353  if (ICS.hasFnAttr(Attribute::NoFree))
1354  return true;
1355 
1356  const auto &NoFreeAA =
1358  return NoFreeAA.isAssumedNoFree();
1359  };
1360 
1361  if (!A.checkForAllCallLikeInstructions(CheckForNoFree, *this))
1362  return indicatePessimisticFixpoint();
1363  return ChangeStatus::UNCHANGED;
1364  }
1365 
1366  /// See AbstractAttribute::getAsStr().
1367  const std::string getAsStr() const override {
1368  return getAssumed() ? "nofree" : "may-free";
1369  }
1370 };
1371 
1372 struct AANoFreeFunction final : public AANoFreeImpl {
1374 
1375  /// See AbstractAttribute::trackStatistics()
1376  void trackStatistics() const override { STATS_DECLTRACK_FN_ATTR(nofree) }
1377 };
1378 
1379 /// NoFree attribute deduction for a call site.
1382 
1383  /// See AbstractAttribute::initialize(...).
1384  void initialize(Attributor &A) override {
1386  Function *F = getAssociatedFunction();
1387  if (!F)
1388  indicatePessimisticFixpoint();
1389  }
1390 
1391  /// See AbstractAttribute::updateImpl(...).
1393  // TODO: Once we have call site specific value information we can provide
1394  // call site specific liveness information and then it makes
1395  // sense to specialize attributes for call sites arguments instead of
1396  // redirecting requests to the callee argument.
1397  Function *F = getAssociatedFunction();
1398  const IRPosition &FnPos = IRPosition::function(*F);
1399  auto &FnAA = A.getAAFor<AANoFree>(*this, FnPos);
1401  getState(), static_cast<const AANoFree::StateType &>(FnAA.getState()));
1402  }
1403 
1404  /// See AbstractAttribute::trackStatistics()
1405  void trackStatistics() const override { STATS_DECLTRACK_CS_ATTR(nofree); }
1406 };
1407 
1408 /// ------------------------ NonNull Argument Attribute ------------------------
1409 struct AANonNullImpl : AANonNull {
1410  AANonNullImpl(const IRPosition &IRP) : AANonNull(IRP) {}
1411 
1412  /// See AbstractAttribute::initialize(...).
1413  void initialize(Attributor &A) override {
1414  if (hasAttr({Attribute::NonNull, Attribute::Dereferenceable}))
1415  indicateOptimisticFixpoint();
1416  else
1418  }
1419 
1420  /// See AbstractAttribute::getAsStr().
1421  const std::string getAsStr() const override {
1422  return getAssumed() ? "nonnull" : "may-null";
1423  }
1424 };
1425 
1426 /// NonNull attribute for a floating value.
1429 
1430  /// See AbstractAttribute::initialize(...).
1431  void initialize(Attributor &A) override {
1433 
1434  if (isAtFixpoint())
1435  return;
1436 
1437  const IRPosition &IRP = getIRPosition();
1438  const Value &V = IRP.getAssociatedValue();
1439  const DataLayout &DL = A.getDataLayout();
1440 
1441  // TODO: This context sensitive query should be removed once we can do
1442  // context sensitive queries in the genericValueTraversal below.
1443  if (isKnownNonZero(&V, DL, 0, /* TODO: AC */ nullptr, IRP.getCtxI(),
1444  /* TODO: DT */ nullptr))
1445  indicateOptimisticFixpoint();
1446  }
1447 
1448  /// See AbstractAttribute::updateImpl(...).
1450  const DataLayout &DL = A.getDataLayout();
1451 
1452  auto VisitValueCB = [&](Value &V, AAAlign::StateType &T,
1453  bool Stripped) -> bool {
1454  const auto &AA = A.getAAFor<AANonNull>(*this, IRPosition::value(V));
1455  if (!Stripped && this == &AA) {
1456  if (!isKnownNonZero(&V, DL, 0, /* TODO: AC */ nullptr,
1457  /* TODO: CtxI */ nullptr,
1458  /* TODO: DT */ nullptr))
1460  } else {
1461  // Use abstract attribute information.
1462  const AANonNull::StateType &NS =
1463  static_cast<const AANonNull::StateType &>(AA.getState());
1464  T ^= NS;
1465  }
1466  return T.isValidState();
1467  };
1468 
1469  StateType T;
1470  if (!genericValueTraversal<AANonNull, StateType>(A, getIRPosition(), *this,
1471  T, VisitValueCB))
1472  return indicatePessimisticFixpoint();
1473 
1475  }
1476 
1477  /// See AbstractAttribute::trackStatistics()
1478  void trackStatistics() const override { STATS_DECLTRACK_FNRET_ATTR(nonnull) }
1479 };
1480 
1481 /// NonNull attribute for function return value.
1482 struct AANonNullReturned final
1483  : AAReturnedFromReturnedValues<AANonNull, AANonNullImpl> {
1486 
1487  /// See AbstractAttribute::trackStatistics()
1488  void trackStatistics() const override { STATS_DECLTRACK_FNRET_ATTR(nonnull) }
1489 };
1490 
1491 /// NonNull attribute for function argument.
1492 struct AANonNullArgument final
1493  : AAArgumentFromCallSiteArguments<AANonNull, AANonNullImpl> {
1496 
1497  /// See AbstractAttribute::trackStatistics()
1498  void trackStatistics() const override { STATS_DECLTRACK_ARG_ATTR(nonnull) }
1499 };
1500 
1503 
1504  /// See AbstractAttribute::trackStatistics()
1505  void trackStatistics() const override { STATS_DECLTRACK_CSARG_ATTR(nonnull) }
1506 };
1507 
1508 /// NonNull attribute for a call site return position.
1510  : AACallSiteReturnedFromReturned<AANonNull, AANonNullImpl> {
1513 
1514  /// See AbstractAttribute::trackStatistics()
1515  void trackStatistics() const override { STATS_DECLTRACK_CSRET_ATTR(nonnull) }
1516 };
1517 
1518 /// ------------------------ No-Recurse Attributes ----------------------------
1519 
1520 struct AANoRecurseImpl : public AANoRecurse {
1522 
1523  /// See AbstractAttribute::getAsStr()
1524  const std::string getAsStr() const override {
1525  return getAssumed() ? "norecurse" : "may-recurse";
1526  }
1527 };
1528 
1531 
1532  /// See AbstractAttribute::updateImpl(...).
1534  // TODO: Implement this.
1535  return indicatePessimisticFixpoint();
1536  }
1537 
1538  void trackStatistics() const override { STATS_DECLTRACK_FN_ATTR(norecurse) }
1539 };
1540 
1541 /// NoRecurse attribute deduction for a call site.
1544 
1545  /// See AbstractAttribute::initialize(...).
1546  void initialize(Attributor &A) override {
1548  Function *F = getAssociatedFunction();
1549  if (!F)
1550  indicatePessimisticFixpoint();
1551  }
1552 
1553  /// See AbstractAttribute::updateImpl(...).
1555  // TODO: Once we have call site specific value information we can provide
1556  // call site specific liveness information and then it makes
1557  // sense to specialize attributes for call sites arguments instead of
1558  // redirecting requests to the callee argument.
1559  Function *F = getAssociatedFunction();
1560  const IRPosition &FnPos = IRPosition::function(*F);
1561  auto &FnAA = A.getAAFor<AANoRecurse>(*this, FnPos);
1563  getState(),
1564  static_cast<const AANoRecurse::StateType &>(FnAA.getState()));
1565  }
1566 
1567  /// See AbstractAttribute::trackStatistics()
1568  void trackStatistics() const override { STATS_DECLTRACK_CS_ATTR(norecurse); }
1569 };
1570 
1571 /// ------------------------ Will-Return Attributes ----------------------------
1572 
1573 // Helper function that checks whether a function has any cycle.
1574 // TODO: Replace with more efficient code
1575 static bool containsCycle(Function &F) {
1577 
1578  // Traverse BBs by DFS and check whether a successor is already visited.
1579  for (BasicBlock *BB : depth_first(&F)) {
1580  Visited.insert(BB);
1581  for (auto *SuccBB : successors(BB)) {
1582  if (Visited.count(SuccBB))
1583  return true;
1584  }
1585  }
1586  return false;
1587 }
1588 
1589 // Helper function that checks whether the function has a loop which might
1590 // become an endless loop.
1591 // FIXME: Any cycle is regarded as an endless loop for now.
1592 // We have to allow some patterns.
1594  return !F || !F->hasExactDefinition() || containsCycle(*F);
1595 }
1596 
1599 
1600  /// See AbstractAttribute::initialize(...).
1601  void initialize(Attributor &A) override {
1603 
1604  Function *F = getAssociatedFunction();
1606  indicatePessimisticFixpoint();
1607  }
1608 
1609  /// See AbstractAttribute::updateImpl(...).
1611  auto CheckForWillReturn = [&](Instruction &I) {
1613  const auto &WillReturnAA = A.getAAFor<AAWillReturn>(*this, IPos);
1614  if (WillReturnAA.isKnownWillReturn())
1615  return true;
1616  if (!WillReturnAA.isAssumedWillReturn())
1617  return false;
1618  const auto &NoRecurseAA = A.getAAFor<AANoRecurse>(*this, IPos);
1619  return NoRecurseAA.isAssumedNoRecurse();
1620  };
1621 
1622  if (!A.checkForAllCallLikeInstructions(CheckForWillReturn, *this))
1623  return indicatePessimisticFixpoint();
1624 
1625  return ChangeStatus::UNCHANGED;
1626  }
1627 
1628  /// See AbstractAttribute::getAsStr()
1629  const std::string getAsStr() const override {
1630  return getAssumed() ? "willreturn" : "may-noreturn";
1631  }
1632 };
1633 
1636 
1637  /// See AbstractAttribute::trackStatistics()
1638  void trackStatistics() const override { STATS_DECLTRACK_FN_ATTR(willreturn) }
1639 };
1640 
1641 /// WillReturn attribute deduction for a call site.
1644 
1645  /// See AbstractAttribute::initialize(...).
1646  void initialize(Attributor &A) override {
1648  Function *F = getAssociatedFunction();
1649  if (!F)
1650  indicatePessimisticFixpoint();
1651  }
1652 
1653  /// See AbstractAttribute::updateImpl(...).
1655  // TODO: Once we have call site specific value information we can provide
1656  // call site specific liveness information and then it makes
1657  // sense to specialize attributes for call sites arguments instead of
1658  // redirecting requests to the callee argument.
1659  Function *F = getAssociatedFunction();
1660  const IRPosition &FnPos = IRPosition::function(*F);
1661  auto &FnAA = A.getAAFor<AAWillReturn>(*this, FnPos);
1663  getState(),
1664  static_cast<const AAWillReturn::StateType &>(FnAA.getState()));
1665  }
1666 
1667  /// See AbstractAttribute::trackStatistics()
1668  void trackStatistics() const override { STATS_DECLTRACK_CS_ATTR(willreturn); }
1669 };
1670 
1671 /// ------------------------ NoAlias Argument Attribute ------------------------
1672 
1673 struct AANoAliasImpl : AANoAlias {
1674  AANoAliasImpl(const IRPosition &IRP) : AANoAlias(IRP) {}
1675 
1676  const std::string getAsStr() const override {
1677  return getAssumed() ? "noalias" : "may-alias";
1678  }
1679 };
1680 
1681 /// NoAlias attribute for a floating value.
1684 
1685  /// See AbstractAttribute::initialize(...).
1686  void initialize(Attributor &A) override {
1688  if (isa<AllocaInst>(getAnchorValue()))
1689  indicateOptimisticFixpoint();
1690  }
1691 
1692  /// See AbstractAttribute::updateImpl(...).
1694  // TODO: Implement this.
1695  return indicatePessimisticFixpoint();
1696  }
1697 
1698  /// See AbstractAttribute::trackStatistics()
1699  void trackStatistics() const override {
1701  }
1702 };
1703 
1704 /// NoAlias attribute for an argument.
1705 struct AANoAliasArgument final
1706  : AAArgumentFromCallSiteArguments<AANoAlias, AANoAliasImpl> {
1709 
1710  /// See AbstractAttribute::trackStatistics()
1711  void trackStatistics() const override { STATS_DECLTRACK_ARG_ATTR(noalias) }
1712 };
1713 
1716 
1717  /// See AbstractAttribute::initialize(...).
1718  void initialize(Attributor &A) override {
1719  // See callsite argument attribute and callee argument attribute.
1720  ImmutableCallSite ICS(&getAnchorValue());
1721  if (ICS.paramHasAttr(getArgNo(), Attribute::NoAlias))
1722  indicateOptimisticFixpoint();
1723  }
1724 
1725  /// See AbstractAttribute::updateImpl(...).
1727  // We can deduce "noalias" if the following conditions hold.
1728  // (i) Associated value is assumed to be noalias in the definition.
1729  // (ii) Associated value is assumed to be no-capture in all the uses
1730  // possibly executed before this callsite.
1731  // (iii) There is no other pointer argument which could alias with the
1732  // value.
1733 
1734  const Value &V = getAssociatedValue();
1735  const IRPosition IRP = IRPosition::value(V);
1736 
1737  // (i) Check whether noalias holds in the definition.
1738 
1739  auto &NoAliasAA = A.getAAFor<AANoAlias>(*this, IRP);
1740 
1741  if (!NoAliasAA.isAssumedNoAlias())
1742  return indicatePessimisticFixpoint();
1743 
1744  LLVM_DEBUG(dbgs() << "[Attributor][AANoAliasCSArg] " << V
1745  << " is assumed NoAlias in the definition\n");
1746 
1747  // (ii) Check whether the value is captured in the scope using AANoCapture.
1748  // FIXME: This is conservative though, it is better to look at CFG and
1749  // check only uses possibly executed before this callsite.
1750 
1751  auto &NoCaptureAA = A.getAAFor<AANoCapture>(*this, IRP);
1752  if (!NoCaptureAA.isAssumedNoCaptureMaybeReturned())
1753  return indicatePessimisticFixpoint();
1754 
1755  // (iii) Check there is no other pointer argument which could alias with the
1756  // value.
1757  ImmutableCallSite ICS(&getAnchorValue());
1758  for (unsigned i = 0; i < ICS.getNumArgOperands(); i++) {
1759  if (getArgNo() == (int)i)
1760  continue;
1761  const Value *ArgOp = ICS.getArgOperand(i);
1762  if (!ArgOp->getType()->isPointerTy())
1763  continue;
1764 
1765  if (const Function *F = getAnchorScope()) {
1766  if (AAResults *AAR = A.getInfoCache().getAAResultsForFunction(*F)) {
1767  LLVM_DEBUG(dbgs()
1768  << "[Attributor][NoAliasCSArg] Check alias between "
1769  "callsite arguments "
1770  << AAR->isNoAlias(&getAssociatedValue(), ArgOp) << " "
1771  << getAssociatedValue() << " " << *ArgOp << "\n");
1772 
1773  if (AAR->isNoAlias(&getAssociatedValue(), ArgOp))
1774  continue;
1775  }
1776  }
1777  return indicatePessimisticFixpoint();
1778  }
1779 
1780  return ChangeStatus::UNCHANGED;
1781  }
1782 
1783  /// See AbstractAttribute::trackStatistics()
1784  void trackStatistics() const override { STATS_DECLTRACK_CSARG_ATTR(noalias) }
1785 };
1786 
1787 /// NoAlias attribute for function return value.
1790 
1791  /// See AbstractAttribute::updateImpl(...).
1792  virtual ChangeStatus updateImpl(Attributor &A) override {
1793 
1794  auto CheckReturnValue = [&](Value &RV) -> bool {
1795  if (Constant *C = dyn_cast<Constant>(&RV))
1796  if (C->isNullValue() || isa<UndefValue>(C))
1797  return true;
1798 
1799  /// For now, we can only deduce noalias if we have call sites.
1800  /// FIXME: add more support.
1801  ImmutableCallSite ICS(&RV);
1802  if (!ICS)
1803  return false;
1804 
1805  const IRPosition &RVPos = IRPosition::value(RV);
1806  const auto &NoAliasAA = A.getAAFor<AANoAlias>(*this, RVPos);
1807  if (!NoAliasAA.isAssumedNoAlias())
1808  return false;
1809 
1810  const auto &NoCaptureAA = A.getAAFor<AANoCapture>(*this, RVPos);
1811  return NoCaptureAA.isAssumedNoCaptureMaybeReturned();
1812  };
1813 
1814  if (!A.checkForAllReturnedValues(CheckReturnValue, *this))
1815  return indicatePessimisticFixpoint();
1816 
1817  return ChangeStatus::UNCHANGED;
1818  }
1819 
1820  /// See AbstractAttribute::trackStatistics()
1821  void trackStatistics() const override { STATS_DECLTRACK_FNRET_ATTR(noalias) }
1822 };
1823 
1824 /// NoAlias attribute deduction for a call site return value.
1827 
1828  /// See AbstractAttribute::initialize(...).
1829  void initialize(Attributor &A) override {
1831  Function *F = getAssociatedFunction();
1832  if (!F)
1833  indicatePessimisticFixpoint();
1834  }
1835 
1836  /// See AbstractAttribute::updateImpl(...).
1837  ChangeStatus updateImpl(Attributor &A) override {
1838  // TODO: Once we have call site specific value information we can provide
1839  // call site specific liveness information and then it makes
1840  // sense to specialize attributes for call sites arguments instead of
1841  // redirecting requests to the callee argument.
1842  Function *F = getAssociatedFunction();
1843  const IRPosition &FnPos = IRPosition::returned(*F);
1844  auto &FnAA = A.getAAFor<AANoAlias>(*this, FnPos);
1845  return clampStateAndIndicateChange(
1846  getState(), static_cast<const AANoAlias::StateType &>(FnAA.getState()));
1847  }
1848 
1849  /// See AbstractAttribute::trackStatistics()
1850  void trackStatistics() const override { STATS_DECLTRACK_CSRET_ATTR(noalias); }
1851 };
1852 
1853 /// -------------------AAIsDead Function Attribute-----------------------
1854 
1855 struct AAIsDeadImpl : public AAIsDead {
1856  AAIsDeadImpl(const IRPosition &IRP) : AAIsDead(IRP) {}
1857 
1858  void initialize(Attributor &A) override {
1859  const Function *F = getAssociatedFunction();
1860  if (F && !F->isDeclaration())
1861  exploreFromEntry(A, F);
1862  }
1863 
1864  void exploreFromEntry(Attributor &A, const Function *F) {
1865  ToBeExploredPaths.insert(&(F->getEntryBlock().front()));
1866  assumeLive(A, F->getEntryBlock());
1867 
1868  for (size_t i = 0; i < ToBeExploredPaths.size(); ++i)
1869  if (const Instruction *NextNoReturnI =
1870  findNextNoReturn(A, ToBeExploredPaths[i]))
1871  NoReturnCalls.insert(NextNoReturnI);
1872  }
1873 
1874  /// Find the next assumed noreturn instruction in the block of \p I starting
1875  /// from, thus including, \p I.
1876  ///
1877  /// The caller is responsible for monitoring the ToBeExploredPaths set, as
1878  /// new instructions discovered in other basic blocks will be placed in there.
1879  ///
1880  /// \returns The next assumed noreturn instruction in the block of \p I
1881  /// starting from, thus including, \p I.
1882  const Instruction *findNextNoReturn(Attributor &A, const Instruction *I);
1883 
1884  /// See AbstractAttribute::getAsStr().
1885  const std::string getAsStr() const override {
1886  return "Live[#BB " + std::to_string(AssumedLiveBlocks.size()) + "/" +
1887  std::to_string(getAssociatedFunction()->size()) + "][#NRI " +
1888  std::to_string(NoReturnCalls.size()) + "]";
1889  }
1890 
1891  /// See AbstractAttribute::manifest(...).
1892  ChangeStatus manifest(Attributor &A) override {
1893  assert(getState().isValidState() &&
1894  "Attempted to manifest an invalid state!");
1895 
1896  ChangeStatus HasChanged = ChangeStatus::UNCHANGED;
1897  Function &F = *getAssociatedFunction();
1898 
1899  if (AssumedLiveBlocks.empty()) {
1900  A.deleteAfterManifest(F);
1901  return ChangeStatus::CHANGED;
1902  }
1903 
1904  // Flag to determine if we can change an invoke to a call assuming the
1905  // callee is nounwind. This is not possible if the personality of the
1906  // function allows catching asynchronous exceptions.
1907  bool Invoke2CallAllowed = !mayCatchAsynchronousExceptions(F);
1908 
1909  for (const Instruction *NRC : NoReturnCalls) {
1910  Instruction *I = const_cast<Instruction *>(NRC);
1911  BasicBlock *BB = I->getParent();
1912  Instruction *SplitPos = I->getNextNode();
1913  // TODO: mark stuff before unreachable instructions as dead.
1914  if (isa_and_nonnull<UnreachableInst>(SplitPos))
1915  continue;
1916 
1917  if (auto *II = dyn_cast<InvokeInst>(I)) {
1918  // If we keep the invoke the split position is at the beginning of the
1919  // normal destination block (it invokes a noreturn function after all).
1920  BasicBlock *NormalDestBB = II->getNormalDest();
1921  SplitPos = &NormalDestBB->front();
1922 
1923  /// Invoke is replaced with a call and unreachable is placed after it if
1924  /// the callee is nounwind and noreturn. Otherwise, we keep the invoke
1925  /// and only place an unreachable in the normal successor.
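  /// For example (illustrative IR only), an
  ///   invoke void @noret() to label %normal unwind label %lpad
  /// whose callee is also assumed nounwind can be rewritten as
  ///   call void @noret()
  ///   unreachable
  /// while a potentially unwinding callee keeps the invoke and only the edge
  /// to the normal destination is cut off.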
1926  if (Invoke2CallAllowed) {
1927  if (II->getCalledFunction()) {
1928  const IRPosition &IPos = IRPosition::callsite_function(*II);
1929  const auto &AANoUnw = A.getAAFor<AANoUnwind>(*this, IPos);
1930  if (AANoUnw.isAssumedNoUnwind()) {
1931  LLVM_DEBUG(dbgs()
1932  << "[AAIsDead] Replace invoke with call inst\n");
1933  // We do not need an invoke (II) but instead want a call followed
1934  // by an unreachable. However, we do not remove II as other
1935  // abstract attributes might have it cached as part of their
1936  // results. Given that we modify the CFG anyway, we simply keep II
1937  // around but in a new dead block. To avoid II being live through
1938  // a different edge we have to ensure the block we place it in is
1939  // only reached from the current block of II and then not reached
1940  // at all when we insert the unreachable.
1941  SplitBlockPredecessors(NormalDestBB, {BB}, ".i2c");
1943  CI->insertBefore(II);
1944  CI->takeName(II);
1945  II->replaceAllUsesWith(CI);
1946  SplitPos = CI->getNextNode();
1947  }
1948  }
1949  }
1950 
1951  if (SplitPos == &NormalDestBB->front()) {
1952  // If this is an invoke of a noreturn function the edge to the normal
1953  // destination block is dead but not necessarily the block itself.
1954  // TODO: We need to move to an edge based system during deduction and
1955  // also manifest.
1956  assert(!NormalDestBB->isLandingPad() &&
1957  "Expected the normal destination not to be a landingpad!");
1958  BasicBlock *SplitBB =
1959  SplitBlockPredecessors(NormalDestBB, {BB}, ".dead");
1960  // The split block is live even if it contains only an unreachable
1961  // instruction at the end.
1962  assumeLive(A, *SplitBB);
1963  SplitPos = SplitBB->getTerminator();
1964  }
1965  }
1966 
1967  BB = SplitPos->getParent();
1968  SplitBlock(BB, SplitPos);
1969  changeToUnreachable(BB->getTerminator(), /* UseLLVMTrap */ false);
1970  HasChanged = ChangeStatus::CHANGED;
1971  }
1972 
1973  for (BasicBlock &BB : F)
1974  if (!AssumedLiveBlocks.count(&BB))
1975  A.deleteAfterManifest(BB);
1976 
1977  return HasChanged;
1978  }
1979 
1980  /// See AbstractAttribute::updateImpl(...).
1981  ChangeStatus updateImpl(Attributor &A) override;
1982 
1983  /// See AAIsDead::isAssumedDead(BasicBlock *).
1984  bool isAssumedDead(const BasicBlock *BB) const override {
1985  assert(BB->getParent() == getAssociatedFunction() &&
1986  "BB must be in the same anchor scope function.");
1987 
1988  if (!getAssumed())
1989  return false;
1990  return !AssumedLiveBlocks.count(BB);
1991  }
1992 
1993  /// See AAIsDead::isKnownDead(BasicBlock *).
1994  bool isKnownDead(const BasicBlock *BB) const override {
1995  return getKnown() && isAssumedDead(BB);
1996  }
1997 
1998  /// See AAIsDead::isAssumed(Instruction *I).
1999  bool isAssumedDead(const Instruction *I) const override {
2000  assert(I->getParent()->getParent() == getAssociatedFunction() &&
2001  "Instruction must be in the same anchor scope function.");
2002 
2003  if (!getAssumed())
2004  return false;
2005 
2006  // If it is not in AssumedLiveBlocks then it is for sure dead.
2007  // Otherwise, it can still be after a noreturn call in a live block.
2008  if (!AssumedLiveBlocks.count(I->getParent()))
2009  return true;
2010 
2011  // If it is not after a noreturn call, then it is live.
2012  return isAfterNoReturn(I);
2013  }
2014 
2015  /// See AAIsDead::isKnownDead(Instruction *I).
2016  bool isKnownDead(const Instruction *I) const override {
2017  return getKnown() && isAssumedDead(I);
2018  }
2019 
2020  /// Check if the instruction is after a noreturn call, in other words, assumed dead.
2021  bool isAfterNoReturn(const Instruction *I) const;
2022 
2023  /// Determine if \p F might catch asynchronous exceptions.
2024  static bool mayCatchAsynchronousExceptions(const Function &F) {
2025  return F.hasPersonalityFn() && !canSimplifyInvokeNoUnwind(&F);
2026  }
2027 
2028  /// Assume \p BB is (partially) live now and indicate to the Attributor \p A
2029  /// that internal functions called from \p BB should now be looked at.
2030  void assumeLive(Attributor &A, const BasicBlock &BB) {
2031  if (!AssumedLiveBlocks.insert(&BB).second)
2032  return;
2033 
2034  // We assume that all of BB is (probably) live now and if there are calls to
2035  // internal functions we will assume that those are now live as well. This
2036  // is a performance optimization for blocks with calls to a lot of internal
2037  // functions. It can however cause dead functions to be treated as live.
2038  for (const Instruction &I : BB)
2039  if (ImmutableCallSite ICS = ImmutableCallSite(&I))
2040  if (const Function *F = ICS.getCalledFunction())
2041  if (F->hasInternalLinkage())
2042  A.markLiveInternalFunction(*F);
2043  }
2044 
2045  /// Collection of to be explored paths.
2046  SmallSetVector<const Instruction *, 8> ToBeExploredPaths;
2047 
2048  /// Collection of all assumed live BasicBlocks.
2049  DenseSet<const BasicBlock *> AssumedLiveBlocks;
2050 
2051  /// Collection of calls with noreturn attribute, assumed or known.
2052  SmallSetVector<const Instruction *, 4> NoReturnCalls;
2053 };
2054 
2055 struct AAIsDeadFunction final : public AAIsDeadImpl {
2056  AAIsDeadFunction(const IRPosition &IRP) : AAIsDeadImpl(IRP) {}
2057 
2058  /// See AbstractAttribute::trackStatistics()
2059  void trackStatistics() const override {
2060  STATS_DECL(PartiallyDeadBlocks, Function,
2061  "Number of basic blocks classified as partially dead");
2062  BUILD_STAT_NAME(PartiallyDeadBlocks, Function) += NoReturnCalls.size();
2063  }
2064 };
2065 
2066 bool AAIsDeadImpl::isAfterNoReturn(const Instruction *I) const {
2067  const Instruction *PrevI = I->getPrevNode();
2068  while (PrevI) {
2069  if (NoReturnCalls.count(PrevI))
2070  return true;
2071  PrevI = PrevI->getPrevNode();
2072  }
2073  return false;
2074 }
2075 
2076 const Instruction *AAIsDeadImpl::findNextNoReturn(Attributor &A,
2077  const Instruction *I) {
2078  const BasicBlock *BB = I->getParent();
2079  const Function &F = *BB->getParent();
2080 
2081  // Flag to determine if we can change an invoke to a call assuming the callee
2082  // is nounwind. This is not possible if the personality of the function allows
2083  // catching asynchronous exceptions.
2084  bool Invoke2CallAllowed = !mayCatchAsynchronousExceptions(F);
2085 
2086  // TODO: We should have a function that determines if an "edge" is dead.
2087  // Edges could be from an instruction to the next or from a terminator
2088  // to the successor. For now, we need to special case the unwind block
2089  // of InvokeInst below.
2090 
2091  while (I) {
2092  ImmutableCallSite ICS(I);
2093 
2094  if (ICS) {
2095  const IRPosition &IPos = IRPosition::callsite_function(ICS);
2096  // Regardless of the no-return property of an invoke instruction we only
2097  // learn that the regular successor is not reachable through this
2098  // instruction but the unwind block might still be.
2099  if (auto *Invoke = dyn_cast<InvokeInst>(I)) {
2100  // Use nounwind to justify the unwind block is dead as well.
2101  const auto &AANoUnw = A.getAAFor<AANoUnwind>(*this, IPos);
2102  if (!Invoke2CallAllowed || !AANoUnw.isAssumedNoUnwind()) {
2103  assumeLive(A, *Invoke->getUnwindDest());
2104  ToBeExploredPaths.insert(&Invoke->getUnwindDest()->front());
2105  }
2106  }
2107 
2108  const auto &NoReturnAA = A.getAAFor<AANoReturn>(*this, IPos);
2109  if (NoReturnAA.isAssumedNoReturn())
2110  return I;
2111  }
2112 
2113  I = I->getNextNode();
2114  }
2115 
2116  // get new paths (reachable blocks).
2117  for (const BasicBlock *SuccBB : successors(BB)) {
2118  assumeLive(A, *SuccBB);
2119  ToBeExploredPaths.insert(&SuccBB->front());
2120  }
2121 
2122  // No noreturn instruction found.
2123  return nullptr;
2124 }
2125 
2126 ChangeStatus AAIsDeadImpl::updateImpl(Attributor &A) {
2127  ChangeStatus Status = ChangeStatus::UNCHANGED;
2128 
2129  // Temporary collection to iterate over existing noreturn instructions. This
2130  // will alow easier modification of NoReturnCalls collection
2131  SmallVector<const Instruction *, 8> NoReturnChanged;
2132 
2133  for (const Instruction *I : NoReturnCalls)
2134  NoReturnChanged.push_back(I);
2135 
2136  for (const Instruction *I : NoReturnChanged) {
2137  size_t Size = ToBeExploredPaths.size();
2138 
2139  const Instruction *NextNoReturnI = findNextNoReturn(A, I);
2140  if (NextNoReturnI != I) {
2141  Status = ChangeStatus::CHANGED;
2142  NoReturnCalls.remove(I);
2143  if (NextNoReturnI)
2144  NoReturnCalls.insert(NextNoReturnI);
2145  }
2146 
2147  // Explore new paths.
2148  while (Size != ToBeExploredPaths.size()) {
2149  Status = ChangeStatus::CHANGED;
2150  if (const Instruction *NextNoReturnI =
2151  findNextNoReturn(A, ToBeExploredPaths[Size++]))
2152  NoReturnCalls.insert(NextNoReturnI);
2153  }
2154  }
2155 
2156  LLVM_DEBUG(dbgs() << "[AAIsDead] AssumedLiveBlocks: "
2157  << AssumedLiveBlocks.size() << " Total number of blocks: "
2158  << getAssociatedFunction()->size() << "\n");
2159 
2160  // If we know everything is live there is no need to query for liveness.
2161  if (NoReturnCalls.empty() &&
2162  getAssociatedFunction()->size() == AssumedLiveBlocks.size()) {
2163  // Indicating a pessimistic fixpoint will cause the state to be "invalid"
2164  // which will cause the Attributor to not return the AAIsDead on request,
2165  // which will prevent us from querying isAssumedDead().
2166  indicatePessimisticFixpoint();
2167  assert(!isValidState() && "Expected an invalid state!");
2168  Status = ChangeStatus::CHANGED;
2169  }
2170 
2171  return Status;
2172 }
2173 
2174 /// Liveness information for call sites.
2177 
2178  /// See AbstractAttribute::initialize(...).
2179  void initialize(Attributor &A) override {
2180  // TODO: Once we have call site specific value information we can provide
2181  // call site specific liveness information and then it makes
2182  // sense to specialize attributes for call sites instead of
2183  // redirecting requests to the callee.
2184  llvm_unreachable("Abstract attributes for liveness are not "
2185  "supported for call sites yet!");
2186  }
2187 
2188  /// See AbstractAttribute::updateImpl(...).
2189  ChangeStatus updateImpl(Attributor &A) override {
2190  return indicatePessimisticFixpoint();
2191  }
2192 
2193  /// See AbstractAttribute::trackStatistics()
2194  void trackStatistics() const override {}
2195 };
2196 
2197 /// -------------------- Dereferenceable Argument Attribute --------------------
2198 
2199 template <>
2200 ChangeStatus clampStateAndIndicateChange<DerefState>(DerefState &S,
2201  const DerefState &R) {
2202  ChangeStatus CS0 = clampStateAndIndicateChange<IntegerState>(
2203  S.DerefBytesState, R.DerefBytesState);
2204  ChangeStatus CS1 =
2205  clampStateAndIndicateChange<IntegerState>(S.GlobalState, R.GlobalState);
2206  return CS0 | CS1;
2207 }
2208 
2211  using StateType = DerefState;
2212 
2213  void initialize(Attributor &A) override {
2214  SmallVector<Attribute, 4> Attrs;
2215  getAttrs({Attribute::Dereferenceable, Attribute::DereferenceableOrNull},
2216  Attrs);
2217  for (const Attribute &Attr : Attrs)
2218  takeKnownDerefBytesMaximum(Attr.getValueAsInt());
2219 
2220  NonNullAA = &A.getAAFor<AANonNull>(*this, getIRPosition());
2221 
2222  const IRPosition &IRP = this->getIRPosition();
2223  bool IsFnInterface = IRP.isFnInterfaceKind();
2224  const Function *FnScope = IRP.getAnchorScope();
2225  if (IsFnInterface && (!FnScope || !FnScope->hasExactDefinition()))
2226  indicatePessimisticFixpoint();
2227  }
2228 
2229  /// See AbstractAttribute::getState()
2230  /// {
2231  StateType &getState() override { return *this; }
2232  const StateType &getState() const override { return *this; }
2233  /// }
2234 
2235  void getDeducedAttributes(LLVMContext &Ctx,
2236  SmallVectorImpl<Attribute> &Attrs) const override {
2237  // TODO: Add *_globally support
2238  if (isAssumedNonNull())
2239  Attrs.emplace_back(Attribute::getWithDereferenceableBytes(
2240  Ctx, getAssumedDereferenceableBytes()));
2241  else
2242  Attrs.emplace_back(Attribute::getWithDereferenceableOrNullBytes(
2243  Ctx, getAssumedDereferenceableBytes()));
2244  }
2245 
2246  /// See AbstractAttribute::getAsStr().
2247  const std::string getAsStr() const override {
2248  if (!getAssumedDereferenceableBytes())
2249  return "unknown-dereferenceable";
2250  return std::string("dereferenceable") +
2251  (isAssumedNonNull() ? "" : "_or_null") +
2252  (isAssumedGlobal() ? "_globally" : "") + "<" +
2253  std::to_string(getKnownDereferenceableBytes()) + "-" +
2254  std::to_string(getAssumedDereferenceableBytes()) + ">";
2255  }
2256 };
2257 
2258 /// Dereferenceable attribute for a floating value.
2261  : AADereferenceableImpl(IRP) {}
2262 
2263  /// See AbstractAttribute::updateImpl(...).
2264  ChangeStatus updateImpl(Attributor &A) override {
2265  const DataLayout &DL = A.getDataLayout();
2266 
2267  auto VisitValueCB = [&](Value &V, DerefState &T, bool Stripped) -> bool {
2268  unsigned IdxWidth =
2269  DL.getIndexSizeInBits(V.getType()->getPointerAddressSpace());
2270  APInt Offset(IdxWidth, 0);
2271  const Value *Base =
2272  V.stripAndAccumulateInBoundsConstantOffsets(DL, Offset);
2273 
2274  const auto &AA =
2275  A.getAAFor<AADereferenceable>(*this, IRPosition::value(*Base));
2276  int64_t DerefBytes = 0;
2277  if (!Stripped && this == &AA) {
2278  // Use IR information if we did not strip anything.
2279  // TODO: track globally.
2280  bool CanBeNull;
2281  DerefBytes = Base->getPointerDereferenceableBytes(DL, CanBeNull);
2282  T.GlobalState.indicatePessimisticFixpoint();
2283  } else {
2284  const DerefState &DS = static_cast<const DerefState &>(AA.getState());
2285  DerefBytes = DS.DerefBytesState.getAssumed();
2286  T.GlobalState &= DS.GlobalState;
2287  }
2288 
2289  // For now we do not try to "increase" dereferenceability due to negative
2290  // indices as we first have to come up with code to deal with loops and
2291  // for overflows of the dereferenceable bytes.
2292  int64_t OffsetSExt = Offset.getSExtValue();
2293  if (OffsetSExt < 0)
2294  Offset = 0;
2295 
2296  T.takeAssumedDerefBytesMinimum(
2297  std::max(int64_t(0), DerefBytes - OffsetSExt));
2298 
2299  if (this == &AA) {
2300  if (!Stripped) {
2301  // If nothing was stripped IR information is all we got.
2302  T.takeKnownDerefBytesMaximum(
2303  std::max(int64_t(0), DerefBytes - OffsetSExt));
2304  T.indicatePessimisticFixpoint();
2305  } else if (OffsetSExt > 0) {
2306  // If something was stripped but there is circular reasoning we look
2307  // for the offset. If it is positive we basically decrease the
2308  // dereferenceable bytes in a circular loop now, which will simply
2309  // drive them down to the known value in a very slow way which we
2310  // can accelerate.
2311  T.indicatePessimisticFixpoint();
2312  }
2313  }
2314 
2315  return T.isValidState();
2316  };
2317 
2318  DerefState T;
2319  if (!genericValueTraversal<AADereferenceable, DerefState>(
2320  A, getIRPosition(), *this, T, VisitValueCB))
2321  return indicatePessimisticFixpoint();
2322 
2322 
2323  return clampStateAndIndicateChange(getState(), T);
2324  }
2325 
2326  /// See AbstractAttribute::trackStatistics()
2327  void trackStatistics() const override {
2328  STATS_DECLTRACK_FLOATING_ATTR(dereferenceable)
2329  }
2330 };
2331 
2332 /// Dereferenceable attribute for a return value.
2333 struct AADereferenceableReturned final
2334  : AAReturnedFromReturnedValues<AADereferenceable, AADereferenceableImpl,
2335  DerefState> {
2336  AADereferenceableReturned(const IRPosition &IRP)
2337  : AAReturnedFromReturnedValues<AADereferenceable, AADereferenceableImpl,
2338  DerefState>(IRP) {}
2339 
2340  /// See AbstractAttribute::trackStatistics()
2341  void trackStatistics() const override {
2342  STATS_DECLTRACK_FNRET_ATTR(dereferenceable)
2343  }
2344 };
2345 
2346 /// Dereferenceable attribute for an argument
2348  : AAArgumentFromCallSiteArguments<AADereferenceable, AADereferenceableImpl,
2349  DerefState> {
2353  IRP) {}
2354 
2355  /// See AbstractAttribute::trackStatistics()
2356  void trackStatistics() const override {
2357  STATS_DECLTRACK_ARG_ATTR(dereferenceable)
2358  }
2359 };
2360 
2361 /// Dereferenceable attribute for a call site argument.
2364  : AADereferenceableFloating(IRP) {}
2365 
2366  /// See AbstractAttribute::trackStatistics()
2367  void trackStatistics() const override {
2368  STATS_DECLTRACK_CSARG_ATTR(dereferenceable)
2369  }
2370 };
2371 
2372 /// Dereferenceable attribute deduction for a call site return value.
2375  : AADereferenceableImpl(IRP) {}
2376 
2377  /// See AbstractAttribute::initialize(...).
2378  void initialize(Attributor &A) override {
2380  Function *F = getAssociatedFunction();
2381  if (!F)
2382  indicatePessimisticFixpoint();
2383  }
2384 
2385  /// See AbstractAttribute::updateImpl(...).
2386  ChangeStatus updateImpl(Attributor &A) override {
2387  // TODO: Once we have call site specific value information we can provide
2388  // call site specific liveness information and then it makes
2389  // sense to specialize attributes for call sites arguments instead of
2390  // redirecting requests to the callee argument.
2391  Function *F = getAssociatedFunction();
2392  const IRPosition &FnPos = IRPosition::returned(*F);
2393  auto &FnAA = A.getAAFor<AADereferenceable>(*this, FnPos);
2394  return clampStateAndIndicateChange(
2395  getState(), static_cast<const DerefState &>(FnAA.getState()));
2396  }
2397 
2398  /// See AbstractAttribute::trackStatistics()
2399  void trackStatistics() const override {
2400  STATS_DECLTRACK_CS_ATTR(dereferenceable);
2401  }
2402 };
2403 
2404 // ------------------------ Align Argument Attribute ------------------------
2405 
2405 
2406 struct AAAlignImpl : AAAlign {
2407  AAAlignImpl(const IRPosition &IRP) : AAAlign(IRP) {}
2408 
2409  // Max alignment value allowed in IR
2410  static const unsigned MAX_ALIGN = 1U << 29;
2411 
2412  /// See AbstractAttribute::initialize(...).
2413  void initialize(Attributor &A) override {
2414  takeAssumedMinimum(MAX_ALIGN);
2415 
2416  SmallVector<Attribute, 4> Attrs;
2417  getAttrs({Attribute::Alignment}, Attrs);
2418  for (const Attribute &Attr : Attrs)
2419  takeKnownMaximum(Attr.getValueAsInt());
2420 
2421  if (getIRPosition().isFnInterfaceKind() &&
2422  (!getAssociatedFunction() ||
2423  !getAssociatedFunction()->hasExactDefinition()))
2424  indicatePessimisticFixpoint();
2425  }
2426 
2427  /// See AbstractAttribute::manifest(...).
2428  ChangeStatus manifest(Attributor &A) override {
2429  ChangeStatus Changed = ChangeStatus::UNCHANGED;
2430 
2431  // Check for users that allow alignment annotations.
2432  Value &AnchorVal = getIRPosition().getAnchorValue();
2433  for (const Use &U : AnchorVal.uses()) {
2434  if (auto *SI = dyn_cast<StoreInst>(U.getUser())) {
2435  if (SI->getPointerOperand() == &AnchorVal)
2436  if (SI->getAlignment() < getAssumedAlign()) {
2437  STATS_DECLTRACK(AAAlign, Store,
2438  "Number of times alignment added to a store");
2439  SI->setAlignment(getAssumedAlign());
2440  Changed = ChangeStatus::CHANGED;
2441  }
2442  } else if (auto *LI = dyn_cast<LoadInst>(U.getUser())) {
2443  if (LI->getPointerOperand() == &AnchorVal)
2444  if (LI->getAlignment() < getAssumedAlign()) {
2445  LI->setAlignment(getAssumedAlign());
2446  STATS_DECLTRACK(AAAlign, Load,
2447  "Number of times alignment added to a load");
2448  Changed = ChangeStatus::CHANGED;
2449  }
2450  }
2451  }
2452 
2453  return AAAlign::manifest(A) | Changed;
2454  }
2455 
2456  // TODO: Provide a helper to determine the implied ABI alignment and check in
2457  // the existing manifest method and a new one for AAAlignImpl that value
2458  // to avoid making the alignment explicit if it did not improve.
2459 
2460  /// See AbstractAttribute::getDeducedAttributes
2461  virtual void
2462  getDeducedAttributes(LLVMContext &Ctx,
2463  SmallVectorImpl<Attribute> &Attrs) const override {
2464  if (getAssumedAlign() > 1)
2465  Attrs.emplace_back(Attribute::getWithAlignment(Ctx, getAssumedAlign()));
2466  }
2467 
2468  /// See AbstractAttribute::getAsStr().
2469  const std::string getAsStr() const override {
2470  return getAssumedAlign() ? ("align<" + std::to_string(getKnownAlign()) +
2471  "-" + std::to_string(getAssumedAlign()) + ">")
2472  : "unknown-align";
2473  }
2474 };
2475 
2476 /// Align attribute for a floating value.
2479 
2480  /// See AbstractAttribute::updateImpl(...).
2481  ChangeStatus updateImpl(Attributor &A) override {
2482  const DataLayout &DL = A.getDataLayout();
2483 
2484  auto VisitValueCB = [&](Value &V, AAAlign::StateType &T,
2485  bool Stripped) -> bool {
2486  const auto &AA = A.getAAFor<AAAlign>(*this, IRPosition::value(V));
2487  if (!Stripped && this == &AA) {
2488  // Use only IR information if we did not strip anything.
2491  } else {
2492  // Use abstract attribute information.
2493  const AAAlign::StateType &DS =
2494  static_cast<const AAAlign::StateType &>(AA.getState());
2495  T ^= DS;
2496  }
2497  return T.isValidState();
2498  };
2499 
2500  StateType T;
2501  if (!genericValueTraversal<AAAlign, StateType>(A, getIRPosition(), *this, T,
2502  VisitValueCB))
2503  return indicatePessimisticFixpoint();
2504 
2505  // TODO: If we know we visited all incoming values, thus none are assumed
2506  // dead, we can take the known information from the state T.
2507  return clampStateAndIndicateChange(getState(), T);
2508  }
2509 
2510  /// See AbstractAttribute::trackStatistics()
2511  void trackStatistics() const override { STATS_DECLTRACK_FLOATING_ATTR(align) }
2512 };
2513 
2514 /// Align attribute for function return value.
2515 struct AAAlignReturned final
2516  : AAReturnedFromReturnedValues<AAAlign, AAAlignImpl> {
2519 
2520  /// See AbstractAttribute::trackStatistics()
2522 };
2523 
2524 /// Align attribute for function argument.
2525 struct AAAlignArgument final
2526  : AAArgumentFromCallSiteArguments<AAAlign, AAAlignImpl> {
2529 
2530  /// See AbstractAttribute::trackStatistics()
2532 };
2533 
2536 
2537  /// See AbstractAttribute::manifest(...).
2538  ChangeStatus manifest(Attributor &A) override {
2539  return AAAlignImpl::manifest(A);
2540  }
2541 
2542  /// See AbstractAttribute::trackStatistics()
2544 };
2545 
2546 /// Align attribute deduction for a call site return value.
2549 
2550  /// See AbstractAttribute::initialize(...).
2551  void initialize(Attributor &A) override {
2553  Function *F = getAssociatedFunction();
2554  if (!F)
2555  indicatePessimisticFixpoint();
2556  }
2557 
2558  /// See AbstractAttribute::updateImpl(...).
2559  ChangeStatus updateImpl(Attributor &A) override {
2560  // TODO: Once we have call site specific value information we can provide
2561  // call site specific liveness information and then it makes
2562  // sense to specialize attributes for call sites arguments instead of
2563  // redirecting requests to the callee argument.
2564  Function *F = getAssociatedFunction();
2565  const IRPosition &FnPos = IRPosition::returned(*F);
2566  auto &FnAA = A.getAAFor<AAAlign>(*this, FnPos);
2567  return clampStateAndIndicateChange(
2568  getState(), static_cast<const AAAlign::StateType &>(FnAA.getState()));
2569  }
2570 
2571  /// See AbstractAttribute::trackStatistics()
2572  void trackStatistics() const override { STATS_DECLTRACK_CS_ATTR(align); }
2573 };
2574 
2575 /// ------------------ Function No-Return Attribute ----------------------------
2576 struct AANoReturnImpl : public AANoReturn {
2577  AANoReturnImpl(const IRPosition &IRP) : AANoReturn(IRP) {}
2578 
2579  /// See AbstractAttribute::getAsStr().
2580  const std::string getAsStr() const override {
2581  return getAssumed() ? "noreturn" : "may-return";
2582  }
2583 
2584  /// See AbstractAttribute::updateImpl(Attributor &A).
2585  virtual ChangeStatus updateImpl(Attributor &A) override {
2586  auto CheckForNoReturn = [](Instruction &) { return false; };
2587  if (!A.checkForAllInstructions(CheckForNoReturn, *this,
2588  {(unsigned)Instruction::Ret}))
2589  return indicatePessimisticFixpoint();
2590  return ChangeStatus::UNCHANGED;
2591  }
2592 };
2593 
2596 
2597  /// See AbstractAttribute::trackStatistics()
2598  void trackStatistics() const override { STATS_DECLTRACK_FN_ATTR(noreturn) }
2599 };
2600 
2601 /// NoReturn attribute deduction for a call site.
2604 
2605  /// See AbstractAttribute::initialize(...).
2606  void initialize(Attributor &A) override {
2608  Function *F = getAssociatedFunction();
2609  if (!F)
2610  indicatePessimisticFixpoint();
2611  }
2612 
2613  /// See AbstractAttribute::updateImpl(...).
2614  ChangeStatus updateImpl(Attributor &A) override {
2615  // TODO: Once we have call site specific value information we can provide
2616  // call site specific liveness information and then it makes
2617  // sense to specialize attributes for call sites arguments instead of
2618  // redirecting requests to the callee argument.
2619  Function *F = getAssociatedFunction();
2620  const IRPosition &FnPos = IRPosition::function(*F);
2621  auto &FnAA = A.getAAFor<AANoReturn>(*this, FnPos);
2622  return clampStateAndIndicateChange(
2623  getState(),
2624  static_cast<const AANoReturn::StateType &>(FnAA.getState()));
2625  }
2626 
2627  /// See AbstractAttribute::trackStatistics()
2628  void trackStatistics() const override { STATS_DECLTRACK_CS_ATTR(noreturn); }
2629 };
2630 
2631 /// ----------------------- Variable Capturing ---------------------------------
2632 
2633 /// A class to hold the state for no-capture attributes.
2634 struct AANoCaptureImpl : public AANoCapture {
2635  AANoCaptureImpl(const IRPosition &IRP) : AANoCapture(IRP) {}
2636 
2637  /// See AbstractAttribute::initialize(...).
2638  void initialize(Attributor &A) override {
2640 
2641  const IRPosition &IRP = getIRPosition();
2642  const Function *F =
2643  getArgNo() >= 0 ? IRP.getAssociatedFunction() : IRP.getAnchorScope();
2644 
2645  // Check what state the associated function can actually capture.
2646  if (F)
2647  determineFunctionCaptureCapabilities(*F, *this);
2648  else
2649  indicatePessimisticFixpoint();
2650  }
2651 
2652  /// See AbstractAttribute::updateImpl(...).
2653  ChangeStatus updateImpl(Attributor &A) override;
2654 
2655  /// see AbstractAttribute::isAssumedNoCaptureMaybeReturned(...).
2656  virtual void
2658  SmallVectorImpl<Attribute> &Attrs) const override {
2659  if (!isAssumedNoCaptureMaybeReturned())
2660  return;
2661 
2662  if (getArgNo() >= 0) {
2663  if (isAssumedNoCapture())
2664  Attrs.emplace_back(Attribute::get(Ctx, Attribute::NoCapture));
2665  else if (ManifestInternal)
2666  Attrs.emplace_back(Attribute::get(Ctx, "no-capture-maybe-returned"));
2667  }
2668  }
2669 
2670  /// Set the NOT_CAPTURED_IN_MEM and NOT_CAPTURED_IN_RET bits in \p Known
2671  /// depending on the ability of the function associated with \p IRP to capture
2672  /// state in memory and through "returning/throwing", respectively.
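  /// For example (illustrative), for a callee declared as
  ///   declare void @f(i8* %p) readonly nounwind
  /// a pointer argument cannot be captured in memory nor communicated back
  /// through a return value or an exception, so NO_CAPTURE can be recorded as
  /// known.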
2673  static void determineFunctionCaptureCapabilities(const Function &F,
2674  IntegerState &State) {
2675  // TODO: Once we have memory behavior attributes we should use them here.
2676 
2677  // If we know we cannot communicate or write to memory, we do not care about
2678  // ptr2int anymore.
2679  if (F.onlyReadsMemory() && F.doesNotThrow() &&
2680  F.getReturnType()->isVoidTy()) {
2681  State.addKnownBits(NO_CAPTURE);
2682  return;
2683  }
2684 
2685  // A function cannot capture state in memory if it only reads memory, it can
2686  // however return/throw state and the state might be influenced by the
2687  // pointer value, e.g., loading from a returned pointer might reveal a bit.
2688  if (F.onlyReadsMemory())
2689  State.addKnownBits(NOT_CAPTURED_IN_MEM);
2690 
2691  // A function cannot communicate state back if it does not throw
2692  // exceptions and does not return values.
2693  if (F.doesNotThrow() && F.getReturnType()->isVoidTy())
2694  State.addKnownBits(NOT_CAPTURED_IN_RET);
2695  }
2696 
2697  /// See AbstractState::getAsStr().
2698  const std::string getAsStr() const override {
2699  if (isKnownNoCapture())
2700  return "known not-captured";
2701  if (isAssumedNoCapture())
2702  return "assumed not-captured";
2703  if (isKnownNoCaptureMaybeReturned())
2704  return "known not-captured-maybe-returned";
2705  if (isAssumedNoCaptureMaybeReturned())
2706  return "assumed not-captured-maybe-returned";
2707  return "assumed-captured";
2708  }
2709 };
2710 
2711 /// Attributor-aware capture tracker.
2712 struct AACaptureUseTracker final : public CaptureTracker {
2713 
2714  /// Create a capture tracker that can lookup in-flight abstract attributes
2715  /// through the Attributor \p A.
2716  ///
2717  /// If a use leads to a potential capture, \p CapturedInMemory is set and the
2718  /// search is stopped. If a use leads to a return instruction,
2719  /// \p CommunicatedBack is set to true and \p CapturedInMemory is not changed.
2720  /// If a use leads to a ptr2int which may capture the value,
2721  /// \p CapturedInInteger is set. If a use is found that is currently assumed
2722  /// "no-capture-maybe-returned", the user is added to the \p PotentialCopies
2723  /// set. All values in \p PotentialCopies are later tracked as well. For every
2724  /// explored use we decrement \p RemainingUsesToExplore. Once it reaches 0,
2725  /// the search is stopped with \p CapturedInMemory and \p CapturedInInteger
2726  /// conservatively set to true.
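  ///
  /// For example (illustrative IR only), the uses
  ///   store i8* %p, i8** @G       ; may capture %p in memory
  ///   %i = ptrtoint i8* %p to i64 ; may capture %p as an integer
  ///   ret i8* %p                  ; communicates %p back to the caller
  /// clear the corresponding NOT_CAPTURED_IN_MEM / _IN_INT / _IN_RET bits.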
2727  AACaptureUseTracker(Attributor &A, AANoCapture &NoCaptureAA,
2728  const AAIsDead &IsDeadAA, IntegerState &State,
2729  SmallVectorImpl<const Value *> &PotentialCopies,
2730  unsigned &RemainingUsesToExplore)
2731  : A(A), NoCaptureAA(NoCaptureAA), IsDeadAA(IsDeadAA), State(State),
2732  PotentialCopies(PotentialCopies),
2733  RemainingUsesToExplore(RemainingUsesToExplore) {}
2734 
2735  /// Determine if \p V maybe captured. *Also updates the state!*
2736  bool valueMayBeCaptured(const Value *V) {
2737  if (V->getType()->isPointerTy()) {
2738  PointerMayBeCaptured(V, this);
2739  } else {
2740  State.indicatePessimisticFixpoint();
2741  }
2742  return State.isAssumed(AANoCapture::NO_CAPTURE_MAYBE_RETURNED);
2743  }
2744 
2745  /// See CaptureTracker::tooManyUses().
2746  void tooManyUses() override {
2747  State.removeAssumedBits(AANoCapture::NO_CAPTURE);
2748  }
2749 
2750  bool isDereferenceableOrNull(Value *O, const DataLayout &DL) override {
2751  if (CaptureTracker::isDereferenceableOrNull(O, DL))
2752  return true;
2753  const auto &DerefAA =
2754  A.getAAFor<AADereferenceable>(NoCaptureAA, IRPosition::value(*O));
2755  return DerefAA.getAssumedDereferenceableBytes();
2756  }
2757 
2758  /// See CaptureTracker::captured(...).
2759  bool captured(const Use *U) override {
2760  Instruction *UInst = cast<Instruction>(U->getUser());
2761  LLVM_DEBUG(dbgs() << "Check use: " << *U->get() << " in " << *UInst
2762  << "\n");
2763 
2764  // Because we may reuse the tracker multiple times we keep track of the
2765  // number of explored uses ourselves as well.
2766  if (RemainingUsesToExplore-- == 0) {
2767  LLVM_DEBUG(dbgs() << " - too many uses to explore!\n");
2768  return isCapturedIn(/* Memory */ true, /* Integer */ true,
2769  /* Return */ true);
2770  }
2771 
2772  // Deal with ptr2int by following uses.
2773  if (isa<PtrToIntInst>(UInst)) {
2774  LLVM_DEBUG(dbgs() << " - ptr2int assume the worst!\n");
2775  return valueMayBeCaptured(UInst);
2776  }
2777 
2778  // Explicitly catch return instructions.
2779  if (isa<ReturnInst>(UInst))
2780  return isCapturedIn(/* Memory */ false, /* Integer */ false,
2781  /* Return */ true);
2782 
2783  // For now we only use special logic for call sites. However, the tracker
2784  // itself knows about a lot of other non-capturing cases already.
2785  CallSite CS(UInst);
2786  if (!CS || !CS.isArgOperand(U))
2787  return isCapturedIn(/* Memory */ true, /* Integer */ true,
2788  /* Return */ true);
2789 
2790  unsigned ArgNo = CS.getArgumentNo(U);
2791  const IRPosition &CSArgPos = IRPosition::callsite_argument(CS, ArgNo);
2792  // If we have an abstract no-capture attribute for the argument we can use
2793  // it to justify a non-capture attribute here. This allows recursion!
2794  auto &ArgNoCaptureAA = A.getAAFor<AANoCapture>(NoCaptureAA, CSArgPos);
2795  if (ArgNoCaptureAA.isAssumedNoCapture())
2796  return isCapturedIn(/* Memory */ false, /* Integer */ false,
2797  /* Return */ false);
2798  if (ArgNoCaptureAA.isAssumedNoCaptureMaybeReturned()) {
2799  addPotentialCopy(CS);
2800  return isCapturedIn(/* Memory */ false, /* Integer */ false,
2801  /* Return */ false);
2802  }
2803 
2804  // Lastly, we could not find a reason no-capture can be assumed so we don't.
2805  return isCapturedIn(/* Memory */ true, /* Integer */ true,
2806  /* Return */ true);
2807  }
2808 
2809  /// Register \p CS as potential copy of the value we are checking.
2810  void addPotentialCopy(CallSite CS) {
2811  PotentialCopies.push_back(CS.getInstruction());
2812  }
2813 
2814  /// See CaptureTracker::shouldExplore(...).
2815  bool shouldExplore(const Use *U) override {
2816  // Check liveness.
2817  return !IsDeadAA.isAssumedDead(cast<Instruction>(U->getUser()));
2818  }
2819 
2820  /// Update the state according to \p CapturedInMem, \p CapturedInInt, and
2821  /// \p CapturedInRet, then return the appropriate value for use in the
2822  /// CaptureTracker::captured() interface.
2823  bool isCapturedIn(bool CapturedInMem, bool CapturedInInt,
2824  bool CapturedInRet) {
2825  LLVM_DEBUG(dbgs() << " - captures [Mem " << CapturedInMem << "|Int "
2826  << CapturedInInt << "|Ret " << CapturedInRet << "]\n");
2827  if (CapturedInMem)
2828  State.removeAssumedBits(AANoCapture::NOT_CAPTURED_IN_MEM);
2829  if (CapturedInInt)
2830  State.removeAssumedBits(AANoCapture::NOT_CAPTURED_IN_INT);
2831  if (CapturedInRet)
2832  State.removeAssumedBits(AANoCapture::NOT_CAPTURED_IN_RET);
2833  return !State.isAssumed(AANoCapture::NO_CAPTURE_MAYBE_RETURNED);
2834  }
2835 
2836 private:
2837  /// The attributor providing in-flight abstract attributes.
2838  Attributor &A;
2839 
2840  /// The abstract attribute currently updated.
2841  AANoCapture &NoCaptureAA;
2842 
2843  /// The abstract liveness state.
2844  const AAIsDead &IsDeadAA;
2845 
2846  /// The state currently updated.
2847  IntegerState &State;
2848 
2849  /// Set of potential copies of the tracked value.
2850  SmallVectorImpl<const Value *> &PotentialCopies;
2851 
2852  /// Global counter to limit the number of explored uses.
2853  unsigned &RemainingUsesToExplore;
2854 };
2855 
2856 ChangeStatus AANoCaptureImpl::updateImpl(Attributor &A) {
2857  const IRPosition &IRP = getIRPosition();
2858  const Value *V =
2859  getArgNo() >= 0 ? IRP.getAssociatedArgument() : &IRP.getAssociatedValue();
2860  if (!V)
2861  return indicatePessimisticFixpoint();
2862 
2863  const Function *F =
2864  getArgNo() >= 0 ? IRP.getAssociatedFunction() : IRP.getAnchorScope();
2865  assert(F && "Expected a function!");
2866  const auto &IsDeadAA = A.getAAFor<AAIsDead>(*this, IRPosition::function(*F));
2867 
2868  AANoCapture::StateType T;
2869  // TODO: Once we have memory behavior attributes we should use them here
2870  // similar to the reasoning in
2871  // AANoCaptureImpl::determineFunctionCaptureCapabilities(...).
2872 
2873  // TODO: Use the AAReturnedValues to learn if the argument can return or
2874  // not.
2875 
2876  // Use the CaptureTracker interface and logic with the specialized tracker,
2877  // defined in AACaptureUseTracker, that can look at in-flight abstract
2878  // attributes and directly updates the assumed state.
2879  SmallVector<const Value *, 4> PotentialCopies;
2880  unsigned RemainingUsesToExplore = DefaultMaxUsesToExplore;
2881  AACaptureUseTracker Tracker(A, *this, IsDeadAA, T, PotentialCopies,
2882  RemainingUsesToExplore);
2883 
2884  // Check all potential copies of the associated value until we can assume
2885  // none will be captured or we have to assume at least one might be.
2886  unsigned Idx = 0;
2887  PotentialCopies.push_back(V);
2888  while (T.isAssumed(NO_CAPTURE_MAYBE_RETURNED) && Idx < PotentialCopies.size())
2889  Tracker.valueMayBeCaptured(PotentialCopies[Idx++]);
2890 
2891  AANoCapture::StateType &S = getState();
2892  auto Assumed = S.getAssumed();
2893  S.intersectAssumedBits(T.getAssumed());
2894  return Assumed == S.getAssumed() ? ChangeStatus::UNCHANGED
2895  : ChangeStatus::CHANGED;
2896 }
2897 
2898 /// NoCapture attribute for function arguments.
2901 
2902  /// See AbstractAttribute::trackStatistics()
2903  void trackStatistics() const override { STATS_DECLTRACK_ARG_ATTR(nocapture) }
2904 };
2905 
2906 /// NoCapture attribute for call site arguments.
2909 
2910  /// See AbstractAttribute::updateImpl(...).
2911  ChangeStatus updateImpl(Attributor &A) override {
2912  // TODO: Once we have call site specific value information we can provide
2913  // call site specific liveness information and then it makes
2914  // sense to specialize attributes for call sites arguments instead of
2915  // redirecting requests to the callee argument.
2916  Argument *Arg = getAssociatedArgument();
2917  if (!Arg)
2918  return indicatePessimisticFixpoint();
2919  const IRPosition &ArgPos = IRPosition::argument(*Arg);
2920  auto &ArgAA = A.getAAFor<AANoCapture>(*this, ArgPos);
2921  return clampStateAndIndicateChange(
2922  getState(),
2923  static_cast<const AANoCapture::StateType &>(ArgAA.getState()));
2924  }
2925 
2926  /// See AbstractAttribute::trackStatistics()
2927  void trackStatistics() const override{STATS_DECLTRACK_CSARG_ATTR(nocapture)};
2928 };
2929 
2930 /// NoCapture attribute for floating values.
2933 
2934  /// See AbstractAttribute::trackStatistics()
2935  void trackStatistics() const override {
2936  STATS_DECLTRACK_FLOATING_ATTR(nocapture)
2937  }
2938 };
2939 
2940 /// NoCapture attribute for function return value.
2943  llvm_unreachable("NoCapture is not applicable to function returns!");
2944  }
2945 
2946  /// See AbstractAttribute::initialize(...).
2947  void initialize(Attributor &A) override {
2948  llvm_unreachable("NoCapture is not applicable to function returns!");
2949  }
2950 
2951  /// See AbstractAttribute::updateImpl(...).
2952  ChangeStatus updateImpl(Attributor &A) override {
2953  llvm_unreachable("NoCapture is not applicable to function returns!");
2954  }
2955 
2956  /// See AbstractAttribute::trackStatistics()
2957  void trackStatistics() const override {}
2958 };
2959 
2960 /// NoCapture attribute deduction for a call site return value.
2963 
2964  /// See AbstractAttribute::trackStatistics()
2965  void trackStatistics() const override {
2966  STATS_DECLTRACK_CSRET_ATTR(nocapture)
2967  }
2968 };
2969 
2970 /// ------------------ Value Simplify Attribute ----------------------------
2973 
2974  /// See AbstractAttribute::getAsStr().
2975  const std::string getAsStr() const override {
2976  return getAssumed() ? (getKnown() ? "simplified" : "maybe-simple")
2977  : "not-simple";
2978  }
2979 
2980  /// See AbstractAttribute::trackStatistics()
2981  void trackStatistics() const override {}
2982 
2983  /// See AAValueSimplify::getAssumedSimplifiedValue()
2984  Optional<Value *> getAssumedSimplifiedValue(Attributor &A) const override {
2985  if (!getAssumed())
2986  return const_cast<Value *>(&getAssociatedValue());
2987  return SimplifiedAssociatedValue;
2988  }
2989  void initialize(Attributor &A) override {}
2990 
2991  /// Helper function for querying AAValueSimplify and updating the candidate.
2992  /// \param QueryingValue Value trying to unify with SimplifiedValue
2993  /// \param AccumulatedSimplifiedValue Current simplification result.
2994  static bool checkAndUpdate(Attributor &A, const AbstractAttribute &QueryingAA,
2995  Value &QueryingValue,
2996  Optional<Value *> &AccumulatedSimplifiedValue) {
2997  // FIXME: Add a typecast support.
2998 
2999  auto &ValueSimpifyAA = A.getAAFor<AAValueSimplify>(
3000  QueryingAA, IRPosition::value(QueryingValue));
3001 
3002  Optional<Value *> QueryingValueSimplified =
3003  ValueSimpifyAA.getAssumedSimplifiedValue(A);
3004 
3005  if (!QueryingValueSimplified.hasValue())
3006  return true;
3007 
3008  if (!QueryingValueSimplified.getValue())
3009  return false;
3010 
3011  Value &QueryingValueSimplifiedUnwrapped =
3012  *QueryingValueSimplified.getValue();
3013 
3014  if (isa<UndefValue>(QueryingValueSimplifiedUnwrapped))
3015  return true;
3016 
3017  if (AccumulatedSimplifiedValue.hasValue())
3018  return AccumulatedSimplifiedValue == QueryingValueSimplified;
3019 
3020  LLVM_DEBUG(dbgs() << "[Attributor][ValueSimplify] " << QueryingValue
3021  << " is assumed to be "
3022  << QueryingValueSimplifiedUnwrapped << "\n");
3023 
3024  AccumulatedSimplifiedValue = QueryingValueSimplified;
3025  return true;
3026  }
3027 
3028  /// See AbstractAttribute::manifest(...).
3029  ChangeStatus manifest(Attributor &A) override {
3030  ChangeStatus Changed = ChangeStatus::UNCHANGED;
3031 
3032  if (!SimplifiedAssociatedValue.hasValue() ||
3033  !SimplifiedAssociatedValue.getValue())
3034  return Changed;
3035 
3036  if (auto *C = dyn_cast<Constant>(SimplifiedAssociatedValue.getValue())) {
3037  // We can replace the AssociatedValue with the constant.
3038  Value &V = getAssociatedValue();
3039  if (!V.user_empty() && &V != C && V.getType() == C->getType()) {
3040  LLVM_DEBUG(dbgs() << "[Attributor][ValueSimplify] " << V << " -> " << *C
3041  << "\n");
3042  V.replaceAllUsesWith(C);
3043  Changed = ChangeStatus::CHANGED;
3044  }
3045  }
3046 
3047  return Changed | AAValueSimplify::manifest(A);
3048  }
3049 
3050 protected:
3051  // An assumed simplified value. Initially, it is set to Optional::None, which
3052  // means that the value is not clear under the current assumption. If in the
3053  // pessimistic state, getAssumedSimplifiedValue doesn't return this value but
3054  // returns the original associated value.
3055  Optional<Value *> SimplifiedAssociatedValue;
3056 };
3057 
3060 
3061  /// See AbstractAttribute::updateImpl(...).
3062  ChangeStatus updateImpl(Attributor &A) override {
3063  bool HasValueBefore = SimplifiedAssociatedValue.hasValue();
3064 
3065  auto PredForCallSite = [&](CallSite CS) {
3066  return checkAndUpdate(A, *this, *CS.getArgOperand(getArgNo()),
3067  SimplifiedAssociatedValue);
3068  };
3069 
3070  if (!A.checkForAllCallSites(PredForCallSite, *this, true))
3071  return indicatePessimisticFixpoint();
3072 
3073  // If a candidate was found in this update, return CHANGED.
3074  return HasValueBefore == SimplifiedAssociatedValue.hasValue()
3075  ? ChangeStatus::UNCHANGED
3076  : ChangeStatus::CHANGED;
3077  }
3078 
3079  /// See AbstractAttribute::trackStatistics()
3080  void trackStatistics() const override {
3081  STATS_DECLTRACK_ARG_ATTR(value_simplify)
3082  }
3083 };
3084 
3087 
3088  /// See AbstractAttribute::updateImpl(...).
3090  bool HasValueBefore = SimplifiedAssociatedValue.hasValue();
3091 
3092  auto PredForReturned = [&](Value &V) {
3093  return checkAndUpdate(A, *this, V, SimplifiedAssociatedValue);
3094  };
3095 
3096  if (!A.checkForAllReturnedValues(PredForReturned, *this))
3097  return indicatePessimisticFixpoint();
3098 
3099  // If a candidate was found in this update, return CHANGED.
3100  return HasValueBefore == SimplifiedAssociatedValue.hasValue()
3101  ? ChangeStatus::UNCHANGED
3102  : ChangeStatus::CHANGED;
3103  }
3104  /// See AbstractAttribute::trackStatistics()
3105  void trackStatistics() const override {
3106  STATS_DECLTRACK_FNRET_ATTR(value_simplify)
3107  }
3108 };
3109 
3112 
3113  /// See AbstractAttribute::initialize(...).
3114  void initialize(Attributor &A) override {
3115  Value &V = getAnchorValue();
3116 
3117  // TODO: add other cases
3118  if (isa<Constant>(V) || isa<UndefValue>(V))
3119  indicatePessimisticFixpoint();
3120  }
3121 
3122  /// See AbstractAttribute::updateImpl(...).
3123  ChangeStatus updateImpl(Attributor &A) override {
3124  bool HasValueBefore = SimplifiedAssociatedValue.hasValue();
3125 
3126  auto VisitValueCB = [&](Value &V, BooleanState, bool Stripped) -> bool {
3127  auto &AA = A.getAAFor<AAValueSimplify>(*this, IRPosition::value(V));
3128  if (!Stripped && this == &AA) {
3129  // TODO: Look at the instruction and check recursively.
3130  LLVM_DEBUG(
3131  dbgs() << "[Attributor][ValueSimplify] Can't be stripped more : "
3132  << V << "\n");
3133  indicatePessimisticFixpoint();
3134  return false;
3135  }
3136  return checkAndUpdate(A, *this, V, SimplifiedAssociatedValue);
3137  };
3138 
3139  if (!genericValueTraversal<AAValueSimplify, BooleanState>(
3140  A, getIRPosition(), *this, static_cast<BooleanState &>(*this),
3141  VisitValueCB))
3142  return indicatePessimisticFixpoint();
3143 
3144  // If a candidate was found in this update, return CHANGED.
3145 
3146  return HasValueBefore == SimplifiedAssociatedValue.hasValue()
3147  ? ChangeStatus::UNCHANGED
3148  : ChangeStatus::CHANGED;
3149  }
3150 
3151  /// See AbstractAttribute::trackStatistics()
3152  void trackStatistics() const override {
3153  STATS_DECLTRACK_FLOATING_ATTR(value_simplify)
3154  }
3155 };
3156 
3159 
3160  /// See AbstractAttribute::initialize(...).
3161  void initialize(Attributor &A) override {
3162  SimplifiedAssociatedValue = &getAnchorValue();
3163  indicateOptimisticFixpoint();
3164  }
3165  /// See AbstractAttribute::updateImpl(...).
3166  ChangeStatus updateImpl(Attributor &A) override {
3167  llvm_unreachable(
3168  "AAValueSimplify(Function|CallSite)::updateImpl will not be called");
3169  }
3170  /// See AbstractAttribute::trackStatistics()
3171  void trackStatistics() const override {
3172  STATS_DECLTRACK_FN_ATTR(value_simplify)
3173  }
3174 };
3175 
3178  : AAValueSimplifyFunction(IRP) {}
3179  /// See AbstractAttribute::trackStatistics()
3180  void trackStatistics() const override {
3181  STATS_DECLTRACK_CS_ATTR(value_simplify)
3182  }
3183 };
3184 
3187  : AAValueSimplifyReturned(IRP) {}
3188 
3189  void trackStatistics() const override {
3190  STATS_DECLTRACK_CSRET_ATTR(value_simplify)
3191  }
3192 };
3195  : AAValueSimplifyFloating(IRP) {}
3196 
3197  void trackStatistics() const override {
3198  STATS_DECLTRACK_CSARG_ATTR(value_simplify)
3199  }
3200 };
3201 
3202 /// ----------------------- Heap-To-Stack Conversion ---------------------------
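/// For example (illustrative IR only), a known-size allocation such as
///   %p = call i8* @malloc(i64 16)
///   ...
///   call void @free(i8* %p)
/// can be rewritten to
///   %p = alloca i8, i64 16
/// with the free calls removed, provided every use of %p is a load, store,
/// free, or a call that neither captures nor frees the pointer.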
3203 struct AAHeapToStackImpl : public AAHeapToStack {
3204  AAHeapToStackImpl(const IRPosition &IRP) : AAHeapToStack(IRP) {}
3205 
3206  const std::string getAsStr() const override {
3207  return "[H2S] Mallocs: " + std::to_string(MallocCalls.size());
3208  }
3209 
3210  ChangeStatus manifest(Attributor &A) override {
3211  assert(getState().isValidState() &&
3212  "Attempted to manifest an invalid state!");
3213 
3214  ChangeStatus HasChanged = ChangeStatus::UNCHANGED;
3215  Function *F = getAssociatedFunction();
3216  const auto *TLI = A.getInfoCache().getTargetLibraryInfoForFunction(*F);
3217 
3218  for (Instruction *MallocCall : MallocCalls) {
3219  // This malloc cannot be replaced.
3220  if (BadMallocCalls.count(MallocCall))
3221  continue;
3222 
3223  for (Instruction *FreeCall : FreesForMalloc[MallocCall]) {
3224  LLVM_DEBUG(dbgs() << "H2S: Removing free call: " << *FreeCall << "\n");
3225  A.deleteAfterManifest(*FreeCall);
3226  HasChanged = ChangeStatus::CHANGED;
3227  }
3228 
3229  LLVM_DEBUG(dbgs() << "H2S: Removing malloc call: " << *MallocCall
3230  << "\n");
3231 
3232  Constant *Size;
3233  if (isCallocLikeFn(MallocCall, TLI)) {
3234  auto *Num = cast<ConstantInt>(MallocCall->getOperand(0));
3235  auto *SizeT = dyn_cast<ConstantInt>(MallocCall->getOperand(1));
3236  APInt TotalSize = SizeT->getValue() * Num->getValue();
3237  Size =
3238  ConstantInt::get(MallocCall->getOperand(0)->getType(), TotalSize);
3239  } else {
3240  Size = cast<ConstantInt>(MallocCall->getOperand(0));
3241  }
3242 
3243  unsigned AS = cast<PointerType>(MallocCall->getType())->getAddressSpace();
3244  Instruction *AI = new AllocaInst(Type::getInt8Ty(F->getContext()), AS,
3245  Size, "", MallocCall->getNextNode());
3246 
3247  if (AI->getType() != MallocCall->getType())
3248  AI = new BitCastInst(AI, MallocCall->getType(), "malloc_bc",
3249  AI->getNextNode());
3250 
3251  MallocCall->replaceAllUsesWith(AI);
3252 
3253  if (auto *II = dyn_cast<InvokeInst>(MallocCall)) {
3254  auto *NBB = II->getNormalDest();
3255  BranchInst::Create(NBB, MallocCall->getParent());
3256  A.deleteAfterManifest(*MallocCall);
3257  } else {
3258  A.deleteAfterManifest(*MallocCall);
3259  }
3260 
3261  if (isCallocLikeFn(MallocCall, TLI)) {
3262  auto *BI = new BitCastInst(AI, MallocCall->getType(), "calloc_bc",
3263  AI->getNextNode());
3264  Value *Ops[] = {
3265  BI, ConstantInt::get(F->getContext(), APInt(8, 0, false)), Size,
3267 
3268  Type *Tys[] = {BI->getType(), MallocCall->getOperand(0)->getType()};
3269  Module *M = F->getParent();
3270  Function *Fn = Intrinsic::getDeclaration(M, Intrinsic::memset, Tys);
3271  CallInst::Create(Fn, Ops, "", BI->getNextNode());
3272  }
3273  HasChanged = ChangeStatus::CHANGED;
3274  }
3275 
3276  return HasChanged;
3277  }
3278 
3279  /// Collection of all malloc calls in a function.
3281 
3282  /// Collection of malloc calls that cannot be converted.
3284 
3285  /// A map for each malloc call to the set of associated free calls.
3287 
3288  ChangeStatus updateImpl(Attributor &A) override;
3289 };
3290 
3291 ChangeStatus AAHeapToStackImpl::updateImpl(Attributor &A) {
3292  const Function *F = getAssociatedFunction();
3293  const auto *TLI = A.getInfoCache().getTargetLibraryInfoForFunction(*F);
3294 
3295  auto UsesCheck = [&](Instruction &I) {
3296  SmallPtrSet<const Use *, 8> Visited;
3297  SmallVector<const Use *, 8> Worklist;
3298 
3299  for (Use &U : I.uses())
3300  Worklist.push_back(&U);
3301 
3302  while (!Worklist.empty()) {
3303  const Use *U = Worklist.pop_back_val();
3304  if (!Visited.insert(U).second)
3305  continue;
3306 
3307  auto *UserI = U->getUser();
3308 
3309  if (isa<LoadInst>(UserI) || isa<StoreInst>(UserI))
3310  continue;
3311 
3312  // NOTE: Right now, if a function that has the malloc pointer as an argument
3313  // frees memory, we assume that the malloc pointer is freed.
3314 
3315  // TODO: Add nofree callsite argument attribute to indicate that pointer
3316  // argument is not freed.
3317  if (auto *CB = dyn_cast<CallBase>(UserI)) {
3318  if (!CB->isArgOperand(U))
3319  continue;
3320 
3321  if (CB->isLifetimeStartOrEnd())
3322  continue;
3323 
3324  // Record the free call for this malloc.
3325  if (isFreeCall(UserI, TLI)) {
3326  FreesForMalloc[&I].insert(
3327  cast<Instruction>(const_cast<User *>(UserI)));
3328  continue;
3329  }
3330 
3331  // If a function does not free memory we are fine
3332  const auto &NoFreeAA =
3333  A.getAAFor<AANoFree>(*this, IRPosition::callsite_function(*CB));
3334 
3335  unsigned ArgNo = U - CB->arg_begin();
3336  const auto &NoCaptureAA = A.getAAFor<AANoCapture>(
3337  *this, IRPosition::callsite_argument(*CB, ArgNo));
3338 
3339  if (!NoCaptureAA.isAssumedNoCapture() || !NoFreeAA.isAssumedNoFree()) {
3340  LLVM_DEBUG(dbgs() << "[H2S] Bad user: " << *UserI << "\n");
3341  return false;
3342  }
3343  continue;
3344  }
3345 
3346  if (isa<GetElementPtrInst>(UserI) || isa<BitCastInst>(UserI)) {
3347  for (Use &U : UserI->uses())
3348  Worklist.push_back(&U);
3349  continue;
3350  }
3351 
3352  // Unknown user.
3353  LLVM_DEBUG(dbgs() << "[H2S] Unknown user: " << *UserI << "\n");
3354  return false;
3355  }
3356  return true;
3357  };
3358 
3359  auto MallocCallocCheck = [&](Instruction &I) {
3360  if (isMallocLikeFn(&I, TLI)) {
3361  if (auto *Size = dyn_cast<ConstantInt>(I.getOperand(0)))
3362  if (!Size->getValue().sle(MaxHeapToStackSize))
3363  return true;
3364  } else if (isCallocLikeFn(&I, TLI)) {
3365  bool Overflow = false;
3366  if (auto *Num = dyn_cast<ConstantInt>(I.getOperand(0)))
3367  if (auto *Size = dyn_cast<ConstantInt>(I.getOperand(1)))
3368  if (!(Size->getValue().umul_ov(Num->getValue(), Overflow))
3369  .sle(MaxHeapToStackSize))
3370  if (!Overflow)
3371  return true;
3372  } else {
3373  BadMallocCalls.insert(&I);
3374  return true;
3375  }
3376 
3377  if (BadMallocCalls.count(&I))
3378  return true;
3379 
3380  if (UsesCheck(I))
3381  MallocCalls.insert(&I);
3382  else
3383  BadMallocCalls.insert(&I);
3384  return true;
3385  };
3386 
3387  size_t NumBadMallocs = BadMallocCalls.size();
3388 
3389  A.checkForAllCallLikeInstructions(MallocCallocCheck, *this);
3390 
3391  if (NumBadMallocs != BadMallocCalls.size())
3392  return ChangeStatus::CHANGED;
3393 
3394  return ChangeStatus::UNCHANGED;
3395 }
3396 
3399 
3400  /// See AbstractAttribute::trackStatistics()
3401  void trackStatistics() const override {
3402  STATS_DECL(MallocCalls, Function,
3403  "Number of MallocCalls converted to allocas");
3404  BUILD_STAT_NAME(MallocCalls, Function) += MallocCalls.size();
3405  }
3406 };
3407 
3408 /// ----------------------------------------------------------------------------
3409 /// Attributor
3410 /// ----------------------------------------------------------------------------
3411 
3412 bool Attributor::isAssumedDead(const AbstractAttribute &AA,
3413  const AAIsDead *LivenessAA) {
3414  const Instruction *CtxI = AA.getIRPosition().getCtxI();
3415  if (!CtxI)
3416  return false;
3417 
3418  if (!LivenessAA)
3419  LivenessAA =
3420  &getAAFor<AAIsDead>(AA, IRPosition::function(*CtxI->getFunction()),
3421  /* TrackDependence */ false);
3422 
3423  // Don't check liveness for AAIsDead.
3424  if (&AA == LivenessAA)
3425  return false;
3426 
3427  if (!LivenessAA->isAssumedDead(CtxI))
3428  return false;
3429 
3430  // We actually used liveness information so we have to record a dependence.
3431  recordDependence(*LivenessAA, AA);
3432 
3433  return true;
3434 }
3435 
3436 bool Attributor::checkForAllCallSites(const function_ref<bool(CallSite)> &Pred,
3437  const AbstractAttribute &QueryingAA,
3438  bool RequireAllCallSites) {
3439  // We can try to determine information from
3440  // the call sites. However, this is only possible if all call sites are
3441  // known, which requires the function to have internal linkage.
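  // For example, a function with internal linkage (static at the C level) can
  // only be referenced from inside its module, so walking its uses enumerates
  // every call site; an externally visible function may have callers that are
  // not visible here.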
3442  const IRPosition &IRP = QueryingAA.getIRPosition();
3443  const Function *AssociatedFunction = IRP.getAssociatedFunction();
3444  if (!AssociatedFunction)
3445  return false;
3446 
3447  if (RequireAllCallSites && !AssociatedFunction->hasInternalLinkage()) {
3448  LLVM_DEBUG(
3449  dbgs()
3450  << "[Attributor] Function " << AssociatedFunction->getName()
3451  << " has no internal linkage, hence not all call sites are known\n");
3452  return false;
3453  }
3454 
3455  for (const Use &U : AssociatedFunction->uses()) {
3456  Instruction *I = dyn_cast<Instruction>(U.getUser());
3457  // TODO: Deal with abstract call sites here.
3458  if (!I)
3459  return false;
3460 
3461  Function *Caller = I->getFunction();
3462 
3463  const auto &LivenessAA = getAAFor<AAIsDead>(
3464  QueryingAA, IRPosition::function(*Caller), /* TrackDependence */ false);
3465 
3466  // Skip dead calls.
3467  if (LivenessAA.isAssumedDead(I)) {
3468  // We actually used liveness information so we have to record a
3469  // dependence.
3470  recordDependence(LivenessAA, QueryingAA);
3471  continue;
3472  }
3473 
3474  CallSite CS(U.getUser());
3475  if (!CS || !CS.isCallee(&U)) {
3476  if (!RequireAllCallSites)
3477  continue;
3478 
3479  LLVM_DEBUG(dbgs() << "[Attributor] User " << *U.getUser()
3480  << " is an invalid use of "
3481  << AssociatedFunction->getName() << "\n");
3482  return false;
3483  }
3484 
3485  if (Pred(CS))
3486  continue;
3487 
3488  LLVM_DEBUG(dbgs() << "[Attributor] Call site callback failed for "
3489  << *CS.getInstruction() << "\n");
3490  return false;
3491  }
3492 
3493  return true;
3494 }
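// ---- Editor's note (illustrative, not part of Attributor.cpp) ----
// A typical use of checkForAllCallSites, sketched under the assumption that
// `A` is the Attributor and the caller is an abstract attribute anchored at
// an internal function; the predicate (the first actual argument must be a
// constant) is made up purely for illustration:
//
//   auto CallSitePred = [](CallSite CS) {
//     return isa<Constant>(CS.getArgument(0));
//   };
//   if (!A.checkForAllCallSites(CallSitePred, *this,
//                               /* RequireAllCallSites */ true))
//     return indicatePessimisticFixpoint();
// ---- end of note ----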
3495 
3496 bool Attributor::checkForAllReturnedValuesAndReturnInsts(
3497  const function_ref<bool(Value &, const SmallSetVector<ReturnInst *, 4> &)>
3498  &Pred,
3499  const AbstractAttribute &QueryingAA) {
3500 
3501  const IRPosition &IRP = QueryingAA.getIRPosition();
3502  // Since we need to provide return instructions we have to have an exact
3503  // definition.
3504  const Function *AssociatedFunction = IRP.getAssociatedFunction();
3505  if (!AssociatedFunction)
3506  return false;
3507 
3508  // If this is a call site query we use the call site specific return values
3509  // and liveness information.
3510  // TODO: use the function scope once we have call site AAReturnedValues.
3511  const IRPosition &QueryIRP = IRPosition::function(*AssociatedFunction);
3512  const auto &AARetVal = getAAFor<AAReturnedValues>(QueryingAA, QueryIRP);
3513  if (!AARetVal.getState().isValidState())
3514  return false;
3515 
3516  return AARetVal.checkForAllReturnedValuesAndReturnInsts(Pred);
3517 }
3518 
3519 bool Attributor::checkForAllReturnedValues(
3520  const function_ref<bool(Value &)> &Pred,
3521  const AbstractAttribute &QueryingAA) {
3522 
3523  const IRPosition &IRP = QueryingAA.getIRPosition();
3524  const Function *AssociatedFunction = IRP.getAssociatedFunction();
3525  if (!AssociatedFunction)
3526  return false;
3527 
3528  // TODO: use the function scope once we have call site AAReturnedValues.
3529  const IRPosition &QueryIRP = IRPosition::function(*AssociatedFunction);
3530  const auto &AARetVal = getAAFor<AAReturnedValues>(QueryingAA, QueryIRP);
3531  if (!AARetVal.getState().isValidState())
3532  return false;
3533 
3534  return AARetVal.checkForAllReturnedValuesAndReturnInsts(
3535  [&](Value &RV, const SmallSetVector<ReturnInst *, 4> &) {
3536  return Pred(RV);
3537  });
3538 }
3539 
3540 bool Attributor::checkForAllInstructions(
3541  const llvm::function_ref<bool(Instruction &)> &Pred,
3542  const AbstractAttribute &QueryingAA, const ArrayRef<unsigned> &Opcodes) {
3543 
3544  const IRPosition &IRP = QueryingAA.getIRPosition();
3545  // Since we need to provide instructions we have to have an exact definition.
3546  const Function *AssociatedFunction = IRP.getAssociatedFunction();
3547  if (!AssociatedFunction)
3548  return false;
3549 
3550  // TODO: use the function scope once we have call site AAReturnedValues.
3551  const IRPosition &QueryIRP = IRPosition::function(*AssociatedFunction);
3552  const auto &LivenessAA =
3553  getAAFor<AAIsDead>(QueryingAA, QueryIRP, /* TrackDependence */ false);
3554  bool AnyDead = false;
3555 
3556  auto &OpcodeInstMap =
3557  InfoCache.getOpcodeInstMapForFunction(*AssociatedFunction);
3558  for (unsigned Opcode : Opcodes) {
3559  for (Instruction *I : OpcodeInstMap[Opcode]) {
3560  // Skip dead instructions.
3561  if (LivenessAA.isAssumedDead(I)) {
3562  AnyDead = true;
3563  continue;
3564  }
3565 
3566  if (!Pred(*I))
3567  return false;
3568  }
3569  }
3570 
3571  // If we actually used liveness information, record a dependence.
3572  if (AnyDead)
3573  recordDependence(LivenessAA, QueryingAA);
3574 
3575  return true;
3576 }
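// ---- Editor's note (illustrative, not part of Attributor.cpp) ----
// checkForAllInstructions is driven by the opcode-to-instruction cache used
// above. Sketched usage from an abstract attribute's updateImpl, assuming `A`
// is the Attributor; the predicate (no call-like instruction may throw) is
// made up for illustration only:
//
//   auto NoThrowPred = [](Instruction &I) { return !I.mayThrow(); };
//   if (!A.checkForAllInstructions(NoThrowPred, *this,
//                                  {Instruction::Call, Instruction::Invoke}))
//     return indicatePessimisticFixpoint();
// ---- end of note ----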
3577 
3578 bool Attributor::checkForAllReadWriteInstructions(
3579  const llvm::function_ref<bool(Instruction &)> &Pred,
3580  AbstractAttribute &QueryingAA) {
3581 
3582  const Function *AssociatedFunction =
3583  QueryingAA.getIRPosition().getAssociatedFunction();
3584  if (!AssociatedFunction)
3585  return false;
3586 
3587  // TODO: use the function scope once we have call site AAReturnedValues.
3588  const IRPosition &QueryIRP = IRPosition::function(*AssociatedFunction);
3589  const auto &LivenessAA =
3590  getAAFor<AAIsDead>(QueryingAA, QueryIRP, /* TrackDependence */ false);
3591  bool AnyDead = false;
3592 
3593  for (Instruction *I :
3594  InfoCache.getReadOrWriteInstsForFunction(*AssociatedFunction)) {
3595  // Skip dead instructions.
3596  if (LivenessAA.isAssumedDead(I)) {
3597  AnyDead = true;
3598  continue;
3599  }
3600 
3601  if (!Pred(*I))
3602  return false;
3603  }
3604 
3605  // If we actually used liveness information, record a dependence.
3606  if (AnyDead)
3607  recordDependence(LivenessAA, QueryingAA);
3608 
3609  return true;
3610 }
3611 
3612 ChangeStatus Attributor::run(Module &M) {
3613  LLVM_DEBUG(dbgs() << "[Attributor] Identified and initialized "
3614  << AllAbstractAttributes.size()
3615  << " abstract attributes.\n");
3616 
3617  // Now that all abstract attributes are collected and initialized we start
3618  // the abstract analysis.
3619 
3620  unsigned IterationCounter = 1;
3621 
3622  SmallVector<AbstractAttribute *, 64> ChangedAAs;
3623  SetVector<AbstractAttribute *> Worklist;
3624  Worklist.insert(AllAbstractAttributes.begin(), AllAbstractAttributes.end());
3625 
3626  bool RecomputeDependences = false;
3627 
3628  do {
3629  // Remember the size to determine new attributes.
3630  size_t NumAAs = AllAbstractAttributes.size();
3631  LLVM_DEBUG(dbgs() << "\n\n[Attributor] #Iteration: " << IterationCounter
3632  << ", Worklist size: " << Worklist.size() << "\n");
3633 
3634  // If dependences (=QueryMap) are recomputed we have to look at all abstract
3635  // attributes again, regardless of what changed in the last iteration.
3636  if (RecomputeDependences) {
3637  LLVM_DEBUG(
3638  dbgs() << "[Attributor] Run all AAs to recompute dependences\n");
3639  QueryMap.clear();
3640  ChangedAAs.clear();
3641  Worklist.insert(AllAbstractAttributes.begin(),
3642  AllAbstractAttributes.end());
3643  }
3644 
3645  // Add all abstract attributes that are potentially dependent on one that
3646  // changed to the work list.
3647  for (AbstractAttribute *ChangedAA : ChangedAAs) {
3648  auto &QuerriedAAs = QueryMap[ChangedAA];
3649  Worklist.insert(QuerriedAAs.begin(), QuerriedAAs.end());
3650  }
3651 
3652  LLVM_DEBUG(dbgs() << "[Attributor] #Iteration: " << IterationCounter
3653  << ", Worklist+Dependent size: " << Worklist.size()
3654  << "\n");
3655 
3656  // Reset the changed set.
3657  ChangedAAs.clear();
3658 
3659  // Update all abstract attribute in the work list and record the ones that
3660  // changed.
3661  for (AbstractAttribute *AA : Worklist)
3662  if (!isAssumedDead(*AA, nullptr))
3663  if (AA->update(*this) == ChangeStatus::CHANGED)
3664  ChangedAAs.push_back(AA);
3665 
3666  // Check if we recompute the dependences in the next iteration.
3667  RecomputeDependences = (DepRecomputeInterval > 0 &&
3668  IterationCounter % DepRecomputeInterval == 0);
3669 
3670  // Add attributes to the changed set if they have been created in the last
3671  // iteration.
3672  ChangedAAs.append(AllAbstractAttributes.begin() + NumAAs,
3673  AllAbstractAttributes.end());
3674 
3675  // Reset the work list and repopulate with the changed abstract attributes.
3676  // Note that dependent ones are added above.
3677  Worklist.clear();
3678  Worklist.insert(ChangedAAs.begin(), ChangedAAs.end());
3679 
3680  } while (!Worklist.empty() && (IterationCounter++ < MaxFixpointIterations ||
3681  VerifyMaxFixpointIterations));
3682 
3683  LLVM_DEBUG(dbgs() << "\n[Attributor] Fixpoint iteration done after: "
3684  << IterationCounter << "/" << MaxFixpointIterations
3685  << " iterations\n");
3686 
3687  size_t NumFinalAAs = AllAbstractAttributes.size();
3688 
3689  bool FinishedAtFixpoint = Worklist.empty();
3690 
3691  // Reset abstract attributes not settled in a sound fixpoint by now. This
3692  // happens when we stopped the fixpoint iteration early. Note that only the
3693  // ones marked as "changed" *and* the ones transitively depending on them
3694  // need to be reverted to a pessimistic state. Others might not be in a
3695  // fixpoint state but we can use the optimistic results for them anyway.
3696  SmallPtrSet<AbstractAttribute *, 32> Visited;
3697  for (unsigned u = 0; u < ChangedAAs.size(); u++) {
3698  AbstractAttribute *ChangedAA = ChangedAAs[u];
3699  if (!Visited.insert(ChangedAA).second)
3700  continue;
3701 
3702  AbstractState &State = ChangedAA->getState();
3703  if (!State.isAtFixpoint()) {
3704  State.indicatePessimisticFixpoint();
3705 
3706  NumAttributesTimedOut++;
3707  }
3708 
3709  auto &QuerriedAAs = QueryMap[ChangedAA];
3710  ChangedAAs.append(QuerriedAAs.begin(), QuerriedAAs.end());
3711  }
3712 
3713  LLVM_DEBUG({
3714  if (!Visited.empty())
3715  dbgs() << "\n[Attributor] Finalized " << Visited.size()
3716  << " abstract attributes.\n";
3717  });
3718 
3719  unsigned NumManifested = 0;
3720  unsigned NumAtFixpoint = 0;
3721  ChangeStatus ManifestChange = ChangeStatus::UNCHANGED;
3722  for (AbstractAttribute *AA : AllAbstractAttributes) {
3723  AbstractState &State = AA->getState();
3724 
3725  // If there is not already a fixpoint reached, we can now take the
3726  // optimistic state. This is correct because we enforced a pessimistic one
3727  // on abstract attributes that were transitively dependent on a changed one
3728  // already above.
3729  if (!State.isAtFixpoint())
3730  State.indicateOptimisticFixpoint();
3731 
3732  // If the state is invalid, we do not try to manifest it.
3733  if (!State.isValidState())
3734  continue;
3735 
3736  // Skip dead code.
3737  if (isAssumedDead(*AA, nullptr))
3738  continue;
3739  // Manifest the state and record if we changed the IR.
3740  ChangeStatus LocalChange = AA->manifest(*this);
3741  if (LocalChange == ChangeStatus::CHANGED && AreStatisticsEnabled())
3742  AA->trackStatistics();
3743 
3744  ManifestChange = ManifestChange | LocalChange;
3745 
3746  NumAtFixpoint++;
3747  NumManifested += (LocalChange == ChangeStatus::CHANGED);
3748  }
3749 
3750  (void)NumManifested;
3751  (void)NumAtFixpoint;
3752  LLVM_DEBUG(dbgs() << "\n[Attributor] Manifested " << NumManifested
3753  << " arguments while " << NumAtFixpoint
3754  << " were in a valid fixpoint state\n");
3755 
3756  // If verification is requested and we finished this run at a fixpoint while
3757  // also changing the IR, we re-run the whole fixpoint analysis, starting at
3758  // re-initialization of the arguments. This re-run should not result in an IR
3759  // change. Note, however, that the (virtual) state of attributes at the end of
3760  // the re-run might be more optimistic than the known state or the IR state if
3761  // the better state cannot be manifested.
3762  if (VerifyAttributor && FinishedAtFixpoint &&
3763  ManifestChange == ChangeStatus::CHANGED) {
3764  VerifyAttributor = false;
3765  ChangeStatus VerifyStatus = run(M);
3766  if (VerifyStatus != ChangeStatus::UNCHANGED)
3767  llvm_unreachable(
3768  "Attributor verification failed, re-run did result in an IR change "
3769  "even after a fixpoint was reached in the original run. (False "
3770  "positives possible!)");
3771  VerifyAttributor = true;
3772  }
3773 
3774  NumAttributesManifested += NumManifested;
3775  NumAttributesValidFixpoint += NumAtFixpoint;
3776 
3777  (void)NumFinalAAs;
3778  assert(
3779  NumFinalAAs == AllAbstractAttributes.size() &&
3780  "Expected the final number of abstract attributes to remain unchanged!");
3781 
3782  // Perform deletions at the end to avoid invalid references and to keep the
3783  // deletion order deterministic.
3783  {
3784  LLVM_DEBUG(dbgs() << "\n[Attributor] Delete at least "
3785  << ToBeDeletedFunctions.size() << " functions and "
3786  << ToBeDeletedBlocks.size() << " blocks and "
3787  << ToBeDeletedInsts.size() << " instructions\n");
3788  for (Instruction *I : ToBeDeletedInsts) {
3789  if (!I->use_empty())
3790  I->replaceAllUsesWith(UndefValue::get(I->getType()));
3791  I->eraseFromParent();
3792  }
3793 
3794  if (unsigned NumDeadBlocks = ToBeDeletedBlocks.size()) {
3795  SmallVector<BasicBlock *, 8> ToBeDeletedBBs;
3796  ToBeDeletedBBs.reserve(NumDeadBlocks);
3797  ToBeDeletedBBs.append(ToBeDeletedBlocks.begin(), ToBeDeletedBlocks.end());
3798  DeleteDeadBlocks(ToBeDeletedBBs);
3799  STATS_DECL(AAIsDead, BasicBlock,
3800  "Number of dead basic blocks deleted.");
3801  }
3802 
3803  STATS_DECL(AAIsDead, Function, "Number of dead functions deleted.");
3804  for (Function *Fn : ToBeDeletedFunctions) {
3805  Fn->replaceAllUsesWith(UndefValue::get(Fn->getType()));
3806  Fn->eraseFromParent();
3807  STATS_TRACK(AAIsDead, Function)
3808  }
3809 
3810  // Identify dead internal functions and delete them. This happens outside
3811  // the other fixpoint analysis as we might treat potentially dead functions
3812  // as live to lower the number of iterations. If they happen to be dead, the
3813  // below fixpoint loop will identify and eliminate them.
3814  SmallVector<Function *, 8> InternalFns;
3815  for (Function &F : M)
3816  if (F.hasInternalLinkage())
3817  InternalFns.push_back(&F);
3818 
3819  bool FoundDeadFn = true;
3820  while (FoundDeadFn) {
3821  FoundDeadFn = false;
3822  for (unsigned u = 0, e = InternalFns.size(); u < e; ++u) {
3823  Function *F = InternalFns[u];
3824  if (!F)
3825  continue;
3826 
3827  const auto *LivenessAA =
3828  lookupAAFor<AAIsDead>(IRPosition::function(*F));
3829  if (LivenessAA &&
3830  !checkForAllCallSites([](CallSite CS) { return false; },
3831  *LivenessAA, true))
3832  continue;
3833 
3836  F->eraseFromParent();
3837  InternalFns[u] = nullptr;
3838  FoundDeadFn = true;
3839  }
3840  }
3841  }
3842 
3843  if (VerifyMaxFixpointIterations &&
3844  IterationCounter != MaxFixpointIterations) {
3845  errs() << "\n[Attributor] Fixpoint iteration done after: "
3846  << IterationCounter << "/" << MaxFixpointIterations
3847  << " iterations\n";
3848  llvm_unreachable("The fixpoint was not reached with exactly the number of "
3849  "specified iterations!");
3850  }
3851 
3852  return ManifestChange;
3853 }
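// ---- Editor's illustrative sketch (not part of Attributor.cpp) ----
// The loop in run() above is, at its core, a dependence-driven worklist
// fixpoint. A minimal, self-contained rendition of that idea in plain C++;
// Node, its bounded update(), and the dependence map are stand-ins, not LLVM
// types:

#include <algorithm>
#include <map>
#include <set>
#include <vector>

namespace attributor_sketch {

struct Node {
  int State = 0;
  // Monotone, bounded update; returns true iff the state changed.
  bool update() {
    int Old = State;
    State = std::min(State + 1, 3);
    return State != Old;
  }
};

inline void runFixpoint(std::vector<Node *> &All,
                        std::map<Node *, std::vector<Node *>> &Dependents) {
  std::set<Node *> Worklist(All.begin(), All.end());
  while (!Worklist.empty()) {
    std::set<Node *> Changed;
    for (Node *N : Worklist)
      if (N->update())
        Changed.insert(N);

    // Re-seed the worklist with everything that changed and everything that
    // queried (depends on) a changed node; termination follows from the
    // bounded, monotone update.
    Worklist.clear();
    for (Node *C : Changed) {
      Worklist.insert(C);
      for (Node *Dep : Dependents[C])
        Worklist.insert(Dep);
    }
  }
}

} // namespace attributor_sketch
// ---- end of sketch ----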
3854 
3855 void Attributor::identifyDefaultAbstractAttributes(Function &F) {
3856  if (!VisitedFunctions.insert(&F).second)
3857  return;
3858 
3859  IRPosition FPos = IRPosition::function(F);
3860 
3861  // Check for dead BasicBlocks in every function.
3862  // We need dead instruction detection because we do not want to deal with
3863  // broken IR in which SSA rules do not apply.
3864  getOrCreateAAFor<AAIsDead>(FPos);
3865 
3866  // Every function might be "will-return".
3867  getOrCreateAAFor<AAWillReturn>(FPos);
3868 
3869  // Every function can be nounwind.
3870  getOrCreateAAFor<AANoUnwind>(FPos);
3871 
3872  // Every function might be marked "nosync"
3873  getOrCreateAAFor<AANoSync>(FPos);
3874 
3875  // Every function might be "no-free".
3876  getOrCreateAAFor<AANoFree>(FPos);
3877 
3878  // Every function might be "no-return".
3879  getOrCreateAAFor<AANoReturn>(FPos);
3880 
3881  // Every function might be applicable for Heap-To-Stack conversion.
3882  if (EnableHeapToStack)
3883  getOrCreateAAFor<AAHeapToStack>(FPos);
3884 
3885  // Return attributes are only appropriate if the return type is non void.
3886  Type *ReturnType = F.getReturnType();
3887  if (!ReturnType->isVoidTy()) {
3888  // Argument attribute "returned" --- Create only one per function even
3889  // though it is an argument attribute.
3890  getOrCreateAAFor<AAReturnedValues>(FPos);
3891 
3892  IRPosition RetPos = IRPosition::returned(F);
3893 
3894  // Every function might be simplified.
3895  getOrCreateAAFor<AAValueSimplify>(RetPos);
3896 
3897  if (ReturnType->isPointerTy()) {
3898 
3899  // Every function with pointer return type might be marked align.
3900  getOrCreateAAFor<AAAlign>(RetPos);
3901 
3902  // Every function with pointer return type might be marked nonnull.
3903  getOrCreateAAFor<AANonNull>(RetPos);
3904 
3905  // Every function with pointer return type might be marked noalias.
3906  getOrCreateAAFor<AANoAlias>(RetPos);
3907 
3908  // Every function with pointer return type might be marked
3909  // dereferenceable.
3910  getOrCreateAAFor<AADereferenceable>(RetPos);
3911  }
3912  }
3913 
3914  for (Argument &Arg : F.args()) {
3915  IRPosition ArgPos = IRPosition::argument(Arg);
3916 
3917  // Every argument might be simplified.
3918  getOrCreateAAFor<AAValueSimplify>(ArgPos);
3919 
3920  if (Arg.getType()->isPointerTy()) {
3921  // Every argument with pointer type might be marked nonnull.
3922  getOrCreateAAFor<AANonNull>(ArgPos);
3923 
3924  // Every argument with pointer type might be marked noalias.
3925  getOrCreateAAFor<AANoAlias>(ArgPos);
3926 
3927  // Every argument with pointer type might be marked dereferenceable.
3928  getOrCreateAAFor<AADereferenceable>(ArgPos);
3929 
3930  // Every argument with pointer type might be marked align.
3931  getOrCreateAAFor<AAAlign>(ArgPos);
3932 
3933  // Every argument with pointer type might be marked nocapture.
3934  getOrCreateAAFor<AANoCapture>(ArgPos);
3935  }
3936  }
3937 
3938  // Walk all instructions to find more attribute opportunities and also
3939  // interesting instructions that might be queried by abstract attributes
3940  // during their initialization or update.
3941  auto &ReadOrWriteInsts = InfoCache.FuncRWInstsMap[&F];
3942  auto &InstOpcodeMap = InfoCache.FuncInstOpcodeMap[&F];
3943 
3944  for (Instruction &I : instructions(&F)) {
3945  bool IsInterestingOpcode = false;
3946 
3947  // To allow easy access to all instructions in a function with a given
3948  // opcode we store them in the InfoCache. As not all opcodes are interesting
3949  // to concrete attributes we only cache the ones identified as interesting
3950  // in the switch below; extend the switch when a new concrete attribute
3951  // needs instructions with additional opcodes.
3952  switch (I.getOpcode()) {
3953  default:
3954  assert((!ImmutableCallSite(&I)) && (!isa<CallBase>(&I)) &&
3955  "New call site/base instruction type needs to be known in the "
3956  "attributor.");
3957  break;
3958  case Instruction::Load:
3959  // The alignment of a pointer is interesting for loads.
3960  getOrCreateAAFor<AAAlign>(
3961  IRPosition::value(*cast<LoadInst>(I).getPointerOperand()));
3962  break;
3963  case Instruction::Store:
3964  // The alignment of a pointer is interesting for stores.
3965  getOrCreateAAFor<AAAlign>(
3966  IRPosition::value(*cast<StoreInst>(I).getPointerOperand()));
3967  break;
3968  case Instruction::Call:
3969  case Instruction::CallBr:
3970  case Instruction::Invoke:
3971  case Instruction::CleanupRet:
3972  case Instruction::CatchSwitch:
3973  case Instruction::Resume:
3974  case Instruction::Ret:
3975  IsInterestingOpcode = true;
3976  }
3977  if (IsInterestingOpcode)
3978  InstOpcodeMap[I.getOpcode()].push_back(&I);
3979  if (I.mayReadOrWriteMemory())
3980  ReadOrWriteInsts.push_back(&I);
3981 
3982  CallSite CS(&I);
3983  if (CS && CS.getCalledFunction()) {
3984  for (int i = 0, e = CS.getCalledFunction()->arg_size(); i < e; i++) {
3985 
3986  IRPosition CSArgPos = IRPosition::callsite_argument(CS, i);
3987 
3988  // Call site argument might be simplified.
3989  getOrCreateAAFor<AAValueSimplify>(CSArgPos);
3990 
3991  if (!CS.getArgument(i)->getType()->isPointerTy())
3992  continue;
3993 
3994  // Call site argument attribute "non-null".
3995  getOrCreateAAFor<AANonNull>(CSArgPos);
3996 
3997  // Call site argument attribute "no-alias".
3998  getOrCreateAAFor<AANoAlias>(CSArgPos);
3999 
4000  // Call site argument attribute "dereferenceable".
4001  getOrCreateAAFor<AADereferenceable>(CSArgPos);
4002 
4003  // Call site argument attribute "align".
4004  getOrCreateAAFor<AAAlign>(CSArgPos);
4005  }
4006  }
4007  }
4008 }
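// ---- Editor's note (illustrative, not part of Attributor.cpp) ----
// Seeding a new attribute kind follows the same pattern as the calls above:
// pick an IR position and ask the Attributor to create the abstract attribute
// for it. `AAMyDeduction` below is hypothetical and only stands in for a
// future attribute class:
//
//   IRPosition FPos = IRPosition::function(F);
//   getOrCreateAAFor<AAMyDeduction>(FPos);
// ---- end of note ----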
4009 
4010 /// Helpers to ease debugging through output streams and print calls.
4011 ///
4012 ///{
4013 raw_ostream &llvm::operator<<(raw_ostream &OS, ChangeStatus S) {
4014  return OS << (S == ChangeStatus::CHANGED ? "changed" : "unchanged");
4015 }
4016 
4017 raw_ostream &llvm::operator<<(raw_ostream &OS, IRPosition::Kind AP) {
4018  switch (AP) {
4019  case IRPosition::IRP_INVALID:
4020  return OS << "inv";
4021  case IRPosition::IRP_FLOAT:
4022  return OS << "flt";
4023  case IRPosition::IRP_RETURNED:
4024  return OS << "fn_ret";
4025  case IRPosition::IRP_CALL_SITE_RETURNED:
4026  return OS << "cs_ret";
4027  case IRPosition::IRP_FUNCTION:
4028  return OS << "fn";
4029  case IRPosition::IRP_CALL_SITE:
4030  return OS << "cs";
4031  case IRPosition::IRP_ARGUMENT:
4032  return OS << "arg";
4033  case IRPosition::IRP_CALL_SITE_ARGUMENT:
4034  return OS << "cs_arg";
4035  }
4036  llvm_unreachable("Unknown attribute position!");
4037 }
4038 
4039 raw_ostream &llvm::operator<<(raw_ostream &OS, const IRPosition &Pos) {
4040  const Value &AV = Pos.getAssociatedValue();
4041  return OS << "{" << Pos.getPositionKind() << ":" << AV.getName() << " ["
4042  << Pos.getAnchorValue().getName() << "@" << Pos.getArgNo() << "]}";
4043 }
4044 
4045 raw_ostream &llvm::operator<<(raw_ostream &OS, const IntegerState &S) {
4046  return OS << "(" << S.getKnown() << "-" << S.getAssumed() << ")"
4047  << static_cast<const AbstractState &>(S);
4048 }
4049 
4050 raw_ostream &llvm::operator<<(raw_ostream &OS, const AbstractState &S) {
4051  return OS << (!S.isValidState() ? "top" : (S.isAtFixpoint() ? "fix" : ""));
4052 }
4053 
4054 raw_ostream &llvm::operator<<(raw_ostream &OS, const AbstractAttribute &AA) {
4055  AA.print(OS);
4056  return OS;
4057 }
4058 
4059 void AbstractAttribute::print(raw_ostream &OS) const {
4060  OS << "[P: " << getIRPosition() << "][" << getAsStr() << "][S: " << getState()
4061  << "]";
4062 }
4063 ///}
4064 
4065 /// ----------------------------------------------------------------------------
4066 /// Pass (Manager) Boilerplate
4067 /// ----------------------------------------------------------------------------
4068 
4069 static bool runAttributorOnModule(Module &M, AnalysisGetter &AG) {
4070  if (DisableAttributor)
4071  return false;
4072 
4073  LLVM_DEBUG(dbgs() << "[Attributor] Run on module with " << M.size()
4074  << " functions.\n");
4075 
4076  // Create an Attributor and initially empty information cache that is filled
4077  // while we identify default attribute opportunities.
4078  InformationCache InfoCache(M.getDataLayout(), AG);
4079  Attributor A(InfoCache, DepRecInterval);
4080 
4081  for (Function &F : M) {
4082  if (F.hasExactDefinition())
4083  NumFnWithExactDefinition++;
4084  else
4085  NumFnWithoutExactDefinition++;
4086 
4087  // For now we ignore naked and optnone functions.
4088  if (F.hasFnAttribute(Attribute::Naked) ||
4089  F.hasFnAttribute(Attribute::OptimizeNone))
4090  continue;
4091 
4092  // We look at internal functions only on-demand but if any use is not a
4093  // direct call, we have to do it eagerly.
4094  if (F.hasInternalLinkage()) {
4095  if (llvm::all_of(F.uses(), [](const Use &U) {
4096  return ImmutableCallSite(U.getUser()) &&
4097  ImmutableCallSite(U.getUser()).isCallee(&U);
4098  }))
4099  continue;
4100  }
4101 
4102  // Populate the Attributor with abstract attribute opportunities in the
4103  // function and the information cache with IR information.
4104  A.identifyDefaultAbstractAttributes(F);
4105  }
4106 
4107  return A.run(M) == ChangeStatus::CHANGED;
4108 }
4109 
4110 PreservedAnalyses AttributorPass::run(Module &M, ModuleAnalysisManager &AM) {
4111  auto &FAM = AM.getResult<FunctionAnalysisManagerModuleProxy>(M).getManager();
4112 
4113  AnalysisGetter AG(FAM);
4114  if (runAttributorOnModule(M, AG)) {
4115  // FIXME: Think about passes we will preserve and add them here.
4116  return PreservedAnalyses::none();
4117  }
4118  return PreservedAnalyses::all();
4119 }
4120 
4121 namespace {
4122 
4123 struct AttributorLegacyPass : public ModulePass {
4124  static char ID;
4125 
4126  AttributorLegacyPass() : ModulePass(ID) {
4127  initializeAttributorLegacyPassPass(*PassRegistry::getPassRegistry());
4128  }
4129 
4130  bool runOnModule(Module &M) override {
4131  if (skipModule(M))
4132  return false;
4133 
4134  AnalysisGetter AG;
4135  return runAttributorOnModule(M, AG);
4136  }
4137 
4138  void getAnalysisUsage(AnalysisUsage &AU) const override {
4139  // FIXME: Think about passes we will preserve and add them here.
4140  AU.addRequired<TargetLibraryInfoWrapperPass>();
4141  }
4142 };
4143 
4144 } // end anonymous namespace
4145 
4146 Pass *llvm::createAttributorLegacyPass() { return new AttributorLegacyPass(); }
4147 
4148 char AttributorLegacyPass::ID = 0;
4149 
4150 const char AAReturnedValues::ID = 0;
4151 const char AANoUnwind::ID = 0;
4152 const char AANoSync::ID = 0;
4153 const char AANoFree::ID = 0;
4154 const char AANonNull::ID = 0;
4155 const char AANoRecurse::ID = 0;
4156 const char AAWillReturn::ID = 0;
4157 const char AANoAlias::ID = 0;
4158 const char AANoReturn::ID = 0;
4159 const char AAIsDead::ID = 0;
4160 const char AADereferenceable::ID = 0;
4161 const char AAAlign::ID = 0;
4162 const char AANoCapture::ID = 0;
4163 const char AAValueSimplify::ID = 0;
4164 const char AAHeapToStack::ID = 0;
4165 
4166 // Macro magic to create the static generator function for attributes that
4167 // follow the naming scheme.
4168 
4169 #define SWITCH_PK_INV(CLASS, PK, POS_NAME) \
4170  case IRPosition::PK: \
4171  llvm_unreachable("Cannot create " #CLASS " for a " POS_NAME " position!");
4172 
4173 #define SWITCH_PK_CREATE(CLASS, IRP, PK, SUFFIX) \
4174  case IRPosition::PK: \
4175  AA = new CLASS##SUFFIX(IRP); \
4176  break;
4177 
4178 #define CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
4179  CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
4180  CLASS *AA = nullptr; \
4181  switch (IRP.getPositionKind()) { \
4182  SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
4183  SWITCH_PK_INV(CLASS, IRP_FLOAT, "floating") \
4184  SWITCH_PK_INV(CLASS, IRP_ARGUMENT, "argument") \
4185  SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
4186  SWITCH_PK_INV(CLASS, IRP_CALL_SITE_RETURNED, "call site returned") \
4187  SWITCH_PK_INV(CLASS, IRP_CALL_SITE_ARGUMENT, "call site argument") \
4188  SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
4189  SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
4190  } \
4191  return *AA; \
4192  }
4193 
4194 #define CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
4195  CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
4196  CLASS *AA = nullptr; \
4197  switch (IRP.getPositionKind()) { \
4198  SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
4199  SWITCH_PK_INV(CLASS, IRP_FUNCTION, "function") \
4200  SWITCH_PK_INV(CLASS, IRP_CALL_SITE, "call site") \
4201  SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
4202  SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
4203  SWITCH_PK_CREATE(CLASS, IRP, IRP_RETURNED, Returned) \
4204  SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
4205  SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
4206  } \
4207  return *AA; \
4208  }
4209 
4210 #define CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
4211  CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
4212  CLASS *AA = nullptr; \
4213  switch (IRP.getPositionKind()) { \
4214  SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
4215  SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
4216  SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE, CallSite) \
4217  SWITCH_PK_CREATE(CLASS, IRP, IRP_FLOAT, Floating) \
4218  SWITCH_PK_CREATE(CLASS, IRP, IRP_ARGUMENT, Argument) \
4219  SWITCH_PK_CREATE(CLASS, IRP, IRP_RETURNED, Returned) \
4220  SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_RETURNED, CallSiteReturned) \
4221  SWITCH_PK_CREATE(CLASS, IRP, IRP_CALL_SITE_ARGUMENT, CallSiteArgument) \
4222  } \
4223  return *AA; \
4224  }
4225 
4226 #define CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS) \
4227  CLASS &CLASS::createForPosition(const IRPosition &IRP, Attributor &A) { \
4228  CLASS *AA = nullptr; \
4229  switch (IRP.getPositionKind()) { \
4230  SWITCH_PK_INV(CLASS, IRP_INVALID, "invalid") \
4231  SWITCH_PK_INV(CLASS, IRP_ARGUMENT, "argument") \
4232  SWITCH_PK_INV(CLASS, IRP_FLOAT, "floating") \
4233  SWITCH_PK_INV(CLASS, IRP_RETURNED, "returned") \
4234  SWITCH_PK_INV(CLASS, IRP_CALL_SITE_RETURNED, "call site returned") \
4235  SWITCH_PK_INV(CLASS, IRP_CALL_SITE_ARGUMENT, "call site argument") \
4236  SWITCH_PK_INV(CLASS, IRP_CALL_SITE, "call site") \
4237  SWITCH_PK_CREATE(CLASS, IRP, IRP_FUNCTION, Function) \
4238  } \
4239  AA->initialize(A); \
4240  return *AA; \
4241  }
4242 
4243 CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION(AANoUnwind)
4244 CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION(AANoSync)
4245 CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION(AANoFree)
4246 CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION(AANoRecurse)
4247 CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION(AAWillReturn)
4248 CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION(AANoReturn)
4249 CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION(AAIsDead)
4250 CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION(AAReturnedValues)
4251 
4252 CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION(AANonNull)
4253 CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION(AANoAlias)
4254 CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION(AADereferenceable)
4255 CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION(AAAlign)
4256 CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION(AANoCapture)
4257 
4258 CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION(AAValueSimplify)
4259 
4260 CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION(AAHeapToStack)
4261 
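// ---- Editor's note (illustrative, not part of Attributor.cpp) ----
// For reference, a CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION(AANoUnwind)
// style invocation expands (roughly) to a factory that only accepts function
// and call site positions:
//
//   AANoUnwind &AANoUnwind::createForPosition(const IRPosition &IRP,
//                                             Attributor &A) {
//     AANoUnwind *AA = nullptr;
//     switch (IRP.getPositionKind()) {
//     /* value positions hit llvm_unreachable via SWITCH_PK_INV */
//     case IRPosition::IRP_FUNCTION:
//       AA = new AANoUnwindFunction(IRP);
//       break;
//     case IRPosition::IRP_CALL_SITE:
//       AA = new AANoUnwindCallSite(IRP);
//       break;
//     }
//     return *AA;
//   }
// ---- end of note ----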
4262 #undef CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION
4263 #undef CREATE_VALUE_ABSTRACT_ATTRIBUTE_FOR_POSITION
4264 #undef CREATE_ALL_ABSTRACT_ATTRIBUTE_FOR_POSITION
4265 #undef SWITCH_PK_CREATE
4266 #undef SWITCH_PK_INV
4267 
4268 INITIALIZE_PASS_BEGIN(AttributorLegacyPass, "attributor",
4269  "Deduce and propagate attributes", false, false)
4270 INITIALIZE_PASS_DEPENDENCY(TargetLibraryInfoWrapperPass)
4271 INITIALIZE_PASS_END(AttributorLegacyPass, "attributor",
4272  "Deduce and propagate attributes", false, false)
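// ---- Editor's illustrative sketch (not part of Attributor.cpp) ----
// One way to run the deduction over a module programmatically with the legacy
// pass manager; assumes `M` is an llvm::Module the caller already owns and
// that the declaration of createAttributorLegacyPass() is available via the
// Attributor header included at the top of this file:

#include "llvm/IR/LegacyPassManager.h"
#include "llvm/IR/Module.h"

inline void runAttributorSketch(llvm::Module &M) {
  llvm::legacy::PassManager PM;
  PM.add(llvm::createAttributorLegacyPass());
  PM.run(M);
}
// ---- end of sketch ----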
An attribute for a call site return value.
Definition: Attributor.h:148
ChangeStatus clampStateAndIndicateChange< IntegerState >(IntegerState &S, const IntegerState &R)
Definition: Attributor.cpp:498
Pass interface - Implemented by all &#39;passes&#39;.
Definition: Pass.h:80
void DeleteDeadBlocks(ArrayRef< BasicBlock *> BBs, DomTreeUpdater *DTU=nullptr, bool KeepOneInputPHIs=false)
Delete the specified blocks from BB.
bool onlyReadsMemory() const
Determine if the function does not access or only reads memory.
Definition: Function.h:481
OpcodeInstMapTy & getOpcodeInstMapForFunction(const Function &F)
Return the map that relates "interesting" opcodes with all instructions with that opcode in F...
Definition: Attributor.h:558
static const char ID
Unique ID (due to the unique address)
Definition: Attributor.h:1762
uint64_t CallInst * C
AAIsDeadImpl(const IRPosition &IRP)
Return a value (possibly void), from a function.
StateType & getState() override
See AbstractAttribute::getState() {.
void initialize(Attributor &A) override
Initialize the state with the information in the Attributor A.
A parsed version of the target data layout string in and methods for querying it. ...
Definition: DataLayout.h:111
reference emplace_back(ArgTypes &&... Args)
Definition: SmallVector.h:641
void trackStatistics() const override
See AbstractAttribute::trackStatistics()
void trackStatistics() const override
See AbstractAttribute::trackStatistics()
ChangeStatus updateImpl(Attributor &A) override
See AbstractAttribute::initialize(...).
iterator_range< use_iterator > uses()
Definition: Value.h:374
StringRef getKindAsString() const
Return the attribute&#39;s kind as a string.
Definition: Attributes.cpp:216
raw_ostream & errs()
This returns a reference to a raw_ostream for standard error.
static IntegerType * getInt1Ty(LLVMContext &C)
Definition: Type.cpp:172
unsigned getIndexSizeInBits(unsigned AS) const
Size in bits of index used for address calculation in getelementptr.
Definition: DataLayout.h:399
static bool isEqualOrWorse(const Attribute &New, const Attribute &Old)
Return true if New is equal or worse than Old.
Definition: Attributor.cpp:250
NoCapture attribute for call site arguments.
virtual void getDeducedAttributes(LLVMContext &Ctx, SmallVectorImpl< Attribute > &Attrs) const override
see AbstractAttribute::isAssumedNoCaptureMaybeReturned(...).
void clear()
Definition: MapVector.h:88
Helper class for generic deduction: return value -> returned position.
Definition: Attributor.cpp:558
void trackStatistics() const override
See AbstractAttribute::trackStatistics()
static PassRegistry * getPassRegistry()
getPassRegistry - Access the global registry object, which is automatically initialized at applicatio...
ChangeStatus updateImpl(Attributor &A) override
See AbstractAttribute::updateImpl(...).
size_type size() const
Definition: MapVector.h:60
GCNRegPressure max(const GCNRegPressure &P1, const GCNRegPressure &P2)
static void determineFunctionCaptureCapabilities(const Function &F, IntegerState &State)
Set the NOT_CAPTURED_IN_MEM and NOT_CAPTURED_IN_RET bits in Known depending on the ability of the fun...
This class represents an incoming formal argument to a Function.
Definition: Argument.h:29
ChangeStatus updateImpl(Attributor &A) override
See AbstractAttribute::updateImpl(...).
AANoRecurseImpl(const IRPosition &IRP)
---------------— Function No-Return Attribute -------------------------—
void trackStatistics() const override
See AbstractAttribute::trackStatistics()
AAValueSimplifyFunction(const IRPosition &IRP)
Align attribute for a floating value.
This callback is used in conjunction with PointerMayBeCaptured.
static bool isNoSyncIntrinsic(Instruction *I)
Helper function uset to check if intrinsic is volatile (memcpy, memmove, memset). ...
static ChangeStatus manifestAttrs(Attributor &A, IRPosition &IRP, const ArrayRef< Attribute > &DeducedAttrs)
Definition: Attributor.cpp:308
NodeTy * getNextNode()
Get the next node, or nullptr for the list tail.
Definition: ilist_node.h:288
PassT::Result & getResult(IRUnitT &IR, ExtraArgTs... ExtraArgs)
Get the result of an analysis pass for a given IR unit.
Definition: PassManager.h:776
This class represents lattice values for constants.
Definition: AllocatorList.h:23
void trackStatistics() const override
See AbstractAttribute::trackStatistics()
void initialize(Attributor &A) override
See AbstractAttribute::initialize(...).
const std::string getAsStr() const override
See AbstractState::getAsStr().
bool isAtomic() const
Return true if this instruction has an AtomicOrdering of unordered or higher.
size_type size() const
Determine the number of elements in the SetVector.
Definition: SetVector.h:77
This is the interface for a simple mod/ref and alias analysis over globals.
SubsumingPositionIterator(const IRPosition &IRP)
Definition: Attributor.cpp:368
#define STATS_TRACK(NAME, TYPE)
Definition: Attributor.cpp:76
---------------------— No-Free Attributes -------------------------—
void trackStatistics() const override
See AbstractAttribute::trackStatistics()
AANonNullImpl(const IRPosition &IRP)
static Attribute getWithAlignment(LLVMContext &Context, uint64_t Align)
Return a uniquified Attribute object that has the specific alignment set.
Definition: Attributes.cpp:145
NonNull attribute for a call site return position.
ChangeStatus updateImpl(Attributor &A) override
See AbstractAttribute::updateImpl(...).
A Module instance is used to store all the information related to an LLVM module. ...
Definition: Module.h:65
bool user_empty() const
Definition: Value.h:383
void trackStatistics() const override
See AbstractAttribute::trackStatistics()
static Attribute getWithDereferenceableBytes(LLVMContext &Context, uint64_t Bytes)
Definition: Attributes.cpp:158
ChangeStatus
Simple enum class that forces the status to be spelled out explicitly.
Definition: Attributor.h:117
static CallInst * Create(FunctionType *Ty, Value *F, const Twine &NameStr="", Instruction *InsertBefore=nullptr)
NoRecurse attribute deduction for a call sites.
virtual void print(raw_ostream &OS) const
Helper functions, for debug purposes only.
A position that is not associated with a spot suitable for attributes.
Definition: Attributor.h:145
AANoAliasFloating(const IRPosition &IRP)
ChangeStatus manifest(Attributor &A) override
See AbstractAttribute::manifest(...).
Implements a dense probed hash-table based set.
Definition: DenseSet.h:249
static bool checkAndUpdate(Attributor &A, const AbstractAttribute &QueryingAA, Value &QueryingValue, Optional< Value *> &AccumulatedSimplifiedValue)
Helper function for querying AAValueSimplify and updating candicate.
If we do not capture the value in memory, through integers, or as a derived pointer we know it is not...
Definition: Attributor.h:1709
bool shouldExplore(const Use *U) override
See CaptureTracker::shouldExplore(...).
ChangeStatus updateImpl(Attributor &A) override
See AbstractAttribute::updateImpl(...).
Dereferenceable attribute for an argument.
IntegerState DerefBytesState
State representing for dereferenceable bytes.
Definition: Attributor.h:1555
static const char ID
Unique ID (due to the unique address)
Definition: Attributor.h:1484
An abstract interface for all nocapture attributes.
Definition: Attributor.h:1691
This class represents a function call, abstracting a target machine&#39;s calling convention.
unsigned constexpr DefaultMaxUsesToExplore
The default value for MaxUsesToExplore argument.
Abstract Attribute Classes
Definition: Attributor.h:1318
AAAlignCallSiteArgument(const IRPosition &IRP)
ChangeStatus updateImpl(Attributor &A) override
See AbstractAttribute::updateImpl(...).
Definition: Attributor.cpp:708
AANoRecurseFunction(const IRPosition &IRP)
The two locations do not alias at all.
Definition: AliasAnalysis.h:84
static cl::opt< bool > VerifyMaxFixpointIterations("attributor-max-iterations-verify", cl::Hidden, cl::desc("Verify that max-iterations is a tight bound for a fixpoint"), cl::init(false))
An efficient, type-erasing, non-owning reference to a callable.
Definition: STLExtras.h:104
void trackStatistics() const override
See AbstractAttribute::trackStatistics()
const std::string getAsStr() const override
This function should return the "summarized" assumed state as string.
base_t getAssumed() const
Return the assumed state encoding.
Definition: Attributor.h:1007
AANoSyncImpl(const IRPosition &IRP)
static const char ID
Unique ID (due to the unique address)
Definition: Attributor.h:1786
--------------------— Heap-To-Stack Conversion ------------------------—
An attribute for a call site argument.
Definition: Attributor.h:152
LLVMContext & getContext() const
All values hold a context through their type.
Definition: Value.cpp:733
ChangeStatus updateImpl(Attributor &A) override
See AbstractAttribute::updateImpl(...).
void trackStatistics() const override
See AbstractAttribute::trackStatistics()
virtual const IRPosition & getIRPosition() const =0
Return an IR position, see struct IRPosition.
void trackStatistics() const override
See AbstractAttribute::trackStatistics()
This class implements a map that also provides access to all stored values in a deterministic order...
Definition: MapVector.h:37
bool all_of(R &&range, UnaryPredicate P)
Provide wrappers to std::all_of which take ranges instead of having to pass begin/end explicitly...
Definition: STLExtras.h:1165
bool isAssumedNoRecurse() const
Return true if "norecurse" is assumed.
Definition: Attributor.h:1418
An abstract attribute for willreturn.
Definition: Attributor.h:1431
STATISTIC(NumFunctions, "Total number of functions")
APInt operator &(APInt a, const APInt &b)
Definition: APInt.h:1985
Value & getAssociatedValue()
}
Definition: Attributor.h:348
void assumeLive(Attributor &A, const BasicBlock &BB)
Assume BB is (partially) live now and indicate to the Attributor A that internal function called from...
Base class for all callable instructions (InvokeInst and CallInst) Holds everything related to callin...
Definition: InstrTypes.h:1100
F(f)
bool isValidState() const override
See AbstractState::isValidState() NOTE: For now we simply pretend that the worst possible state is in...
Definition: Attributor.h:986
AAReturnedValuesCallSite(const IRPosition &IRP)
unsigned getPointerAddressSpace() const
Get the address space of this pointer or pointer vector type.
Definition: DerivedTypes.h:580
void trackStatistics() const override
See AbstractAttribute::trackStatistics()
ChangeStatus updateImpl(Attributor &A) override
See AbstractAttribute::updateImpl(...).
FunTy * getCalledFunction() const
Return the function being called if this is a direct call, otherwise return null (if it&#39;s an indirect...
Definition: CallSite.h:111
ChangeStatus updateImpl(Attributor &A) override
See AbstractAttribute::updateImpl(...).
const Instruction * getTerminator() const LLVM_READONLY
Returns the terminator instruction if the block is well formed or null if the block is not well forme...
Definition: BasicBlock.cpp:137
void addPotentialCopy(CallSite CS)
Register CS as potential copy of the value we are checking.
BasicBlock * SplitBlock(BasicBlock *Old, Instruction *SplitPt, DominatorTree *DT=nullptr, LoopInfo *LI=nullptr, MemorySSAUpdater *MSSAU=nullptr, const Twine &BBName="")
Split the specified block at the specified instruction - everything before SplitPt stays in Old and e...
void reserve(size_type N)
Definition: SmallVector.h:369
--------------------—NoUnwind Function Attribute-----------------------—
Definition: Attributor.cpp:655
AANoReturnCallSite(const IRPosition &IRP)
Kind
The positions we distinguish in the IR.
Definition: Attributor.h:143
Wrapper for FunctoinAnalysisManager.
Definition: Attributor.h:524
NoAlias attribute for function return value.
NoFree attribute deduction for a call sites.
NoAlias attribute for an argument.
ChangeStatus updateImpl(Attributor &A) override
See AbstractAttribute::updateImpl(...).
NoUnwind attribute deduction for a call sites.
Definition: Attributor.cpp:696
NoCapture attribute for function return value.
bool checkForAllReturnedValuesAndReturnInsts(const function_ref< bool(Value &, const SmallSetVector< ReturnInst *, 4 > &)> &Pred) const override
See AbstractState::checkForAllReturnedValues(...).
Definition: Attributor.cpp:942
virtual ChangeStatus indicatePessimisticFixpoint()=0
Indicate that the abstract state should converge to the pessimistic state.
void initialize(Attributor &A) override
See AbstractAttribute::initialize(...).
const std::string getAsStr() const override
See AbstractAttribute::getAsStr()
ChangeStatus updateImpl(Attributor &A) override
See AbstractAttribute::updateImpl(...).
bool hasAttribute(unsigned Index, Attribute::AttrKind Kind) const
Return true if the attribute exists at the given index.
Instruction * getCtxI()
}
Definition: Attributor.h:328
AANonNullArgument(const IRPosition &IRP)
ChangeStatus manifest(Attributor &A) override
See AbstractAttribute::manifest(...).
unsigned getPointerAlignment(const DataLayout &DL) const
Returns an alignment of the pointer value.
Definition: Value.cpp:666
const CallInst * isFreeCall(const Value *I, const TargetLibraryInfo *TLI)
isFreeCall - Returns non-null if the value is a call to the builtin free()
AANonNullFloating(const IRPosition &IRP)
const std::string getAsStr() const override
See AbstractAttribute::getAsStr().
ChangeStatus updateImpl(Attributor &A) override
See AbstractAttribute::updateImpl(...).
virtual bool isDereferenceableOrNull(Value *O, const DataLayout &DL)
isDereferenceableOrNull - Overload to allow clients with additional knowledge about pointer dereferen...
AANoReturnFunction(const IRPosition &IRP)
AnalysisUsage & addRequired()
#define INITIALIZE_PASS_DEPENDENCY(depName)
Definition: PassSupport.h:50
const std::string getAsStr() const override
See AbstractAttribute::getAsStr()
void trackStatistics() const override
See AbstractAttribute::trackStatistics()
---------------------— No-Recurse Attributes -------------------------—
Align attribute for function return value.
bool isAssumedDead(const AbstractAttribute &AA, const AAIsDead *LivenessAA)
Return true if AA (or its context instruction) is assumed dead.
bool isAssumedDead(const BasicBlock *BB) const override
See AAIsDead::isAssumedDead(BasicBlock *).
const DataLayout & getDataLayout() const
Get the data layout for the module&#39;s target platform.
Definition: Module.cpp:369
An AbstractAttribute for noreturn.
Definition: Attributor.h:1488
uint64_t getValueAsInt() const
Return the attribute&#39;s value as an integer.
Definition: Attributes.cpp:209
A visitor class for IR positions.
Definition: Attributor.h:513
bool isStringAttribute() const
Return true if the attribute is a string (target-dependent) attribute.
Definition: Attributes.cpp:194
static const char ID
Unique ID (due to the unique address)
Definition: Attributor.h:1687
A Use represents the edge between a Value definition and its users.
Definition: Use.h:55
void initialize(Attributor &A) override
Initialize the state with the information in the Attributor A.
AAHeapToStackImpl(const IRPosition &IRP)
This class consists of common code factored out of the SmallVector class to reduce code duplication b...
Definition: APFloat.h:41
WillReturn attribute deduction for a call sites.
void trackStatistics() const override
See AbstractAttribute::trackStatistics()
An abstract attribute for norecurse.
Definition: Attributor.h:1412
static Attribute getWithDereferenceableOrNullBytes(LLVMContext &Context, uint64_t Bytes)
Definition: Attributes.cpp:164
#define STATS_DECLTRACK_ARG_ATTR(NAME)
Definition: Attributor.cpp:82
void initialize(Attributor &A) override
See AbstractAttribute::initialize(...).
unsigned getArgumentNo(Value::const_user_iterator I) const
Given a value use iterator, returns the argument that corresponds to it.
Definition: CallSite.h:206
ChangeStatus updateImpl(Attributor &A) override
The actual update/transfer function which has to be implemented by the derived classes.
This file contains the simple types necessary to represent the attributes associated with functions a...
bool isAssumedNoUnwind() const
Returns true if nounwind is assumed.
Definition: Attributor.h:1362
void initialize(Attributor &A) override
See AbstractAttribute::initialize(...).
ChangeStatus updateImpl(Attributor &A) override
See AbstractAttribute::updateImpl(...).
const std::string getAsStr() const override
This function should return the "summarized" assumed state as string.
#define STATS_DECLTRACK_CSARG_ATTR(NAME)
Definition: Attributor.cpp:84
bool canSimplifyInvokeNoUnwind(const Function *F)
llvm::iterator_range< iterator > returned_values() override
Definition: Attributor.cpp:799
void trackStatistics() const override
See AbstractAttribute::trackStatistics()
static cl::opt< unsigned > MaxFixpointIterations("attributor-max-iterations", cl::Hidden, cl::desc("Maximal number of fixpoint iterations."), cl::init(32))
void trackStatistics() const override
See AbstractAttribute::trackStatistics()
static const char ID
Unique ID (due to the unique address)
Definition: Attributor.h:1427
AAReturnedValuesImpl(const IRPosition &IRP)
}
Definition: Attributor.cpp:753
SmallSetVector< const Instruction *, 8 > ToBeExploredPaths
Collection of to be explored paths.
An abstract interface for all noalias attributes.
Definition: Attributor.h:1450
AANoCaptureReturned(const IRPosition &IRP)
bool checkForAllReadWriteInstructions(const llvm::function_ref< bool(Instruction &)> &Pred, AbstractAttribute &QueryingAA)
Check Pred on all Read/Write instructions.
AAAlignFloating(const IRPosition &IRP)
AANoSyncCallSite(const IRPosition &IRP)
void initialize(Attributor &A) override
See AbstractAttribute::initialize(...).
AtomicOrdering
Atomic ordering for LLVM&#39;s memory model.
ChangeStatus updateImpl(Attributor &A) override
See AbstractAttribute::updateImpl(...).
AttributeList getAttributes(LLVMContext &C, ID id)
Return the attributes for an intrinsic.
An attribute for the function return value.
Definition: Attributor.h:147
InstrTy * getInstruction() const
Definition: CallSite.h:96
uint32_t getAssumedDereferenceableBytes() const
Return assumed dereferenceable bytes.
Definition: Attributor.h:1654
ChangeStatus indicatePessimisticFixpoint() override
See AbstractState::indicatePessimisticFixpoint(...)
Definition: Attributor.h:998
----------------—AAIsDead Function Attribute--------------------—
NoAlias attribute for a floating value.
attributor
int64_t getSExtValue() const
Get sign extended value.
Definition: APInt.h:1581
ChangeStatus updateImpl(Attributor &A) override
See AbstractAttribute::updateImpl(...).
int getArgNo() const
}
Definition: Attributor.h:363
Type * getType() const
All values are typed, get the type of this value.
Definition: Value.h:245
ChangeStatus updateImpl(Attributor &A) override
See AbstractAttribute::updateImpl(...).
static cl::opt< bool > ManifestInternal("attributor-manifest-internal", cl::Hidden, cl::desc("Manifest Attributor internal string attributes."), cl::init(false))
bool insert(const value_type &X)
Insert a new element into the SetVector.
Definition: SetVector.h:141
AAValueSimplifyFloating(const IRPosition &IRP)
An abstract interface for liveness abstract attribute.
Definition: Attributor.h:1507
AANoFreeCallSite(const IRPosition &IRP)
void initialize(Attributor &A) override
See AbstractAttribute::initialize(...).
bool checkForAllReturnedValuesAndReturnInsts(const function_ref< bool(Value &, const SmallSetVector< ReturnInst *, 4 > &)> &Pred, const AbstractAttribute &QueryingAA)
Check Pred on all values potentially returned by F.
AAAlignReturned(const IRPosition &IRP)
void initialize(Attributor &A) override
See AbstractAttribute::initialize(...).
Definition: Attributor.cpp:756
bool isDereferenceableOrNull(Value *O, const DataLayout &DL) override
isDereferenceableOrNull - Overload to allow clients with additional knowledge about pointer dereferen...
const T & getValue() const LLVM_LVALUE_FUNCTION
Definition: Optional.h:255
AAArgumentFromCallSiteArguments(const IRPosition &IRP)
Definition: Attributor.cpp:616
const std::string getAsStr() const override
See AbstractAttribute::getAsStr().
bool isAssumedNoFree() const
Return true if "nofree" is assumed.
Definition: Attributor.h:1475
This class represents a no-op cast from one type to another.
void initializeAttributorLegacyPassPass(PassRegistry &)
ChangeStatus run(Module &M)
Run the analyses until a fixpoint is reached or enforced (timeout).
ArrayRef - Represent a constant reference to an array (0 or more elements consecutively in memory)...
Definition: APInt.h:32
unsigned getOpcode() const
Returns a member of one of the enums like Instruction::Add.
Definition: Instruction.h:125
#define CREATE_FUNCTION_ONLY_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
bool isKnownDead(const Instruction *I) const override
See AAIsDead::isKnownDead(Instruction *I).
bool paramHasAttr(unsigned ArgNo, Attribute::AttrKind Kind) const
Return true if the call or the callee has the given attribute.
Definition: CallSite.h:385
void initialize(Attributor &A) override
See AbstractAttribute::initialize(...).
AAValueSimplifyReturned(const IRPosition &IRP)
AttributeList getAttributes() const
Return the attribute list for this Function.
Definition: Function.h:223
const Value * stripAndAccumulateInBoundsConstantOffsets(const DataLayout &DL, APInt &Offset) const
This is a wrapper around stripAndAccumulateConstantOffsets with the in-bounds requirement set to fals...
Definition: Value.h:591
void trackStatistics() const override
See AbstractAttribute::trackStatistics()
bool hasPersonalityFn() const
Check whether this function has a personality function.
Definition: Function.h:732
AANoSyncFunction(const IRPosition &IRP)
void replaceAllUsesWith(Value *V)
Change all uses of this to point to a new Value.
Definition: Value.cpp:429
An abstract interface for all nonnull attributes.
Definition: Attributor.h:1393
const std::string getAsStr() const override
See AbstractAttribute::getAsStr().
constexpr char Attrs[]
Key for Kernel::Metadata::mAttrs.
AANoUnwindCallSite(const IRPosition &IRP)
Definition: Attributor.cpp:697
void trackStatistics() const override
See AbstractAttribute::trackStatistics()
Function * getDeclaration(Module *M, ID id, ArrayRef< Type *> Tys=None)
Create or insert an LLVM Function declaration for an intrinsic, and return it.
Definition: Function.cpp:1057
ChangeStatus manifest(Attributor &A) override
See AbstractAttribute::manifest(...).
Definition: Attributor.h:1158
void initialize(Attributor &A) override
See AbstractAttribute::initialize(...).
AANoAliasArgument(const IRPosition &IRP)
ChangeStatus update(Attributor &A)
Hook for the Attributor to trigger an update of the internal state.
Definition: Attributor.cpp:292
#define STATS_DECLTRACK_CS_ATTR(NAME)
Definition: Attributor.cpp:89
IntegerState & takeKnownMaximum(base_t Value)
Take maximum of known and Value.
Definition: Attributor.h:1049
unsigned getAttrIdx() const
Return the index in the attribute list for this position.
Definition: Attributor.h:366
static PreservedAnalyses none()
Convenience factory function for the empty preserved set.
Definition: PassManager.h:156
virtual bool isAtFixpoint() const =0
Return if this abstract state is fixed, thus does not need to be updated if information changes as it...
const AAType & getAAFor(const AbstractAttribute &QueryingAA, const IRPosition &IRP, bool TrackDependence=true)
Lookup an abstract attribute of type AAType at position IRP.
Definition: Attributor.h:681
AAAlignImpl(const IRPosition &IRP)
---------------------— NoSync Function Attribute ----------------------—
virtual bool isValidState() const =0
Return if this abstract state is in a valid state.
Optional< Value * > getAssumedUniqueReturnValue(Attributor &A) const
Return an assumed unique return value if a single candidate is found.
Definition: Attributor.cpp:911
bool isValidState() const override
See AbstractState::isValidState().
Definition: Attributor.cpp:833
static const char ID
Unique ID (due to the unique address)
Definition: Attributor.h:1465
AAValueSimplifyCallSiteReturned(const IRPosition &IRP)
void initialize(Attributor &A) override
See AbstractAttribute::initialize(...).
#define BUILD_STAT_NAME(NAME, TYPE)
Definition: Attributor.cpp:72
bool isVoidTy() const
Return true if this is &#39;void&#39;.
Definition: Type.h:140
const BasicBlock & getEntryBlock() const
Definition: Function.h:664
void initialize(Attributor &A) override
See AbstractAttribute::initialize(...).
BasicBlock * SplitBlockPredecessors(BasicBlock *BB, ArrayRef< BasicBlock *> Preds, const char *Suffix, DominatorTree *DT=nullptr, LoopInfo *LI=nullptr, MemorySSAUpdater *MSSAU=nullptr, bool PreserveLCSSA=false)
This method introduces at least one new basic block into the function and moves some of the predecess...
bool isCapturedIn(bool CapturedInMem, bool CapturedInInt, bool CapturedInRet)
Update the state according to CapturedInMem, CapturedInInt, and CapturedInRet, then return the approp...
initializer< Ty > init(const Ty &Val)
Definition: CommandLine.h:432
AbstractState StateType
Definition: Attributor.h:1225
void trackStatistics() const override
See AbstractAttribute::trackStatistics()
Type * getReturnType() const
Returns the type of the ret val.
Definition: Function.h:168
base_t getKnown() const
Return the known state encoding.
Definition: Attributor.h:1004
AADereferenceableReturned(const IRPosition &IRP)
#define STATS_DECLTRACK_FN_ATTR(NAME)
Definition: Attributor.cpp:87
A set of analyses that are preserved following a run of a transformation pass.
Definition: PassManager.h:153
void trackStatistics() const override
See AbstractAttribute::trackStatistics()
Definition: Attributor.cpp:722
AANoCaptureImpl(const IRPosition &IRP)
AANoCaptureFloating(const IRPosition &IRP)
void trackStatistics() const override
See AbstractAttribute::trackStatistics()
bool checkForAllCallLikeInstructions(const function_ref< bool(Instruction &)> &Pred, const AbstractAttribute &QueryingAA)
Check Pred on all call-like instructions (=CallBased derived).
Definition: Attributor.h:800
static const char ID
Unique ID (due to the unique address)
Definition: Attributor.h:1737
AANoAliasCallSiteArgument(const IRPosition &IRP)
void trackStatistics() const override
See AbstractAttribute::trackStatistics()
AAValueSimplifyCallSite(const IRPosition &IRP)
void trackStatistics() const override
See AbstractAttribute::trackStatistics()
void trackStatistics() const override
See AbstractAttribute::trackStatistics()
unsigned getNumArgOperands() const
Definition: CallSite.h:303
ChangeStatus updateImpl(Attributor &A) override
See AbstractAttribute::updateImpl(...).
LLVM Basic Block Representation.
Definition: BasicBlock.h:57
The instances of the Type class are immutable: once they are created, they are never changed...
Definition: Type.h:45
const SmallSetVector< CallBase *, 4 > & getUnresolvedCalls() const override
Definition: Attributor.cpp:807
AADereferenceableCallSiteArgument(const IRPosition &IRP)
This is an important class for using LLVM in a threaded context.
Definition: LLVMContext.h:64
void trackStatistics() const override
See AbstractAttribute::trackStatistics()
virtual ChangeStatus updateImpl(Attributor &A) override
See AbstractAttribute::updateImpl(Attributor &A).
const std::string getAsStr() const override
See AbstractAttribute::getAsStr().
static const char ID
Unique ID (due to the unique address)
Definition: Attributor.h:1446
NonNull attribute for a floating value.
void trackStatistics() const override
See AbstractAttribute::trackStatistics()
Definition: Attributor.cpp:692
An abstract interface for all dereferenceable attribute.
Definition: Attributor.h:1635
This is an important base class in LLVM.
Definition: Constant.h:41
LLVM_NODISCARD bool empty() const
Definition: SmallPtrSet.h:91
static void clampCallSiteArgumentStates(Attributor &A, const AAType &QueryingAA, StateType &S)
Clamp the information known at all call sites for a given argument (identified by QueryingAA) into S...
Definition: Attributor.cpp:574
bool isPointerTy() const
True if this is an instance of PointerType.
Definition: Type.h:223
const Instruction & front() const
Definition: BasicBlock.h:280
ValTy * getArgument(unsigned ArgNo) const
Definition: CallSite.h:193
std::pair< iterator, bool > insert(PtrType Ptr)
Inserts Ptr if and only if there is no element in the container equal to Ptr.
Definition: SmallPtrSet.h:370
ChangeStatus updateImpl(Attributor &A) override
See AbstractAttribute::updateImpl(...).
static bool containsPossiblyEndlessLoop(Function *F)
bool captured(const Use *U) override
See CaptureTracker::captured(...).
void getDeducedAttributes(LLVMContext &Ctx, SmallVectorImpl< Attribute > &Attrs) const override
Value & getAnchorValue()
Return the value this abstract attribute is anchored with.
Definition: Attributor.h:243
Represent the analysis usage information of a pass.
static const IRPosition returned(const Function &F)
Create a position describing the returned value of F.
Definition: Attributor.h:175
An attribute for a function (scope).
Definition: Attributor.h:149
void initialize(Attributor &A) override
See AbstractAttribute::initialize(...).
void trackStatistics() const override
See AbstractAttribute::trackStatistics()
bool hasInternalLinkage() const
Definition: GlobalValue.h:443
Dereferenceable attribute for a call site argument.
const std::string getAsStr() const override
See AbstractAttribute::getAsStr().
void trackStatistics() const override
See AbstractAttribute::trackStatistics()
bool isAssumedNoCaptureMaybeReturned() const
Return true if we assume that the underlying value is not captured in its respective scope but we allow it to escape through a "return".
Definition: Attributor.h:1729
AANoUnwindFunction(const IRPosition &IRP)
Definition: Attributor.cpp:689
Attribute::AttrKind getKindAsEnum() const
Return the attribute's kind as an enum (Attribute::AttrKind).
Definition: Attributes.cpp:202
An attribute for a function argument.
Definition: Attributor.h:151
bool isFnInterfaceKind() const
Definition: Attributor.h:296
bool isEnumAttribute() const
Return true if the attribute is an Attribute::AttrKind type.
Definition: Attributes.cpp:186
ChangeStatus updateImpl(Attributor &A) override
See AbstractAttribute::updateImpl(...).
The fixpoint analysis framework that orchestrates the attribute deduction.
Definition: Attributor.h:637
size_t arg_size() const
Definition: Function.h:728
void trackStatistics() const override
See AbstractAttribute::trackStatistics()
AANoUnwindImpl(const IRPosition &IRP)
Definition: Attributor.cpp:656
ChangeStatus updateImpl(Attributor &A) override
See AbstractAttribute::updateImpl(...).
ChangeStatus updateImpl(Attributor &A) override
See AbstractAttribute::updateImpl(...).
Definition: Attributor.cpp:619
Argument * getArg(unsigned i) const
Definition: Function.h:713
----------------------- Function Return Values -----------------------
Definition: Attributor.cpp:732
size_type count(ConstPtrType Ptr) const
count - Return 1 if the specified pointer is in the set, 0 otherwise.
Definition: SmallPtrSet.h:381
#define STATS_DECL(NAME, TYPE, MSG)
Definition: Attributor.cpp:74
Returned values information for a call site.
PreservedAnalyses run(Module &M, ModuleAnalysisManager &AM)
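A hedged sketch of how the pass might be scheduled with the new pass manager; the surrounding pipeline setup is illustrative:
#include "llvm/IR/PassManager.h"
#include "llvm/Transforms/IPO/Attributor.h"
using namespace llvm;
// Illustrative pipeline fragment: run the Attributor over the module so that
// later IPO passes can rely on the deduced attributes.
static void addAttributorToPipeline(ModulePassManager &MPM) {
  MPM.addPass(AttributorPass());
}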
Optional< Value * > SimplifiedAssociatedValue
IntegerState StateType
Provide static access to the type of the state.
Definition: Attributor.h:1121
const DataLayout & getDataLayout() const
Return the data layout associated with the anchor scope.
Definition: Attributor.h:818
void initialize(Attributor &A) override
See AbstractAttribute::initialize(...).
const Function * getFunction() const
Return the function this instruction belongs to.
Definition: Instruction.cpp:59
void trackStatistics() const override
Hook to enable custom statistic tracking, called after manifest that resulted in a change if statistics are enabled.
LLVMContext & getContext() const
getContext - Return a reference to the LLVMContext associated with this function. ...
Definition: Function.cpp:205
Simple state with integers encoding.
Definition: Attributor.h:974
void getAttrs(ArrayRef< Attribute::AttrKind > AKs, SmallVectorImpl< Attribute > &Attrs) const
Return the attributes of any kind in AKs existing in the IR at a position that will affect this one.
Definition: Attributor.cpp:429
void initialize(Attributor &A) override
See AbstractAttribute::initialize(...).
#define CREATE_FUNCTION_ABSTRACT_ATTRIBUTE_FOR_POSITION(CLASS)
bool hasAttr(ArrayRef< Attribute::AttrKind > AKs) const
TODO: Figure out if the attribute-related helper functions should live here or somewhere else.
Definition: Attributor.cpp:421
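A small sketch combining hasAttr and getAttrs; the chosen attribute kinds and the helper name are arbitrary examples:
#include "llvm/ADT/SmallVector.h"
#include "llvm/IR/Attributes.h"
#include "llvm/Transforms/IPO/Attributor.h"
using namespace llvm;
// Illustrative only: check whether a position already carries alignment or
// dereferenceable information and, if so, collect the Attribute objects.
static bool collectAlignOrDeref(const IRPosition &IRP,
                                SmallVectorImpl<Attribute> &Attrs) {
  if (!IRP.hasAttr({Attribute::Alignment, Attribute::Dereferenceable}))
    return false;
  IRP.getAttrs({Attribute::Alignment, Attribute::Dereferenceable}, Attrs);
  return !Attrs.empty();
}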
ChangeStatus updateImpl(Attributor &A) override
See AbstractAttribute::updateImpl(...).
Definition: Attributor.cpp:562
static UndefValue * get(Type *T)
Static factory methods - Return an 'undef' object of the specified type.
Definition: Constants.cpp:1446
const Value * stripPointerCasts() const
Strip off pointer casts, all-zero GEPs and address space casts.
Definition: Value.cpp:525
AAReturnedFromReturnedValues(const IRPosition &IRP)
Definition: Attributor.cpp:559
void tooManyUses() override
See CaptureTracker::tooManyUses().
----------------------- Value Simplify Attribute -----------------------
static PreservedAnalyses all()
Construct a special preserved set that preserves all passes.
Definition: PassManager.h:159
Attribute getAttribute(unsigned Index, Attribute::AttrKind Kind) const
Return the attribute object that exists at the given index.
void initialize(Attributor &A) override
See AbstractAttribute::initialize(...).
AAAlignArgument(const IRPosition &IRP)
llvm::iterator_range< const_iterator > returned_values() const override
Definition: Attributor.cpp:803
size_t size() const
Definition: SmallVector.h:52
Base struct for all "concrete attribute" deductions.
Definition: Attributor.h:1224
static const char ID
Unique ID (due to the unique address)
Definition: Attributor.h:1548
void initialize(Attributor &A) override
See AbstractAttribute::initialize(...).
void recordDependence(const AbstractAttribute &FromAA, const AbstractAttribute &ToAA)
Explicitly record a dependence from FromAA to ToAA, that is if FromAA changes ToAA should be updated ...
Definition: Attributor.h:693
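A hedged sketch of the manual form: query another abstract attribute with TrackDependence disabled and only record the edge if the optimistic result was actually used; the helper and its parameters are illustrative:
#include "llvm/Transforms/IPO/Attributor.h"
using namespace llvm;
// Illustrative only: ensure QueryingAA is re-updated whenever the no-capture
// information it relied on changes.
static bool argIsNoCaptureMaybeReturned(Attributor &A,
                                        const AbstractAttribute &QueryingAA,
                                        const IRPosition &ArgPos) {
  const auto &NoCaptureAA =
      A.getAAFor<AANoCapture>(QueryingAA, ArgPos, /* TrackDependence */ false);
  if (!NoCaptureAA.isAssumedNoCaptureMaybeReturned())
    return false;
  // An assumed (not yet known) fact was used, so record the dependence.
  A.recordDependence(NoCaptureAA, QueryingAA);
  return true;
}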
static cl::opt< int > MaxHeapToStackSize("max-heap-to-stack-size", cl::init(128), cl::Hidden)
#define llvm_unreachable(msg)
Marks that the current location is not supposed to be reachable.
static const char ID
Unique ID (due to the unique address)
Definition: Attributor.h:1371
Liveness information for a call site.
ChangeStatus indicateOptimisticFixpoint() override
See AbstractState::indicateOptimisticFixpoint(...).
Definition: Attributor.cpp:836
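A common seeding pattern, sketched under the assumption of an IntegerState-based attribute and the arbitrary NoUnwind kind: if the IR already carries the attribute, nothing is left to deduce and the state can jump to its optimistic fixpoint.
#include "llvm/IR/Attributes.h"
#include "llvm/Transforms/IPO/Attributor.h"
using namespace llvm;
// Illustrative only: seed a state from an attribute already present in the IR.
static void seedFromExistingIRAttr(const IRPosition &IRP, IntegerState &S) {
  if (IRP.hasAttr({Attribute::NoUnwind}))
    S.indicateOptimisticFixpoint();
}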
AACaptureUseTracker(Attributor &A, AANoCapture &NoCaptureAA, const AAIsDead &IsDeadAA, IntegerState &State, SmallVectorImpl< const Value *> &PotentialCopies, unsigned &RemainingUsesToExplore)
Create a capture tracker that can lookup in-flight abstract attributes through the Attributor A...
NoAlias attribute deduction for a call site return value.
const AbstractState & getState() const override
See AbstractAttribute::getState(...).
Definition: Attributor.cpp:794
Align attribute deduction for a call site return value.
const StateType & getState() const override
void trackStatistics() const override
See AbstractAttribute::trackStatistics()
void trackStatistics() const override
See AbstractAttribute::trackStatistics()
void initialize(Attributor &A) override
See AbstractAttribute::initialize(...).
bool doesNotThrow() const
Determine if the function cannot unwind.
Definition: Function.h:533
void trackStatistics() const override
See AbstractAttribute::trackStatistics()
void exploreFromEntry(Attributor &A, const Function *F)
size_type size() const
Definition: SmallPtrSet.h:92
bool isLandingPad() const
Return true if this basic block is a landing pad.
Definition: BasicBlock.cpp:463
iterator_range< T > make_range(T x, T y)
Convenience function for iterating over sub-ranges.
std::pair< iterator, bool > insert(const std::pair< KeyT, ValueT > &KV)
Definition: MapVector.h:117
virtual StateType & getState()=0
Return the internal abstract state for inspection.
static const char ID
Unique ID (due to the unique address)
Definition: Attributor.h:1353
An AbstractAt