//===-- ValueEnumerator.cpp - Number values and types for bitcode writer --===//
//
//                     The LLVM Compiler Infrastructure
//
// This file is distributed under the University of Illinois Open Source
// License. See LICENSE.TXT for details.
//
//===----------------------------------------------------------------------===//
//
// This file implements the ValueEnumerator class.
//
//===----------------------------------------------------------------------===//

#include "ValueEnumerator.h"
#include "llvm/ADT/STLExtras.h"
#include "llvm/ADT/SmallPtrSet.h"
#include "llvm/IR/Constants.h"
#include "llvm/IR/DebugInfoMetadata.h"
#include "llvm/IR/DerivedTypes.h"
#include "llvm/IR/Instructions.h"
#include "llvm/IR/Module.h"
#include "llvm/IR/UseListOrder.h"
#include "llvm/IR/ValueSymbolTable.h"
#include "llvm/Support/Debug.h"
#include "llvm/Support/raw_ostream.h"
#include <algorithm>
using namespace llvm;

namespace {
struct OrderMap {
  DenseMap<const Value *, std::pair<unsigned, bool>> IDs;
  unsigned LastGlobalConstantID;
  unsigned LastGlobalValueID;

  OrderMap() : LastGlobalConstantID(0), LastGlobalValueID(0) {}

  bool isGlobalConstant(unsigned ID) const {
    return ID <= LastGlobalConstantID;
  }
  bool isGlobalValue(unsigned ID) const {
    return ID <= LastGlobalValueID && !isGlobalConstant(ID);
  }

  unsigned size() const { return IDs.size(); }
  std::pair<unsigned, bool> &operator[](const Value *V) { return IDs[V]; }
  std::pair<unsigned, bool> lookup(const Value *V) const {
    return IDs.lookup(V);
  }
  void index(const Value *V) {
    // Explicitly sequence get-size and insert-value operations to avoid UB.
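    // (Folding the two statements into "IDs[V].first = IDs.size() + 1;" would
    // evaluate the size() read and the inserting operator[] in an unspecified
    // order, so the new entry could be counted in its own ID.)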
    unsigned ID = IDs.size() + 1;
    IDs[V].first = ID;
  }
};
}

static void orderValue(const Value *V, OrderMap &OM) {
  if (OM.lookup(V).first)
    return;

  if (const Constant *C = dyn_cast<Constant>(V))
    if (C->getNumOperands() && !isa<GlobalValue>(C))
      for (const Value *Op : C->operands())
        if (!isa<BasicBlock>(Op) && !isa<GlobalValue>(Op))
          orderValue(Op, OM);

  // Note: we cannot cache this lookup above, since inserting into the map
  // changes the map's size, and thus affects the other IDs.
  OM.index(V);
}

static OrderMap orderModule(const Module &M) {
  // This needs to match the order used by ValueEnumerator::ValueEnumerator()
  // and ValueEnumerator::incorporateFunction().
  OrderMap OM;

  // In the reader, initializers of GlobalValues are set *after* all the
  // globals have been read.  Rather than awkwardly modeling this behaviour
  // directly in predictValueUseListOrderImpl(), just assign IDs to
  // initializers of GlobalValues before GlobalValues themselves to model this
  // implicitly.
  for (const GlobalVariable &G : M.globals())
    if (G.hasInitializer())
      if (!isa<GlobalValue>(G.getInitializer()))
        orderValue(G.getInitializer(), OM);
  for (const GlobalAlias &A : M.aliases())
    if (!isa<GlobalValue>(A.getAliasee()))
      orderValue(A.getAliasee(), OM);
  for (const GlobalIFunc &I : M.ifuncs())
    if (!isa<GlobalValue>(I.getResolver()))
      orderValue(I.getResolver(), OM);
  for (const Function &F : M) {
    for (const Use &U : F.operands())
      if (!isa<GlobalValue>(U.get()))
        orderValue(U.get(), OM);
  }
  OM.LastGlobalConstantID = OM.size();

  // Initializers of GlobalValues are processed in
  // BitcodeReader::ResolveGlobalAndAliasInits().  Match the order there rather
  // than ValueEnumerator, and match the code in predictValueUseListOrderImpl()
  // by giving IDs in reverse order.
  //
  // Since GlobalValues never reference each other directly (just through
  // initializers), their relative IDs only matter for determining order of
  // uses in their initializers.
  for (const Function &F : M)
    orderValue(&F, OM);
  for (const GlobalAlias &A : M.aliases())
    orderValue(&A, OM);
  for (const GlobalIFunc &I : M.ifuncs())
    orderValue(&I, OM);
  for (const GlobalVariable &G : M.globals())
    orderValue(&G, OM);
  OM.LastGlobalValueID = OM.size();

  for (const Function &F : M) {
    if (F.isDeclaration())
      continue;
    // Here we need to match the union of ValueEnumerator::incorporateFunction()
    // and WriteFunction().  Basic blocks are implicitly declared before
    // anything else (by declaring their size).
    for (const BasicBlock &BB : F)
      orderValue(&BB, OM);
    for (const Argument &A : F.args())
      orderValue(&A, OM);
    for (const BasicBlock &BB : F)
      for (const Instruction &I : BB)
        for (const Value *Op : I.operands())
          if ((isa<Constant>(*Op) && !isa<GlobalValue>(*Op)) ||
              isa<InlineAsm>(*Op))
            orderValue(Op, OM);
    for (const BasicBlock &BB : F)
      for (const Instruction &I : BB)
        orderValue(&I, OM);
  }
  return OM;
}

static void predictValueUseListOrderImpl(const Value *V, const Function *F,
                                         unsigned ID, const OrderMap &OM,
                                         UseListOrderStack &Stack) {
  // Predict use-list order for this one.
  typedef std::pair<const Use *, unsigned> Entry;
  SmallVector<Entry, 64> List;
  for (const Use &U : V->uses())
    // Check if this user will be serialized.
    if (OM.lookup(U.getUser()).first)
      List.push_back(std::make_pair(&U, List.size()));

  if (List.size() < 2)
    // We may have lost some users.
    return;

  bool IsGlobalValue = OM.isGlobalValue(ID);
  std::sort(List.begin(), List.end(), [&](const Entry &L, const Entry &R) {
    const Use *LU = L.first;
    const Use *RU = R.first;
    if (LU == RU)
      return false;

    auto LID = OM.lookup(LU->getUser()).first;
    auto RID = OM.lookup(RU->getUser()).first;

    // Global values are processed in reverse order.
    //
    // Moreover, initializers of GlobalValues are set *after* all the globals
    // have been read (despite having earlier IDs).  Rather than awkwardly
    // modeling this behaviour here, orderModule() has assigned IDs to
    // initializers of GlobalValues before GlobalValues themselves.
    if (OM.isGlobalValue(LID) && OM.isGlobalValue(RID))
      return LID < RID;

    // If ID is 4, then expect: 7 6 5 1 2 3.
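    // That is, users with IDs greater than this value's ID sort first, in
    // descending order, and users with IDs no greater than this value's ID
    // follow in ascending order (GlobalValue uses skip the reversal below).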
    if (LID < RID) {
      if (RID <= ID)
        if (!IsGlobalValue) // GlobalValue uses don't get reversed.
          return true;
      return false;
    }
    if (RID < LID) {
      if (LID <= ID)
        if (!IsGlobalValue) // GlobalValue uses don't get reversed.
          return false;
      return true;
    }

    // LID and RID are equal, so we have different operands of the same user.
    // Assume operands are added in order for all instructions.
    if (LID <= ID)
      if (!IsGlobalValue) // GlobalValue uses don't get reversed.
        return LU->getOperandNo() < RU->getOperandNo();
    return LU->getOperandNo() > RU->getOperandNo();
  });

  if (std::is_sorted(
          List.begin(), List.end(),
          [](const Entry &L, const Entry &R) { return L.second < R.second; }))
    // Order is already correct.
    return;

  // Store the shuffle.
  Stack.emplace_back(V, F, List.size());
  assert(List.size() == Stack.back().Shuffle.size() && "Wrong size");
  for (size_t I = 0, E = List.size(); I != E; ++I)
    Stack.back().Shuffle[I] = List[I].second;
}

static void predictValueUseListOrder(const Value *V, const Function *F,
                                     OrderMap &OM, UseListOrderStack &Stack) {
  auto &IDPair = OM[V];
  assert(IDPair.first && "Unmapped value");
  if (IDPair.second)
    // Already predicted.
    return;

  // Do the actual prediction.
  IDPair.second = true;
  if (!V->use_empty() && std::next(V->use_begin()) != V->use_end())
    predictValueUseListOrderImpl(V, F, IDPair.first, OM, Stack);

  // Recursive descent into constants.
  if (const Constant *C = dyn_cast<Constant>(V))
    if (C->getNumOperands()) // Visit GlobalValues.
      for (const Value *Op : C->operands())
        if (isa<Constant>(Op)) // Visit GlobalValues.
          predictValueUseListOrder(Op, F, OM, Stack);
}

static UseListOrderStack predictUseListOrder(const Module &M) {
  OrderMap OM = orderModule(M);

  // Use-list orders need to be serialized after all the users have been added
  // to a value, or else the shuffles will be incomplete.  Store them per
  // function in a stack.
  //
  // Aside from function order, the order of values doesn't matter much here.
  UseListOrderStack Stack;

  // We want to visit the functions backward now so we can list function-local
  // constants in the last Function they're used in.  Module-level constants
  // have already been visited above.
  for (auto I = M.rbegin(), E = M.rend(); I != E; ++I) {
    const Function &F = *I;
    if (F.isDeclaration())
      continue;
    for (const BasicBlock &BB : F)
      predictValueUseListOrder(&BB, &F, OM, Stack);
    for (const Argument &A : F.args())
      predictValueUseListOrder(&A, &F, OM, Stack);
    for (const BasicBlock &BB : F)
      for (const Instruction &I : BB)
        for (const Value *Op : I.operands())
          if (isa<Constant>(*Op) || isa<InlineAsm>(*Op)) // Visit GlobalValues.
            predictValueUseListOrder(Op, &F, OM, Stack);
    for (const BasicBlock &BB : F)
      for (const Instruction &I : BB)
        predictValueUseListOrder(&I, &F, OM, Stack);
  }

  // Visit globals last, since the module-level use-list block will be seen
  // before the function bodies are processed.
  for (const GlobalVariable &G : M.globals())
    predictValueUseListOrder(&G, nullptr, OM, Stack);
  for (const Function &F : M)
    predictValueUseListOrder(&F, nullptr, OM, Stack);
  for (const GlobalAlias &A : M.aliases())
    predictValueUseListOrder(&A, nullptr, OM, Stack);
  for (const GlobalIFunc &I : M.ifuncs())
    predictValueUseListOrder(&I, nullptr, OM, Stack);
  for (const GlobalVariable &G : M.globals())
    if (G.hasInitializer())
      predictValueUseListOrder(G.getInitializer(), nullptr, OM, Stack);
  for (const GlobalAlias &A : M.aliases())
    predictValueUseListOrder(A.getAliasee(), nullptr, OM, Stack);
  for (const GlobalIFunc &I : M.ifuncs())
    predictValueUseListOrder(I.getResolver(), nullptr, OM, Stack);
  for (const Function &F : M) {
    for (const Use &U : F.operands())
      predictValueUseListOrder(U.get(), nullptr, OM, Stack);
  }

  return Stack;
}

static bool isIntOrIntVectorValue(const std::pair<const Value*, unsigned> &V) {
  return V.first->getType()->isIntOrIntVectorTy();
}

ValueEnumerator::ValueEnumerator(const Module &M,
                                 bool ShouldPreserveUseListOrder)
    : ShouldPreserveUseListOrder(ShouldPreserveUseListOrder) {
  if (ShouldPreserveUseListOrder)
    UseListOrders = predictUseListOrder(M);

  // Enumerate the global variables.
  for (const GlobalVariable &GV : M.globals())
    EnumerateValue(&GV);

  // Enumerate the functions.
  for (const Function & F : M) {
    EnumerateValue(&F);
    EnumerateAttributes(F.getAttributes());
  }

  // Enumerate the aliases.
  for (const GlobalAlias &GA : M.aliases())
    EnumerateValue(&GA);

  // Enumerate the ifuncs.
  for (const GlobalIFunc &GIF : M.ifuncs())
    EnumerateValue(&GIF);

  // Remember the cutoff between GlobalValues and other constants.
  unsigned FirstConstant = Values.size();

  // Enumerate the global variable initializers and attributes.
  for (const GlobalVariable &GV : M.globals()) {
    if (GV.hasInitializer())
      EnumerateValue(GV.getInitializer());
    if (GV.hasAttributes())
      EnumerateAttributes(GV.getAttributesAsList(AttributeList::FunctionIndex));
  }

  // Enumerate the aliasees.
  for (const GlobalAlias &GA : M.aliases())
    EnumerateValue(GA.getAliasee());

  // Enumerate the ifunc resolvers.
  for (const GlobalIFunc &GIF : M.ifuncs())
    EnumerateValue(GIF.getResolver());

  // Enumerate any optional Function data.
  for (const Function &F : M)
    for (const Use &U : F.operands())
      EnumerateValue(U.get());

  // Enumerate the metadata type.
  //
  // TODO: Move this to ValueEnumerator::EnumerateOperandType() once bitcode
  // only encodes the metadata type when it's used as a value.
  EnumerateType(Type::getMetadataTy(M.getContext()));

  // Insert constants and metadata that are named at module level into the slot
  // pool so that the module symbol table can refer to them...
  EnumerateValueSymbolTable(M.getValueSymbolTable());
  EnumerateNamedMetadata(M);

  SmallVector<std::pair<unsigned, MDNode *>, 8> MDs;
  for (const GlobalVariable &GV : M.globals()) {
    MDs.clear();
    GV.getAllMetadata(MDs);
    for (const auto &I : MDs)
      // FIXME: Pass GV to EnumerateMetadata and arrange for the bitcode writer
      // to write metadata to the global variable's own metadata block
      // (PR28134).
      EnumerateMetadata(nullptr, I.second);
  }

  // Enumerate types used by function bodies and argument lists.
  for (const Function &F : M) {
    for (const Argument &A : F.args())
      EnumerateType(A.getType());

    // Enumerate metadata attached to this function.
    MDs.clear();
    F.getAllMetadata(MDs);
    for (const auto &I : MDs)
      EnumerateMetadata(F.isDeclaration() ? nullptr : &F, I.second);

    for (const BasicBlock &BB : F)
      for (const Instruction &I : BB) {
        for (const Use &Op : I.operands()) {
          auto *MD = dyn_cast<MetadataAsValue>(&Op);
          if (!MD) {
            EnumerateOperandType(Op);
            continue;
          }

          // Local metadata is enumerated during function-incorporation.
          if (isa<LocalAsMetadata>(MD->getMetadata()))
            continue;

          EnumerateMetadata(&F, MD->getMetadata());
        }
        EnumerateType(I.getType());
        if (const CallInst *CI = dyn_cast<CallInst>(&I))
          EnumerateAttributes(CI->getAttributes());
        else if (const InvokeInst *II = dyn_cast<InvokeInst>(&I))
          EnumerateAttributes(II->getAttributes());

        // Enumerate metadata attached to this instruction.
        MDs.clear();
        I.getAllMetadataOtherThanDebugLoc(MDs);
        for (unsigned i = 0, e = MDs.size(); i != e; ++i)
          EnumerateMetadata(&F, MDs[i].second);

        // Don't enumerate the location directly -- it has a special record
        // type -- but enumerate its operands.
        if (DILocation *L = I.getDebugLoc())
          for (const Metadata *Op : L->operands())
            EnumerateMetadata(&F, Op);
      }
  }

  // Optimize constant ordering.
  OptimizeConstants(FirstConstant, Values.size());

  // Organize metadata ordering.
  organizeMetadata();
}

unsigned ValueEnumerator::getInstructionID(const Instruction *Inst) const {
  InstructionMapType::const_iterator I = InstructionMap.find(Inst);
  assert(I != InstructionMap.end() && "Instruction is not mapped!");
  return I->second;
}

unsigned ValueEnumerator::getComdatID(const Comdat *C) const {
  unsigned ComdatID = Comdats.idFor(C);
  assert(ComdatID && "Comdat not found!");
  return ComdatID;
}

void ValueEnumerator::setInstructionID(const Instruction *I) {
  InstructionMap[I] = InstructionCount++;
}

unsigned ValueEnumerator::getValueID(const Value *V) const {
  if (auto *MD = dyn_cast<MetadataAsValue>(V))
    return getMetadataID(MD->getMetadata());

  ValueMapType::const_iterator I = ValueMap.find(V);
  assert(I != ValueMap.end() && "Value not in slotcalculator!");
  return I->second-1;
}

#if !defined(NDEBUG) || defined(LLVM_ENABLE_DUMP)
LLVM_DUMP_METHOD void ValueEnumerator::dump() const {
  print(dbgs(), ValueMap, "Default");
  dbgs() << '\n';
  print(dbgs(), MetadataMap, "MetaData");
  dbgs() << '\n';
}
#endif

void ValueEnumerator::print(raw_ostream &OS, const ValueMapType &Map,
                            const char *Name) const {

  OS << "Map Name: " << Name << "\n";
  OS << "Size: " << Map.size() << "\n";
  for (ValueMapType::const_iterator I = Map.begin(),
         E = Map.end(); I != E; ++I) {

    const Value *V = I->first;
    if (V->hasName())
      OS << "Value: " << V->getName();
    else
      OS << "Value: [null]\n";
    V->print(errs());
    errs() << '\n';

    OS << " Uses(" << std::distance(V->use_begin(), V->use_end()) << "):";
    for (const Use &U : V->uses()) {
      if (&U != &*V->use_begin())
        OS << ",";
      if (U->hasName())
        OS << " " << U->getName();
      else
        OS << " [null]";

    }
    OS << "\n\n";
  }
}

void ValueEnumerator::print(raw_ostream &OS, const MetadataMapType &Map,
                            const char *Name) const {

  OS << "Map Name: " << Name << "\n";
  OS << "Size: " << Map.size() << "\n";
  for (auto I = Map.begin(), E = Map.end(); I != E; ++I) {
    const Metadata *MD = I->first;
    OS << "Metadata: slot = " << I->second.ID << "\n";
    OS << "Metadata: function = " << I->second.F << "\n";
    MD->print(OS);
    OS << "\n";
  }
}

/// OptimizeConstants - Reorder constant pool for denser encoding.
void ValueEnumerator::OptimizeConstants(unsigned CstStart, unsigned CstEnd) {
  if (CstStart == CstEnd || CstStart+1 == CstEnd) return;

  if (ShouldPreserveUseListOrder)
    // Optimizing constants makes the use-list order difficult to predict.
    // Disable it for now when trying to preserve the order.
    return;

  std::stable_sort(Values.begin() + CstStart, Values.begin() + CstEnd,
                   [this](const std::pair<const Value *, unsigned> &LHS,
                          const std::pair<const Value *, unsigned> &RHS) {
                     // Sort by plane.
                     if (LHS.first->getType() != RHS.first->getType())
                       return getTypeID(LHS.first->getType()) <
                              getTypeID(RHS.first->getType());
                     // Then by frequency.
                     return LHS.second > RHS.second;
                   });

  // Ensure that integer and vector of integer constants are at the start of the
  // constant pool.  This is important so that GEP structure indices come before
  // gep constant exprs.
  std::stable_partition(Values.begin() + CstStart, Values.begin() + CstEnd,
                        isIntOrIntVectorValue);

  // Rebuild the modified portion of ValueMap.
  for (; CstStart != CstEnd; ++CstStart)
    ValueMap[Values[CstStart].first] = CstStart+1;
}


/// EnumerateValueSymbolTable - Insert all of the values in the specified symbol
/// table into the values table.
void ValueEnumerator::EnumerateValueSymbolTable(const ValueSymbolTable &VST) {
  for (ValueSymbolTable::const_iterator VI = VST.begin(), VE = VST.end();
       VI != VE; ++VI)
    EnumerateValue(VI->getValue());
}

/// Insert all of the values referenced by named metadata in the specified
/// module.
void ValueEnumerator::EnumerateNamedMetadata(const Module &M) {
  for (const auto &I : M.named_metadata())
    EnumerateNamedMDNode(&I);
}

void ValueEnumerator::EnumerateNamedMDNode(const NamedMDNode *MD) {
  for (unsigned i = 0, e = MD->getNumOperands(); i != e; ++i)
    EnumerateMetadata(nullptr, MD->getOperand(i));
}

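// Metadata is tagged with a "function ID": 0 means the metadata is owned by
// the module (or by more than one function); otherwise the tag is the owning
// function's value ID plus one, which is how the two cases are told apart.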
unsigned ValueEnumerator::getMetadataFunctionID(const Function *F) const {
  return F ? getValueID(F) + 1 : 0;
}

void ValueEnumerator::EnumerateMetadata(const Function *F, const Metadata *MD) {
  EnumerateMetadata(getMetadataFunctionID(F), MD);
}

void ValueEnumerator::EnumerateFunctionLocalMetadata(
    const Function &F, const LocalAsMetadata *Local) {
  EnumerateFunctionLocalMetadata(getMetadataFunctionID(&F), Local);
}

void ValueEnumerator::dropFunctionFromMetadata(
    MetadataMapType::value_type &FirstMD) {
  SmallVector<const MDNode *, 64> Worklist;
  auto push = [&Worklist](MetadataMapType::value_type &MD) {
    auto &Entry = MD.second;

    // Nothing to do if this metadata isn't tagged.
    if (!Entry.F)
      return;

    // Drop the function tag.
    Entry.F = 0;

    // If this has an ID and is an MDNode, then its operands have entries as
    // well.  We need to drop the function from them too.
    if (Entry.ID)
      if (auto *N = dyn_cast<MDNode>(MD.first))
        Worklist.push_back(N);
  };
  push(FirstMD);
  while (!Worklist.empty())
    for (const Metadata *Op : Worklist.pop_back_val()->operands()) {
      if (!Op)
        continue;
      auto MD = MetadataMap.find(Op);
      if (MD != MetadataMap.end())
        push(*MD);
    }
}

void ValueEnumerator::EnumerateMetadata(unsigned F, const Metadata *MD) {
  // It's vital for reader efficiency that uniqued subgraphs are done in
  // post-order; it's expensive when their operands have forward references.
  // If a distinct node is referenced from a uniqued node, it'll be delayed
  // until the uniqued subgraph has been completely traversed.
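  // For example, if a uniqued node !A has a distinct operand !D, !D is parked
  // in DelayedDistinctNodes while !A's uniqued subgraph is finished, and is
  // only traversed (and numbered) once that subgraph has been popped.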
  SmallVector<const MDNode *, 32> DelayedDistinctNodes;

  // Start by enumerating MD, and then work through its transitive operands in
  // post-order.  This requires a depth-first search.
  SmallVector<std::pair<const MDNode *, MDNode::op_iterator>, 32> Worklist;
  if (const MDNode *N = enumerateMetadataImpl(F, MD))
    Worklist.push_back(std::make_pair(N, N->op_begin()));

  while (!Worklist.empty()) {
    const MDNode *N = Worklist.back().first;

    // Enumerate operands until we hit a new node.  We need to traverse these
    // nodes' operands before visiting the rest of N's operands.
    MDNode::op_iterator I = std::find_if(
        Worklist.back().second, N->op_end(),
        [&](const Metadata *MD) { return enumerateMetadataImpl(F, MD); });
    if (I != N->op_end()) {
      auto *Op = cast<MDNode>(*I);
      Worklist.back().second = ++I;

      // Delay traversing Op if it's a distinct node and N is uniqued.
      if (Op->isDistinct() && !N->isDistinct())
        DelayedDistinctNodes.push_back(Op);
      else
        Worklist.push_back(std::make_pair(Op, Op->op_begin()));
      continue;
    }

    // All the operands have been visited.  Now assign an ID.
    Worklist.pop_back();
    MDs.push_back(N);
    MetadataMap[N].ID = MDs.size();

    // Flush out any delayed distinct nodes; these are all the distinct nodes
    // that are leaves in the last uniqued subgraph.
    if (Worklist.empty() || Worklist.back().first->isDistinct()) {
      for (const MDNode *N : DelayedDistinctNodes)
        Worklist.push_back(std::make_pair(N, N->op_begin()));
      DelayedDistinctNodes.clear();
    }
  }
}

const MDNode *ValueEnumerator::enumerateMetadataImpl(unsigned F,
                                                     const Metadata *MD) {
  if (!MD)
    return nullptr;

  assert(
      (isa<MDNode>(MD) || isa<MDString>(MD) || isa<ConstantAsMetadata>(MD)) &&
      "Invalid metadata kind");

  auto Insertion = MetadataMap.insert(std::make_pair(MD, MDIndex(F)));
  MDIndex &Entry = Insertion.first->second;
  if (!Insertion.second) {
    // Already mapped.  If F doesn't match the function tag, drop it.
    if (Entry.hasDifferentFunction(F))
      dropFunctionFromMetadata(*Insertion.first);
    return nullptr;
  }

  // Don't assign IDs to metadata nodes.
  if (auto *N = dyn_cast<MDNode>(MD))
    return N;

  // Save the metadata.
  MDs.push_back(MD);
  Entry.ID = MDs.size();

  // Enumerate the constant, if any.
  if (auto *C = dyn_cast<ConstantAsMetadata>(MD))
    EnumerateValue(C->getValue());

  return nullptr;
}

/// EnumerateFunctionLocalMetadata - Incorporate function-local metadata
/// information reachable from the metadata.
void ValueEnumerator::EnumerateFunctionLocalMetadata(
    unsigned F, const LocalAsMetadata *Local) {
  assert(F && "Expected a function");

  // Check to see if it's already in!
  MDIndex &Index = MetadataMap[Local];
  if (Index.ID) {
    assert(Index.F == F && "Expected the same function");
    return;
  }

  MDs.push_back(Local);
  Index.F = F;
  Index.ID = MDs.size();

  EnumerateValue(Local->getValue());
}

static unsigned getMetadataTypeOrder(const Metadata *MD) {
  // Strings are emitted in bulk and must come first.
  if (isa<MDString>(MD))
    return 0;

  // ConstantAsMetadata doesn't reference anything.  We may as well shuffle it
  // to the front since we can detect it.
  auto *N = dyn_cast<MDNode>(MD);
  if (!N)
    return 1;

  // The reader handles forward references to distinct node operands quickly,
  // but is slow when uniqued operands are unresolved.
  return N->isDistinct() ? 2 : 3;
}

void ValueEnumerator::organizeMetadata() {
  assert(MetadataMap.size() == MDs.size() &&
         "Metadata map and vector out of sync");

  if (MDs.empty())
    return;

  // Copy out the index information from MetadataMap in order to choose a new
  // order.
  SmallVector<MDIndex, 64> Order;
  Order.reserve(MetadataMap.size());
  for (const Metadata *MD : MDs)
    Order.push_back(MetadataMap.lookup(MD));

  // Partition:
  //   - by function, then
  //   - by isa<MDString>
  // and then sort by the original/current ID.  Since the IDs are guaranteed to
  // be unique, the result of std::sort will be deterministic.  There's no need
  // for std::stable_sort.
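  // Together with getMetadataTypeOrder() this puts, within each function tag,
  // strings first, then constants-as-metadata, then distinct nodes, then
  // uniqued nodes, each bucket keeping its previous relative order by ID.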
  std::sort(Order.begin(), Order.end(), [this](MDIndex LHS, MDIndex RHS) {
    return std::make_tuple(LHS.F, getMetadataTypeOrder(LHS.get(MDs)), LHS.ID) <
           std::make_tuple(RHS.F, getMetadataTypeOrder(RHS.get(MDs)), RHS.ID);
  });

  // Rebuild MDs, index the metadata ranges for each function in FunctionMDs,
  // and fix up MetadataMap.
  std::vector<const Metadata *> OldMDs = std::move(MDs);
  MDs.reserve(OldMDs.size());
  for (unsigned I = 0, E = Order.size(); I != E && !Order[I].F; ++I) {
    auto *MD = Order[I].get(OldMDs);
    MDs.push_back(MD);
    MetadataMap[MD].ID = I + 1;
    if (isa<MDString>(MD))
      ++NumMDStrings;
  }

  // Return early if there's nothing for the functions.
  if (MDs.size() == Order.size())
    return;

  // Build the function metadata ranges.
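  // After the sort, entries tagged with a function are grouped contiguously,
  // so each function's metadata becomes one [First, Last) slice of FunctionMDs
  // (recorded in FunctionMDInfo, keyed by the function tag), and IDs restart
  // from the module-level count for every function.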
  MDRange R;
  FunctionMDs.reserve(OldMDs.size());
  unsigned PrevF = 0;
  for (unsigned I = MDs.size(), E = Order.size(), ID = MDs.size(); I != E;
       ++I) {
    unsigned F = Order[I].F;
    if (!PrevF) {
      PrevF = F;
    } else if (PrevF != F) {
      R.Last = FunctionMDs.size();
      std::swap(R, FunctionMDInfo[PrevF]);
      R.First = FunctionMDs.size();

      ID = MDs.size();
      PrevF = F;
    }

    auto *MD = Order[I].get(OldMDs);
    FunctionMDs.push_back(MD);
    MetadataMap[MD].ID = ++ID;
    if (isa<MDString>(MD))
      ++R.NumStrings;
  }
  R.Last = FunctionMDs.size();
  FunctionMDInfo[PrevF] = R;
}

void ValueEnumerator::incorporateFunctionMetadata(const Function &F) {
  NumModuleMDs = MDs.size();

  auto R = FunctionMDInfo.lookup(getValueID(&F) + 1);
  NumMDStrings = R.NumStrings;
  MDs.insert(MDs.end(), FunctionMDs.begin() + R.First,
             FunctionMDs.begin() + R.Last);
}

void ValueEnumerator::EnumerateValue(const Value *V) {
  assert(!V->getType()->isVoidTy() && "Can't insert void values!");
  assert(!isa<MetadataAsValue>(V) && "EnumerateValue doesn't handle Metadata!");

  // Check to see if it's already in!
  unsigned &ValueID = ValueMap[V];
  if (ValueID) {
    // Increment use count.
    Values[ValueID-1].second++;
    return;
  }

  if (auto *GO = dyn_cast<GlobalObject>(V))
    if (const Comdat *C = GO->getComdat())
      Comdats.insert(C);

  // Enumerate the type of this value.
  EnumerateType(V->getType());

  if (const Constant *C = dyn_cast<Constant>(V)) {
    if (isa<GlobalValue>(C)) {
      // Initializers for globals are handled explicitly elsewhere.
    } else if (C->getNumOperands()) {
      // If a constant has operands, enumerate them.  This makes sure that if a
      // constant has uses (for example an array of const ints), that they are
      // inserted also.

      // We prefer to enumerate them with values before we enumerate the user
      // itself.  This makes it more likely that we can avoid forward references
      // in the reader.  We know that there can be no cycles in the constants
      // graph that don't go through a global variable.
      for (User::const_op_iterator I = C->op_begin(), E = C->op_end();
           I != E; ++I)
        if (!isa<BasicBlock>(*I)) // Don't enumerate BB operand to BlockAddress.
          EnumerateValue(*I);

      // Finally, add the value.  Doing this could make the ValueID reference be
      // dangling, don't reuse it.
      Values.push_back(std::make_pair(V, 1U));
      ValueMap[V] = Values.size();
      return;
    }
  }

  // Add the value.
  Values.push_back(std::make_pair(V, 1U));
  ValueID = Values.size();
}


void ValueEnumerator::EnumerateType(Type *Ty) {
  unsigned *TypeID = &TypeMap[Ty];

  // We've already seen this type.
  if (*TypeID)
    return;

  // If it is a non-anonymous struct, mark the type as being visited so that we
  // don't recursively visit it.  This is safe because we allow forward
  // references of these in the bitcode reader.
  if (StructType *STy = dyn_cast<StructType>(Ty))
    if (!STy->isLiteral())
      *TypeID = ~0U;

  // Enumerate all of the subtypes before we enumerate this type.  This ensures
  // that the type will be enumerated in an order that can be directly built.
  for (Type *SubTy : Ty->subtypes())
    EnumerateType(SubTy);

  // Refresh the TypeID pointer in case the table rehashed.
  TypeID = &TypeMap[Ty];

  // Check to see if we got the pointer another way.  This can happen when
  // enumerating recursive types that hit the base case deeper than they start.
  //
  // If this is actually a struct that we are treating as forward ref'able,
  // then emit the definition now that all of its contents are available.
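  // (For example, a self-referential struct such as "%node = type { %node* }"
  // re-enters EnumerateType through its subtypes; the ~0U placeholder above is
  // what keeps that recursion from looping forever.)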
  if (*TypeID && *TypeID != ~0U)
    return;

  // Add this type now that its contents are all happily enumerated.
  Types.push_back(Ty);

  *TypeID = Types.size();
}

// Enumerate the types for the specified value.  If the value is a constant,
// walk through it, enumerating the types of the constant.
void ValueEnumerator::EnumerateOperandType(const Value *V) {
  EnumerateType(V->getType());

  assert(!isa<MetadataAsValue>(V) && "Unexpected metadata operand");

  const Constant *C = dyn_cast<Constant>(V);
  if (!C)
    return;

  // If this constant is already enumerated, ignore it, we know its type must
  // be enumerated.
  if (ValueMap.count(C))
    return;

  // This constant may have operands, make sure to enumerate the types in
  // them.
  for (const Value *Op : C->operands()) {
    // Don't enumerate basic blocks here, this happens as operands to
    // blockaddress.
    if (isa<BasicBlock>(Op))
      continue;

    EnumerateOperandType(Op);
  }
}

void ValueEnumerator::EnumerateAttributes(AttributeList PAL) {
  if (PAL.isEmpty()) return;  // null is always 0.

  // Do a lookup.
  unsigned &Entry = AttributeListMap[PAL];
  if (Entry == 0) {
    // Never saw this before, add it.
    AttributeLists.push_back(PAL);
    Entry = AttributeLists.size();
  }

  // Do lookups for all attribute groups.
  for (unsigned i = PAL.index_begin(), e = PAL.index_end(); i != e; ++i) {
    AttributeSet AS = PAL.getAttributes(i);
    if (!AS.hasAttributes())
      continue;
    IndexAndAttrSet Pair = {i, AS};
    unsigned &Entry = AttributeGroupMap[Pair];
    if (Entry == 0) {
      AttributeGroups.push_back(Pair);
      Entry = AttributeGroups.size();
    }
  }
}

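// incorporateFunction() below layers one function's values on top of the
// module-level numbering (arguments, then function-local constants, then
// basic blocks and instructions); purgeFunction() pops that layer again so
// the next function starts from the same module-level state.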
void ValueEnumerator::incorporateFunction(const Function &F) {
  InstructionCount = 0;
  NumModuleValues = Values.size();

  // Add global metadata to the function block.  This doesn't include
  // LocalAsMetadata.
  incorporateFunctionMetadata(F);

  // Adding function arguments to the value table.
  for (const auto &I : F.args())
    EnumerateValue(&I);

  FirstFuncConstantID = Values.size();

  // Add all function-level constants to the value table.
  for (const BasicBlock &BB : F) {
    for (const Instruction &I : BB)
      for (const Use &OI : I.operands()) {
        if ((isa<Constant>(OI) && !isa<GlobalValue>(OI)) || isa<InlineAsm>(OI))
          EnumerateValue(OI);
      }
    BasicBlocks.push_back(&BB);
    ValueMap[&BB] = BasicBlocks.size();
  }

  // Optimize the constant layout.
  OptimizeConstants(FirstFuncConstantID, Values.size());

  // Add the function's parameter attributes so they are available for use in
  // the function's instructions.
  EnumerateAttributes(F.getAttributes());

  FirstInstID = Values.size();

  SmallVector<LocalAsMetadata *, 8> FnLocalMDVector;
  // Add all of the instructions.
  for (const BasicBlock &BB : F) {
    for (const Instruction &I : BB) {
      for (const Use &OI : I.operands()) {
        if (auto *MD = dyn_cast<MetadataAsValue>(&OI))
          if (auto *Local = dyn_cast<LocalAsMetadata>(MD->getMetadata()))
            // Enumerate metadata after the instructions they might refer to.
            FnLocalMDVector.push_back(Local);
      }

      if (!I.getType()->isVoidTy())
        EnumerateValue(&I);
    }
  }

  // Add all of the function-local metadata.
  for (unsigned i = 0, e = FnLocalMDVector.size(); i != e; ++i) {
    // At this point, all local values have been incorporated; we shouldn't
    // have a metadata operand that references a value that hasn't been seen.
    assert(ValueMap.count(FnLocalMDVector[i]->getValue()) &&
           "Missing value for metadata operand");
    EnumerateFunctionLocalMetadata(F, FnLocalMDVector[i]);
  }
}

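// Undo incorporateFunction(): forget the per-function values, metadata, and
// basic block IDs so the enumerator is back to its module-level state.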
void ValueEnumerator::purgeFunction() {
  /// Remove purged values from the ValueMap.
  for (unsigned i = NumModuleValues, e = Values.size(); i != e; ++i)
    ValueMap.erase(Values[i].first);
  for (unsigned i = NumModuleMDs, e = MDs.size(); i != e; ++i)
    MetadataMap.erase(MDs[i]);
  for (unsigned i = 0, e = BasicBlocks.size(); i != e; ++i)
    ValueMap.erase(BasicBlocks[i]);

  Values.resize(NumModuleValues);
  MDs.resize(NumModuleMDs);
  BasicBlocks.clear();
  NumMDStrings = 0;
}

static void IncorporateFunctionInfoGlobalBBIDs(
    const Function *F, DenseMap<const BasicBlock *, unsigned> &IDMap) {
  unsigned Counter = 0;
  for (const BasicBlock &BB : *F)
    IDMap[&BB] = ++Counter;
}

/// getGlobalBasicBlockID - This returns the function-specific ID for the
/// specified basic block.  This is relatively expensive information, so it
/// should only be used by rare constructs such as address-of-label.
unsigned ValueEnumerator::getGlobalBasicBlockID(const BasicBlock *BB) const {
  unsigned &Idx = GlobalBasicBlockIDs[BB];
  if (Idx != 0)
    return Idx-1;

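  // Not numbered yet: lazily assign IDs to every block in this function, then
  // retry the lookup (the recursion terminates because Idx is now non-zero).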
  IncorporateFunctionInfoGlobalBBIDs(BB->getParent(), GlobalBasicBlockIDs);
  return getGlobalBasicBlockID(BB);
}

uint64_t ValueEnumerator::computeBitsRequiredForTypeIndicies() const {
  return Log2_32_Ceil(getTypes().size() + 1);
}