void ValueMapTypeRemapper::anchor() {}
void ValueMaterializer::anchor() {}
struct DelayedBasicBlock {
  BasicBlock *OldBB;
  std::unique_ptr<BasicBlock> TempBB;

  DelayedBasicBlock(const BlockAddress &Old)
      : OldBB(Old.getBasicBlock()),
        TempBB(BasicBlock::Create(Old.getContext())) {}
};
struct WorklistEntry {
  enum EntryKind { MapGlobalInit, MapAppendingVar, MapGlobalAliasee, RemapFunction };
  struct GVInitTy { GlobalVariable *GV; Constant *Init; };
  struct AppendingGVTy { GlobalVariable *GV; Constant *InitPrefix; };
  struct GlobalAliaseeTy { GlobalAlias *GA; Constant *Aliasee; };

  unsigned Kind : 2;
  unsigned MCID : 29;
  unsigned AppendingGVIsOldCtorDtor : 1;
  unsigned AppendingGVNumNewMembers;
  union { GVInitTy GVInit; AppendingGVTy AppendingGV; GlobalAliaseeTy GlobalAliasee; Function *RemapF; } Data;
};
struct MappingContext {
  ValueToValueMapTy *VM;
  ValueMaterializer *Materializer = nullptr;

  explicit MappingContext(ValueToValueMapTy &VM,
                          ValueMaterializer *Materializer = nullptr)
      : VM(&VM), Materializer(Materializer) {}
};

class Mapper {
  friend class MDNodeMapper;

  RemapFlags Flags;
  ValueMapTypeRemapper *TypeMapper;
  unsigned CurrentMCID = 0;
  SmallVector<MappingContext, 2> MCs;
  SmallVector<WorklistEntry, 4> Worklist;
  SmallVector<DelayedBasicBlock, 1> DelayedBBs;
  SmallVector<Constant *, 16> AppendingInits;

public:
  Mapper(ValueToValueMapTy &VM, RemapFlags Flags,
         ValueMapTypeRemapper *TypeMapper, ValueMaterializer *Materializer)
      : Flags(Flags), TypeMapper(TypeMapper),
        MCs(1, MappingContext(VM, Materializer)) {}

  /// ValueMapper is expected to flush the worklist before destruction.
  ~Mapper() { assert(!hasWorkToDo() && "Expected to be flushed"); }

  bool hasWorkToDo() const { return !Worklist.empty(); }

  unsigned registerAlternateMappingContext(ValueToValueMapTy &VM,
                                           ValueMaterializer *Materializer) {
    MCs.push_back(MappingContext(VM, Materializer));
    return MCs.size() - 1;
  }

  Constant *mapConstant(const Constant *C) {
    return cast_or_null<Constant>(mapValue(C));
  }

  void scheduleRemapFunction(Function &F, unsigned MCID);

  ValueToValueMapTy &getVM() { return *MCs[CurrentMCID].VM; }
  ValueMaterializer *getMaterializer() { return MCs[CurrentMCID].Materializer; }

  // Remaining members (mapValue, mapMetadata, mapBlockAddress, flush,
  // remapInstruction, remapFunction, the other schedule*() entry points,
  // addFlags, ...) are defined out of line below.
};
class MDNodeMapper {
  Mapper &M;

  /// Data about a node in a UniquedGraph.
  struct Data {
    bool HasChanged = false;
    unsigned ID = std::numeric_limits<unsigned>::max();
    TempMDNode Placeholder;
  };

  /// A graph of uniqued nodes.
  struct UniquedGraph {
    SmallDenseMap<const Metadata *, Data, 32> Info; // Node properties.
    SmallVector<MDNode *, 16> POT;                  // Post-order traversal.

    /// Propagate changed operands through the post-order traversal.
    void propagateChanges();

    /// Get a forward reference to a node to use as an operand.
    Metadata &getFwdReference(MDNode &Op);
  };

  SmallVector<MDNode *, 16> DistinctWorklist;

public:
  MDNodeMapper(Mapper &M) : M(M) {}

  /// Map a metadata node (and its transitive operands).
  Metadata *map(const MDNode &N);

  MDNode *mapDistinctNode(const MDNode &N);
  Metadata *mapTopLevelUniquedNode(const MDNode &FirstN);
  Optional<Metadata *> tryToMapOperand(const Metadata *Op);
  Optional<Metadata *> getMappedOp(const Metadata *Op) const;
  bool createPOT(UniquedGraph &G, const MDNode &FirstN);
  MDNode *visitOperands(UniquedGraph &G, MDNode::op_iterator &I,
                        MDNode::op_iterator E, bool &HasChanged);
  void mapNodesInPOT(UniquedGraph &G);

  template <class OperandMapper>
  void remapOperands(MDNode &N, OperandMapper mapOperand);
};
Value *Mapper::mapValue(const Value *V) {
  ValueToValueMapTy::iterator I = getVM().find(V);

  // If the value already exists in the map, use it.
  if (I != getVM().end()) {
    assert(I->second && "Unexpected null mapping");
    return I->second;
  }

  // If we have a materializer and it can materialize a value, use that.
  if (auto *Materializer = getMaterializer()) {
    if (Value *NewV = Materializer->materialize(const_cast<Value *>(V))) {
      getVM()[V] = NewV;
      return NewV;
    }
  }

  // Global values do not need to be seeded into the VM if they
  // are using the identity mapping.
  if (isa<GlobalValue>(V)) {
    if (Flags & RF_NullMapMissingGlobalValues)
      return nullptr;
    return getVM()[V] = const_cast<Value *>(V);
  }

  if (const InlineAsm *IA = dyn_cast<InlineAsm>(V)) {
    // Inline asm may need *type* remapping.
    FunctionType *NewTy = IA->getFunctionType();
    if (TypeMapper) {
      NewTy = cast<FunctionType>(TypeMapper->remapType(NewTy));

      if (NewTy != IA->getFunctionType())
        V = InlineAsm::get(NewTy, IA->getAsmString(), IA->getConstraintString(),
                           IA->hasSideEffects(), IA->isAlignStack());
    }

    return getVM()[V] = const_cast<Value *>(V);
  }

  if (const auto *MDV = dyn_cast<MetadataAsValue>(V)) {
    const Metadata *MD = MDV->getMetadata();

    if (auto *LAM = dyn_cast<LocalAsMetadata>(MD)) {
      // Look through to grab the local value.
      if (Value *LV = mapValue(LAM->getValue())) {
        if (V == LAM->getValue())
          return const_cast<Value *>(V);
        return MetadataAsValue::get(V->getContext(), ValueAsMetadata::get(LV));
      }

      // The referenced local is missing; map to null or to an empty tuple,
      // depending on RF_IgnoreMissingLocals.
      return (Flags & RF_IgnoreMissingLocals)
                 ? nullptr
                 : MetadataAsValue::get(V->getContext(),
                                        MDTuple::get(V->getContext(), None));
    }

    // If this is module-level metadata and nothing at the module level is
    // changing, use an identity mapping.
    if (Flags & RF_NoModuleLevelChanges)
      return getVM()[V] = const_cast<Value *>(V);

    // Map the metadata and turn it into a value.
    auto *MappedMD = mapMetadata(MD);
    if (MD == MappedMD)
      return getVM()[V] = const_cast<Value *>(V);

    return getVM()[V] = MetadataAsValue::get(V->getContext(), MappedMD);
  }

  // Okay, this either must be a constant (which may or may not be mappable)
  // or is something that is not in the mapping table.
  Constant *C = const_cast<Constant *>(dyn_cast<Constant>(V));
  if (!C)
    return nullptr;

  if (BlockAddress *BA = dyn_cast<BlockAddress>(C))
    return mapBlockAddress(*BA);

  auto mapValueOrNull = [this](Value *V) {
    auto Mapped = mapValue(V);
    assert((Mapped || (Flags & RF_NullMapMissingGlobalValues)) &&
           "Unexpected null mapping for constant operand without "
           "NullMapMissingGlobalValues flag");
    return Mapped;
  };

  // Otherwise, we have some other constant to remap.  Start by checking to
  // see if all operands have an identity remapping.
  unsigned OpNo = 0, NumOperands = C->getNumOperands();
  Value *Mapped = nullptr;
  for (; OpNo != NumOperands; ++OpNo) {
    Value *Op = C->getOperand(OpNo);
    Mapped = mapValueOrNull(Op);
    if (!Mapped)
      return nullptr;
    if (Mapped != Op)
      break;
  }

  // See if the type mapper wants to remap the type as well.
  Type *NewTy = C->getType();
  if (TypeMapper)
    NewTy = TypeMapper->remapType(NewTy);

  // If the result type and all operands match up, insert an identity mapping.
  if (OpNo == NumOperands && NewTy == C->getType())
    return getVM()[V] = C;

  // Okay, we need to create a new constant.  We've already processed some or
  // all of the operands, set them up now.
  SmallVector<Constant *, 8> Ops;
  Ops.reserve(NumOperands);
  for (unsigned j = 0; j != OpNo; ++j)
    Ops.push_back(cast<Constant>(C->getOperand(j)));

  // If one of the operands mismatched, push it and map the rest.
  if (OpNo != NumOperands) {
    Ops.push_back(cast<Constant>(Mapped));

    // Map the rest of the operands that aren't processed yet.
    for (++OpNo; OpNo != NumOperands; ++OpNo) {
      Mapped = mapValueOrNull(C->getOperand(OpNo));
      if (!Mapped)
        return nullptr;
      Ops.push_back(cast<Constant>(Mapped));
    }
  }
  Type *NewSrcTy = nullptr;
  if (TypeMapper)
    if (auto *GEPO = dyn_cast<GEPOperator>(C))
      NewSrcTy = TypeMapper->remapType(GEPO->getSourceElementType());

  if (ConstantExpr *CE = dyn_cast<ConstantExpr>(C))
    return getVM()[V] = CE->getWithOperands(Ops, NewTy, false, NewSrcTy);
  if (isa<ConstantArray>(C))
    return getVM()[V] = ConstantArray::get(cast<ArrayType>(NewTy), Ops);
  if (isa<ConstantStruct>(C))
    return getVM()[V] = ConstantStruct::get(cast<StructType>(NewTy), Ops);
  if (isa<ConstantVector>(C))
    return getVM()[V] = ConstantVector::get(Ops);
  // If this is a no-operand constant, it must be because the type was remapped.
  if (isa<UndefValue>(C))
    return getVM()[V] = UndefValue::get(NewTy);
  if (isa<ConstantAggregateZero>(C))
    return getVM()[V] = ConstantAggregateZero::get(NewTy);
  assert(isa<ConstantPointerNull>(C));
  return getVM()[V] = ConstantPointerNull::get(cast<PointerType>(NewTy));
}
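// --- Illustrative sketch, not part of ValueMapper.cpp -----------------------
// A minimal example of how the logic above is usually reached: through the
// public MapValue() entry point declared in llvm/Transforms/Utils/ValueMapper.h.
// The helper name and parameters below are hypothetical; the caller is assumed
// to have decided that OldV should be replaced by NewV.
static Value *remapOneUse(Value *UsedV, Value *OldV, Value *NewV) {
  ValueToValueMapTy VMap;
  VMap[OldV] = NewV; // Seed the only non-identity mapping.
  // Constants, inline asm, and metadata-as-value reachable from UsedV that
  // mention OldV are rebuilt to mention NewV; everything else maps to itself.
  return MapValue(UsedV, VMap);
}
// -----------------------------------------------------------------------------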
Value *Mapper::mapBlockAddress(const BlockAddress &BA) {
  Function *F = cast<Function>(mapValue(BA.getFunction()));

  // F may not have materialized its body yet; forward-reference the block
  // through a temporary and patch it during flush().
  BasicBlock *BB;
  if (F->empty()) {
    DelayedBBs.push_back(DelayedBasicBlock(BA));
    BB = DelayedBBs.back().TempBB.get();
  } else {
    BB = cast_or_null<BasicBlock>(mapValue(BA.getBasicBlock()));
  }

  return getVM()[&BA] = BlockAddress::get(F, BB ? BB : BA.getBasicBlock());
}

Metadata *Mapper::mapToMetadata(const Metadata *Key, Metadata *Val) {
  getVM().MD()[Key].reset(Val);
  return Val;
}

Metadata *Mapper::mapToSelf(const Metadata *MD) {
  return mapToMetadata(MD, const_cast<Metadata *>(MD));
}

Optional<Metadata *> MDNodeMapper::tryToMapOperand(const Metadata *Op) {
  if (!Op)
    return nullptr;

  if (Optional<Metadata *> MappedOp = M.mapSimpleMetadata(Op)) {
#ifndef NDEBUG
    if (auto *CMD = dyn_cast<ConstantAsMetadata>(Op))
      assert((!*MappedOp || M.getVM().count(CMD->getValue()) ||
              M.getVM().getMappedMD(Op)) &&
             "Expected Value to be memoized");
    else
      assert((isa<MDString>(Op) || M.getVM().getMappedMD(Op)) &&
             "Expected result to be memoized");
#endif
    return *MappedOp;
  }

  const MDNode &N = *cast<MDNode>(Op);
  if (N.isDistinct())
    return mapDistinctNode(N);
  return None;
}
MDNode *MDNodeMapper::mapDistinctNode(const MDNode &N) {
  assert(N.isDistinct() && "Expected a distinct node");
  assert(!M.getVM().getMappedMD(&N) && "Expected an unmapped node");
  DistinctWorklist.push_back(cast<MDNode>(
      (M.Flags & RF_MoveDistinctMDs)
          ? M.mapToSelf(&N)
          : M.mapToMetadata(&N, MDNode::replaceWithDistinct(N.clone()))));
  return DistinctWorklist.back();
}

static ConstantAsMetadata *wrapConstantAsMetadata(const ConstantAsMetadata &CMD,
                                                  Value *MappedV) {
  if (CMD.getValue() == MappedV)
    return const_cast<ConstantAsMetadata *>(&CMD);
  return MappedV ? ConstantAsMetadata::getConstant(MappedV) : nullptr;
}

Optional<Metadata *> MDNodeMapper::getMappedOp(const Metadata *Op) const {
  if (!Op)
    return nullptr;

  if (Optional<Metadata *> MappedOp = M.getVM().getMappedMD(Op))
    return *MappedOp;

  if (isa<MDString>(Op))
    return const_cast<Metadata *>(Op);

  if (auto *CMD = dyn_cast<ConstantAsMetadata>(Op))
    return wrapConstantAsMetadata(*CMD, M.getVM().lookup(CMD->getValue()));

  return None;
}

Metadata &MDNodeMapper::UniquedGraph::getFwdReference(MDNode &Op) {
  auto Where = Info.find(&Op);
  assert(Where != Info.end() && "Expected a valid reference");

  auto &OpD = Where->second;
  if (!OpD.HasChanged)
    return Op;

  // Lazily construct a temporary node.
  if (!OpD.Placeholder)
    OpD.Placeholder = Op.clone();

  return *OpD.Placeholder;
}
template <class OperandMapper>
void MDNodeMapper::remapOperands(MDNode &N, OperandMapper mapOperand) {
  assert(!N.isUniqued() && "Expected distinct or temporary nodes");
  for (unsigned I = 0, E = N.getNumOperands(); I != E; ++I) {
    Metadata *Old = N.getOperand(I);
    Metadata *New = mapOperand(Old);
    if (Old != New)
      N.replaceOperandWith(I, New);
  }
}

/// An entry in the worklist for the post-order traversal.
struct POTWorklistEntry {
  MDNode *N;              // Current node.
  MDNode::op_iterator Op; // Current operand of N.
  bool HasChanged = false;

  POTWorklistEntry(MDNode &N) : N(&N), Op(N.op_begin()) {}
};
bool MDNodeMapper::createPOT(UniquedGraph &G, const MDNode &FirstN) {
  assert(G.Info.empty() && "Expected a fresh traversal");
  assert(FirstN.isUniqued() && "Expected uniqued node in POT");

  // Construct a post-order traversal of the uniqued subgraph under FirstN.
  bool AnyChanges = false;
  SmallVector<POTWorklistEntry, 16> Worklist;
  Worklist.push_back(POTWorklistEntry(const_cast<MDNode &>(FirstN)));
  (void)G.Info[&FirstN];
  while (!Worklist.empty()) {
    // Start or continue the traversal through this node's operands.
    auto &WE = Worklist.back();
    if (MDNode *N = visitOperands(G, WE.Op, WE.N->op_end(), WE.HasChanged)) {
      // Push a new node to traverse first.
      Worklist.push_back(POTWorklistEntry(*N));
      continue;
    }

    // Push the node onto the POT.
    assert(WE.N->isUniqued() && "Expected only uniqued nodes");
    assert(WE.Op == WE.N->op_end() && "Expected to visit all operands");
    auto &D = G.Info[WE.N];
    AnyChanges |= D.HasChanged = WE.HasChanged;
    D.ID = G.POT.size();
    G.POT.push_back(WE.N);

    // Pop the node off the worklist.
    Worklist.pop_back();
  }
  return AnyChanges;
}

MDNode *MDNodeMapper::visitOperands(UniquedGraph &G, MDNode::op_iterator &I,
                                    MDNode::op_iterator E, bool &HasChanged) {
  while (I != E) {
    Metadata *Op = *I++; // Increment even on early return.
    if (Optional<Metadata *> MappedOp = tryToMapOperand(Op)) {
      // Check if the operand changes.
      HasChanged |= Op != *MappedOp;
      continue;
    }

    // A uniqued metadata node.
    MDNode &OpN = *cast<MDNode>(Op);
    assert(OpN.isUniqued() &&
           "Only uniqued operands cannot be mapped immediately");
    if (G.Info.insert(std::make_pair(&OpN, Data())).second)
      return &OpN; // This is a new one.  Return it.
  }
  return nullptr;
}
void MDNodeMapper::UniquedGraph::propagateChanges() {
  bool AnyChanges;
  do {
    AnyChanges = false;
    for (MDNode *N : POT) {
      auto &D = Info[N];
      if (D.HasChanged)
        continue;

      if (none_of(N->operands(), [&](const Metadata *Op) {
            auto Where = Info.find(Op);
            return Where != Info.end() && Where->second.HasChanged;
          }))
        continue;

      AnyChanges = D.HasChanged = true;
    }
  } while (AnyChanges);
}
void MDNodeMapper::mapNodesInPOT(UniquedGraph &G) {
  // Construct uniqued nodes, building forward references as necessary.
  SmallVector<MDNode *, 16> CyclicNodes;
  for (auto *N : G.POT) {
    auto &D = G.Info[N];
    if (!D.HasChanged) {
      // The node hasn't changed; reuse it.
      M.mapToSelf(N);
      continue;
    }

    // Remember whether this node had a placeholder.
    bool HadPlaceholder(D.Placeholder);

    // Clone the uniqued node and remap the operands.
    TempMDNode ClonedN = D.Placeholder ? std::move(D.Placeholder) : N->clone();
    remapOperands(*ClonedN, [this, &D, &G](Metadata *Old) {
      if (Optional<Metadata *> MappedOp = getMappedOp(Old))
        return *MappedOp;
      assert(G.Info[Old].ID > D.ID && "Expected a forward reference");
      return &G.getFwdReference(*cast<MDNode>(Old));
    });

    auto *NewN = MDNode::replaceWithUniqued(std::move(ClonedN));
    M.mapToMetadata(N, NewN);

    // Nodes referenced out of order in the POT are part of a uniquing cycle.
    if (HadPlaceholder)
      CyclicNodes.push_back(NewN);
  }

  // Resolve cycles.
  for (auto *N : CyclicNodes)
    if (!N->isResolved())
      N->resolveCycles();
}
Metadata *MDNodeMapper::map(const MDNode &N) {
  assert(DistinctWorklist.empty() && "MDNodeMapper::map is not recursive");
  assert(!(M.Flags & RF_NoModuleLevelChanges) &&
         "MDNodeMapper::map assumes module-level changes");

  // Require resolved nodes whenever metadata might be remapped.
  assert(N.isResolved() && "Unexpected unresolved node");

  Metadata *MappedN =
      N.isUniqued() ? mapTopLevelUniquedNode(N) : mapDistinctNode(N);
  while (!DistinctWorklist.empty())
    remapOperands(*DistinctWorklist.pop_back_val(), [this](Metadata *Old) {
      if (Optional<Metadata *> MappedOp = tryToMapOperand(Old))
        return *MappedOp;
      return mapTopLevelUniquedNode(*cast<MDNode>(Old));
    });
  return MappedN;
}
Metadata *MDNodeMapper::mapTopLevelUniquedNode(const MDNode &FirstN) {
  assert(FirstN.isUniqued() && "Expected uniqued node");

  // Create a post-order traversal of uniqued nodes under FirstN.
  UniquedGraph G;
  if (!createPOT(G, FirstN)) {
    // Return early if no nodes have changed.
    for (const MDNode *N : G.POT)
      M.mapToSelf(N);
    return &const_cast<MDNode &>(FirstN);
  }

  // Update graph with all nodes that have changed.
  G.propagateChanges();

  // Map all the nodes in the graph.
  mapNodesInPOT(G);

  // Return the original node, remapped.
  return *getMappedOp(&FirstN);
}
/// RAII sentry that keeps mapValue() from recursing into metadata mapping.
struct MapMetadataDisabler {
  ValueToValueMapTy &VM;

  MapMetadataDisabler(ValueToValueMapTy &VM) : VM(VM) {
    VM.disableMapMetadata();
  }
  ~MapMetadataDisabler() { VM.enableMapMetadata(); }
};

Optional<Metadata *> Mapper::mapSimpleMetadata(const Metadata *MD) {
  // If the value already exists in the map, use it.
  if (Optional<Metadata *> NewMD = getVM().getMappedMD(MD))
    return *NewMD;

  if (isa<MDString>(MD))
    return const_cast<Metadata *>(MD);

  // Module-level metadata maps to itself when nothing at module level changes.
  if ((Flags & RF_NoModuleLevelChanges))
    return const_cast<Metadata *>(MD);

  if (auto *CMD = dyn_cast<ConstantAsMetadata>(MD)) {
    // Disallow recursion into metadata mapping through mapValue.
    MapMetadataDisabler MMD(getVM());
    return wrapConstantAsMetadata(*CMD, mapValue(CMD->getValue()));
  }

  assert(isa<MDNode>(MD) && "Expected a metadata node");
  return None;
}

Metadata *Mapper::mapMetadata(const Metadata *MD) {
  assert(MD && "Expected valid metadata");
  assert(!isa<LocalAsMetadata>(MD) && "Unexpected local metadata");

  // Return early if the metadata is already mapped or trivially simple.
  if (Optional<Metadata *> NewMD = mapSimpleMetadata(MD))
    return *NewMD;

  return MDNodeMapper(*this).map(*cast<MDNode>(MD));
}
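// --- Illustrative sketch, not part of ValueMapper.cpp -----------------------
// The same machinery is reachable directly through MapMetadata() from
// llvm/Transforms/Utils/ValueMapper.h; "Node" is a hypothetical MDNode taken
// from the module being cloned or linked.
static Metadata *remapNode(const MDNode *Node, ValueToValueMapTy &VMap) {
  // With RF_NoModuleLevelChanges the node would map to itself; without it,
  // changed uniqued graphs are rebuilt node by node via MDNodeMapper above.
  return MapMetadata(Node, VMap, RF_None);
}
// -----------------------------------------------------------------------------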
void Mapper::flush() {
  // Flush out the worklist of global values.
  while (!Worklist.empty()) {
    WorklistEntry E = Worklist.pop_back_val();
    CurrentMCID = E.MCID;
    switch (E.Kind) {
    case WorklistEntry::MapGlobalInit:
      E.Data.GVInit.GV->setInitializer(mapConstant(E.Data.GVInit.Init));
      break;
    case WorklistEntry::MapAppendingVar: {
      unsigned PrefixSize = AppendingInits.size() - E.AppendingGVNumNewMembers;
      mapAppendingVariable(*E.Data.AppendingGV.GV,
                           E.Data.AppendingGV.InitPrefix,
                           E.AppendingGVIsOldCtorDtor,
                           makeArrayRef(AppendingInits).slice(PrefixSize));
      AppendingInits.resize(PrefixSize);
      break;
    }
    case WorklistEntry::MapGlobalAliasee:
      E.Data.GlobalAliasee.GA->setAliasee(
          mapConstant(E.Data.GlobalAliasee.Aliasee));
      break;
    case WorklistEntry::RemapFunction:
      remapFunction(*E.Data.RemapF);
      break;
    }
  }
  CurrentMCID = 0;

  // Finish block addresses now that all global values have been handled.
  while (!DelayedBBs.empty()) {
    DelayedBasicBlock DBB = DelayedBBs.pop_back_val();
    BasicBlock *BB = cast_or_null<BasicBlock>(mapValue(DBB.OldBB));
    DBB.TempBB->replaceAllUsesWith(BB ? BB : DBB.OldBB);
  }
}
void Mapper::remapInstruction(Instruction *I) {
  // Remap operands.
  for (Use &Op : I->operands()) {
    Value *V = mapValue(Op);
    // If we aren't ignoring missing entries, assert that something happened.
    if (V)
      Op = V;
    else
      assert((Flags & RF_IgnoreMissingLocals) &&
             "Referenced value not in value map!");
  }

  // Remap phi nodes' incoming blocks.
  if (PHINode *PN = dyn_cast<PHINode>(I)) {
    for (unsigned i = 0, e = PN->getNumIncomingValues(); i != e; ++i) {
      Value *V = mapValue(PN->getIncomingBlock(i));
      // If we aren't ignoring missing entries, assert that something happened.
      if (V)
        PN->setIncomingBlock(i, cast<BasicBlock>(V));
      else
        assert((Flags & RF_IgnoreMissingLocals) &&
               "Referenced block not in value map!");
    }
  }

  // Remap attached metadata.
  SmallVector<std::pair<unsigned, MDNode *>, 4> MDs;
  I->getAllMetadata(MDs);
  for (const auto &MI : MDs) {
    MDNode *Old = MI.second;
    MDNode *New = cast_or_null<MDNode>(mapMetadata(Old));
    if (New != Old)
      I->setMetadata(MI.first, New);
  }

  if (!TypeMapper)
    return;

  // If the instruction's type is being remapped, do so now.
  if (auto CS = CallSite(I)) {
    SmallVector<Type *, 3> Tys;
    FunctionType *FTy = CS.getFunctionType();
    Tys.reserve(FTy->getNumParams());
    for (Type *Ty : FTy->params())
      Tys.push_back(TypeMapper->remapType(Ty));
    CS.mutateFunctionType(FunctionType::get(
        TypeMapper->remapType(I->getType()), Tys, FTy->isVarArg()));
    return;
  }

  if (auto *AI = dyn_cast<AllocaInst>(I))
    AI->setAllocatedType(TypeMapper->remapType(AI->getAllocatedType()));
  if (auto *GEP = dyn_cast<GetElementPtrInst>(I)) {
    GEP->setSourceElementType(
        TypeMapper->remapType(GEP->getSourceElementType()));
    GEP->setResultElementType(
        TypeMapper->remapType(GEP->getResultElementType()));
  }
  I->mutateType(TypeMapper->remapType(I->getType()));
}
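// --- Illustrative sketch, not part of ValueMapper.cpp -----------------------
// Typical per-instruction use, e.g. right after cloning a basic block: VMap
// already maps originals to their clones, and RF_IgnoreMissingLocals tells the
// code above to leave operands untouched when no mapping exists yet.  "Cloned"
// is a hypothetical freshly cloned instruction.
static void fixupClonedInstruction(Instruction *Cloned,
                                   ValueToValueMapTy &VMap) {
  RemapInstruction(Cloned, VMap,
                   RF_NoModuleLevelChanges | RF_IgnoreMissingLocals);
}
// -----------------------------------------------------------------------------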
void Mapper::remapFunction(Function &F) {
  // Remap the operands.
  for (Use &Op : F.operands())
    if (Op)
      Op = mapValue(Op);

  // Remap the metadata attachments.
  SmallVector<std::pair<unsigned, MDNode *>, 8> MDs;
  F.getAllMetadata(MDs);
  F.clearMetadata();
  for (const auto &I : MDs)
    F.addMetadata(I.first, *cast<MDNode>(mapMetadata(I.second)));

  // Remap the argument types.
  if (TypeMapper)
    for (Argument &A : F.args())
      A.mutateType(TypeMapper->remapType(A.getType()));

  // Remap the instructions.
  for (BasicBlock &BB : F)
    for (Instruction &I : BB)
      remapInstruction(&I);
}

void Mapper::mapAppendingVariable(GlobalVariable &GV, Constant *InitPrefix,
                                  bool IsOldCtorDtor,
                                  ArrayRef<Constant *> NewMembers) {
  SmallVector<Constant *, 16> Elements;
  if (InitPrefix) {
    unsigned NumElements =
        cast<ArrayType>(InitPrefix->getType())->getNumElements();
    for (unsigned I = 0; I != NumElements; ++I)
      Elements.push_back(InitPrefix->getAggregateElement(I));
  }

  PointerType *VoidPtrTy = nullptr;
  Type *EltTy = nullptr;
  if (IsOldCtorDtor) {
    // Upgrade two-field llvm.global_ctors/dtors entries to the three-field
    // form while linking.
    VoidPtrTy = Type::getInt8Ty(GV.getContext())->getPointerTo();
    auto &ST = *cast<StructType>(NewMembers.front()->getType());
    Type *Tys[3] = {ST.getElementType(0), ST.getElementType(1), VoidPtrTy};
    EltTy = StructType::get(GV.getContext(), Tys, false);
  }

  for (auto *V : NewMembers) {
    Constant *NewV = nullptr;
    if (IsOldCtorDtor) {
      auto *S = cast<ConstantStruct>(V);
      auto *E1 = mapValue(S->getOperand(0));
      auto *E2 = mapValue(S->getOperand(1));
      // ... NewV becomes a three-field struct {E1, E2, null} of type EltTy ...
    } else {
      NewV = cast_or_null<Constant>(mapValue(V));
    }
    Elements.push_back(NewV);
  }

  GV.setInitializer(ConstantArray::get(
      cast<ArrayType>(GV.getType()->getElementType()), Elements));
}
void Mapper::scheduleMapGlobalInitializer(GlobalVariable &GV, Constant &Init,
                                          unsigned MCID) {
  assert(AlreadyScheduled.insert(&GV).second && "Should not reschedule");
  assert(MCID < MCs.size() && "Invalid mapping context");

  WorklistEntry WE;
  WE.Kind = WorklistEntry::MapGlobalInit;
  WE.MCID = MCID;
  WE.Data.GVInit.GV = &GV;
  WE.Data.GVInit.Init = &Init;
  Worklist.push_back(WE);
}

void Mapper::scheduleMapAppendingVariable(GlobalVariable &GV,
                                          Constant *InitPrefix,
                                          bool IsOldCtorDtor,
                                          ArrayRef<Constant *> NewMembers,
                                          unsigned MCID) {
  assert(AlreadyScheduled.insert(&GV).second && "Should not reschedule");
  assert(MCID < MCs.size() && "Invalid mapping context");

  WorklistEntry WE;
  WE.Kind = WorklistEntry::MapAppendingVar;
  WE.MCID = MCID;
  WE.Data.AppendingGV.GV = &GV;
  WE.Data.AppendingGV.InitPrefix = InitPrefix;
  WE.AppendingGVIsOldCtorDtor = IsOldCtorDtor;
  WE.AppendingGVNumNewMembers = NewMembers.size();
  Worklist.push_back(WE);
  AppendingInits.append(NewMembers.begin(), NewMembers.end());
}

void Mapper::scheduleMapGlobalAliasee(GlobalAlias &GA, Constant &Aliasee,
                                      unsigned MCID) {
  assert(AlreadyScheduled.insert(&GA).second && "Should not reschedule");
  assert(MCID < MCs.size() && "Invalid mapping context");

  WorklistEntry WE;
  WE.Kind = WorklistEntry::MapGlobalAliasee;
  WE.MCID = MCID;
  WE.Data.GlobalAliasee.GA = &GA;
  WE.Data.GlobalAliasee.Aliasee = &Aliasee;
  Worklist.push_back(WE);
}

void Mapper::scheduleRemapFunction(Function &F, unsigned MCID) {
  assert(AlreadyScheduled.insert(&F).second && "Should not reschedule");
  assert(MCID < MCs.size() && "Invalid mapping context");

  WorklistEntry WE;
  WE.Kind = WorklistEntry::RemapFunction;
  WE.MCID = MCID;
  WE.Data.RemapF = &F;
  Worklist.push_back(WE);
}
void Mapper::addFlags(RemapFlags Flags) {
  assert(!hasWorkToDo() && "Expected to have flushed the worklist");
  this->Flags = this->Flags | Flags;
}

static Mapper *getAsMapper(void *pImpl) {
  return reinterpret_cast<Mapper *>(pImpl);
}

class FlushingMapper {
  Mapper &M;

public:
  explicit FlushingMapper(void *pImpl) : M(*getAsMapper(pImpl)) {
    assert(!M.hasWorkToDo() && "Expected to be flushed");
  }

  ~FlushingMapper() { M.flush(); }

  Mapper *operator->() const { return &M; }
};

ValueMapper::ValueMapper(ValueToValueMapTy &VM, RemapFlags Flags,
                         ValueMapTypeRemapper *TypeMapper,
                         ValueMaterializer *Materializer)
    : pImpl(new Mapper(VM, Flags, TypeMapper, Materializer)) {}

ValueMapper::~ValueMapper() { delete getAsMapper(pImpl); }
unsigned ValueMapper::registerAlternateMappingContext(
    ValueToValueMapTy &VM, ValueMaterializer *Materializer) {
  return getAsMapper(pImpl)->registerAlternateMappingContext(VM, Materializer);
}

void ValueMapper::addFlags(RemapFlags Flags) {
  FlushingMapper(pImpl)->addFlags(Flags);
}

Value *ValueMapper::mapValue(const Value &V) {
  return FlushingMapper(pImpl)->mapValue(&V);
}

Constant *ValueMapper::mapConstant(const Constant &C) {
  return cast_or_null<Constant>(mapValue(C));
}

Metadata *ValueMapper::mapMetadata(const Metadata &MD) {
  return FlushingMapper(pImpl)->mapMetadata(&MD);
}

void ValueMapper::remapInstruction(Instruction &I) {
  FlushingMapper(pImpl)->remapInstruction(&I);
}

void ValueMapper::remapFunction(Function &F) {
  FlushingMapper(pImpl)->remapFunction(F);
}

void ValueMapper::scheduleMapGlobalInitializer(GlobalVariable &GV,
                                               Constant &Init, unsigned MCID) {
  getAsMapper(pImpl)->scheduleMapGlobalInitializer(GV, Init, MCID);
}

void ValueMapper::scheduleMapAppendingVariable(
    GlobalVariable &GV, Constant *InitPrefix, bool IsOldCtorDtor,
    ArrayRef<Constant *> NewMembers, unsigned MCID) {
  getAsMapper(pImpl)->scheduleMapAppendingVariable(
      GV, InitPrefix, IsOldCtorDtor, NewMembers, MCID);
}

void ValueMapper::scheduleMapGlobalAliasee(GlobalAlias &GA, Constant &Aliasee,
                                           unsigned MCID) {
  getAsMapper(pImpl)->scheduleMapGlobalAliasee(GA, Aliasee, MCID);
}

void ValueMapper::scheduleRemapFunction(Function &F, unsigned MCID) {
  getAsMapper(pImpl)->scheduleRemapFunction(F, MCID);
}
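// --- Illustrative sketch, not part of ValueMapper.cpp -----------------------
// Driving the pImpl API above directly.  A single ValueMapper instance reuses
// one Mapper (and the memoization in VMap) across many calls; bulk work queued
// with the schedule*() methods stays on the worklist until the next flushing
// entry point (mapValue, mapMetadata, remapInstruction, remapFunction, or
// addFlags) runs.  "NewF" is a hypothetical already-cloned function.
static void remapClonedFunction(Function &NewF, ValueToValueMapTy &VMap,
                                ValueMapTypeRemapper *TypeMapper) {
  ValueMapper VM(VMap, RF_IgnoreMissingLocals, TypeMapper);
  // Remaps operands, metadata attachments, argument types, and every
  // instruction in NewF through Mapper::remapFunction() above.
  VM.remapFunction(NewF);
}
// -----------------------------------------------------------------------------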