//===-- User.cpp - Implement the User class ------------------------------===//
bool User::replaceUsesOfWith(Value *From, Value *To) {
  bool Changed = false;
  if (From == To)
    return Changed;

  assert((!isa<Constant>(this) || isa<GlobalValue>(this)) &&
         "Cannot call User::replaceUsesOfWith on a constant!");

  // ... (per-operand rewrite loop elided in this listing) ...

  if (auto DVI = dyn_cast_or_null<DbgVariableIntrinsic>(this)) {
    if (is_contained(DVI->location_ops(), From)) {
      DVI->replaceVariableLocationOp(From, To);
      Changed = true;
    }
  }

  return Changed;
}
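For context, a minimal usage sketch (not part of User.cpp; the helper name is invented): replaceUsesOfWith rewires the operands of one particular user, unlike Value::replaceAllUsesWith, which rewrites every user of a value.

#include "llvm/IR/Instruction.h"
#include "llvm/IR/Value.h"

// Point every operand of I that currently references OldV at NewV instead;
// other users of OldV are left untouched. Returns true if anything changed.
static bool redirectOneUser(llvm::Instruction &I, llvm::Value *OldV,
                            llvm::Value *NewV) {
  return I.replaceUsesOfWith(OldV, NewV);
}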
void User::allocHungoffUses(unsigned N, bool IsPhi) {
  assert(HasHungOffUses && "alloc must have hung off uses");

  static_assert(alignof(Use) >= alignof(BasicBlock *),
                "Alignment is insufficient for 'hung-off-uses' pieces");

  // Allocate the Use array; a phi additionally gets space for its
  // incoming-block pointers.
  size_t size = N * sizeof(Use);
  if (IsPhi)
    size += N * sizeof(BasicBlock *);
  Use *Begin = static_cast<Use *>(::operator new(size));
  Use *End = Begin + N;
  setOperandList(Begin);
  for (; Begin != End; Begin++)
    new (Begin) Use(this);
}
void User::growHungoffUses(unsigned NewNumUses, bool IsPhi) {
  assert(HasHungOffUses && "realloc must have hung off uses");

  unsigned OldNumUses = getNumOperands();
  assert(NewNumUses > OldNumUses && "realloc must grow num uses");

  Use *OldOps = getOperandList();
  allocHungoffUses(NewNumUses, IsPhi);
  Use *NewOps = getOperandList();

  // Copy the old operands into the new, larger list.
  std::copy(OldOps, OldOps + OldNumUses, NewOps);

  // For a phi, also copy the co-allocated incoming-block pointers.
  if (IsPhi) {
    auto *OldPtr = reinterpret_cast<char *>(OldOps + OldNumUses);
    auto *NewPtr = reinterpret_cast<char *>(NewOps + NewNumUses);
    std::copy(OldPtr, OldPtr + (OldNumUses * sizeof(BasicBlock *)), NewPtr);
  }
  Use::zap(OldOps, OldOps + OldNumUses, true);
}
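The hung-off scheme above is what backs PHINode, whose operand count grows as incoming edges are added. A small sketch, assuming a conventional PHINode::Create overload that inserts at the end of a block (makeTwoWayPhi is a hypothetical helper):

#include "llvm/IR/BasicBlock.h"
#include "llvm/IR/Instructions.h"

// Build a phi with two incoming edges; addIncoming may regrow the hung-off
// Use list (see growHungoffUses) once the reserved slots run out.
static llvm::PHINode *makeTwoWayPhi(llvm::Type *Ty, llvm::Value *A,
                                    llvm::BasicBlock *FromA, llvm::Value *B,
                                    llvm::BasicBlock *FromB,
                                    llvm::BasicBlock *InsertAtEnd) {
  llvm::PHINode *Phi =
      llvm::PHINode::Create(Ty, /*NumReservedValues=*/2, "merge", InsertAtEnd);
  Phi->addIncoming(A, FromA);
  Phi->addIncoming(B, FromB);
  return Phi;
}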
ArrayRef<const uint8_t> User::getDescriptor() const {
  auto MutableARef = const_cast<User *>(this)->getDescriptor();
  return {MutableARef.begin(), MutableARef.end()};
}

MutableArrayRef<uint8_t> User::getDescriptor() {
  assert(HasDescriptor && "Don't call otherwise!");

  auto *DI = reinterpret_cast<DescriptorInfo *>(getIntrusiveOperands()) - 1;
  assert(DI->SizeInBytes != 0 && "Should not have had a descriptor otherwise!");
  return MutableArrayRef<uint8_t>(
      reinterpret_cast<uint8_t *>(DI) - DI->SizeInBytes, DI->SizeInBytes);
}
bool User::isDroppable() const {
  return isa<AssumeInst>(this) || isa<PseudoProbeInst>(this);
}
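Droppable users matter because passes may discard them to unblock a transformation. A sketch of the Value-side entry point (dropBlockingAssumes is a hypothetical helper):

#include "llvm/IR/Value.h"

// Remove uses whose user is droppable (llvm.assume, pseudo probes), leaving
// all other users of V intact.
static void dropBlockingAssumes(llvm::Value &V) {
  V.dropDroppableUses();
}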
void *User::allocateFixedOperandUser(size_t Size, unsigned Us,
                                     unsigned DescBytes) {
  static_assert(sizeof(DescriptorInfo) % sizeof(void *) == 0,
                "Required below");

  unsigned DescBytesToAllocate =
      DescBytes == 0 ? 0 : (DescBytes + sizeof(DescriptorInfo));
  assert(DescBytesToAllocate % sizeof(void *) == 0 &&
         "We need this to satisfy alignment constraints for Uses");

  // Lay out one block as: [descriptor][DescriptorInfo][Use x Us][User ...].
  uint8_t *Storage = static_cast<uint8_t *>(
      ::operator new(Size + sizeof(Use) * Us + DescBytesToAllocate));
  Use *Start = reinterpret_cast<Use *>(Storage + DescBytesToAllocate);
  Use *End = Start + Us;
  User *Obj = reinterpret_cast<User *>(End);
  Obj->NumUserOperands = Us;
  Obj->HasHungOffUses = false;
  Obj->HasDescriptor = DescBytes != 0;
  for (; Start != End; Start++)
    new (Start) Use(Obj);

  if (DescBytes != 0) {
    auto *DescInfo = reinterpret_cast<DescriptorInfo *>(Storage + DescBytes);
    DescInfo->SizeInBytes = DescBytes;
  }

  return Obj;
}
void *User::operator new(size_t Size, unsigned Us) {
  return allocateFixedOperandUser(Size, Us, 0);
}
void *User::operator new(size_t Size, unsigned Us, unsigned DescBytes) {
  return allocateFixedOperandUser(Size, Us, DescBytes);
}
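To make the fixed-operand layout concrete, here is a standalone sketch (plain C++, not LLVM code; Slot and Node are invented names) of the same co-allocation idea: the operand slots sit immediately before the object, so they are reached by pointer arithmetic rather than through a stored pointer.

#include <new>

struct Slot { void *Val = nullptr; };

struct Node {
  unsigned NumSlots;
  explicit Node(unsigned N) : NumSlots(N) {}

  // The N co-allocated slots live directly before this object.
  Slot *slots() { return reinterpret_cast<Slot *>(this) - NumSlots; }

  static Node *create(unsigned N) {
    // One allocation: N Slot objects followed by the Node, mirroring the
    // Storage/Start/End/Obj arithmetic in allocateFixedOperandUser.
    void *Mem = ::operator new(sizeof(Slot) * N + sizeof(Node));
    Slot *First = static_cast<Slot *>(Mem);
    for (unsigned I = 0; I != N; ++I)
      new (First + I) Slot();
    return new (First + N) Node(N);
  }

  static void destroy(Node *Obj) {
    // Free from the first co-allocated Slot, as User::operator delete does.
    Slot *First = Obj->slots();
    unsigned N = Obj->NumSlots;
    Obj->~Node();
    for (unsigned I = 0; I != N; ++I)
      First[I].~Slot();
    ::operator delete(First);
  }
};

int main() {
  Node *N = Node::create(3);
  N->slots()[0].Val = N; // operands are reachable from the object
  Node::destroy(N);
}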
void *User::operator new(size_t Size) {
  // Allocate space for a single hung-off Use* immediately before the User.
  void *Storage = ::operator new(Size + sizeof(Use *));
  Use **HungOffOperandList = static_cast<Use **>(Storage);
  User *Obj = reinterpret_cast<User *>(HungOffOperandList + 1);
  Obj->NumUserOperands = 0;
  Obj->HasHungOffUses = true;
  Obj->HasDescriptor = false;
  *HungOffOperandList = nullptr;
  return Obj;
}
// Suppress MemorySanitizer here: operator delete reads the object's layout
// bit-fields after the destructor has already run.
LLVM_NO_SANITIZE_MEMORY_ATTRIBUTE void User::operator delete(void *Usr) {
  // Hung-off uses keep a single Use* before the User; other subclasses keep
  // a co-allocated Use[] (optionally preceded by a descriptor).
  User *Obj = static_cast<User *>(Usr);
  if (Obj->HasHungOffUses) {
    Use **HungOffOperandList = static_cast<Use **>(Usr) - 1;
    // Drop the hung-off uses, then free the block starting at the Use*.
    Use::zap(*HungOffOperandList, *HungOffOperandList + Obj->NumUserOperands,
             /* Delete */ true);
    ::operator delete(HungOffOperandList);
  } else if (Obj->HasDescriptor) {
    Use *UseBegin = static_cast<Use *>(Usr) - Obj->NumUserOperands;
    Use::zap(UseBegin, UseBegin + Obj->NumUserOperands, /* Delete */ false);

    auto *DI = reinterpret_cast<DescriptorInfo *>(UseBegin) - 1;
    uint8_t *Storage = reinterpret_cast<uint8_t *>(DI) - DI->SizeInBytes;
    ::operator delete(Storage);
  } else {
    Use *Storage = static_cast<Use *>(Usr) - Obj->NumUserOperands;
    Use::zap(Storage, Storage + Obj->NumUserOperands, /* Delete */ false);
    ::operator delete(Storage);
  }
}
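Finally, a short usage note (a sketch, not from this file): destroying a detached instruction is the usual way this operator delete is reached from client code.

#include "llvm/IR/Instruction.h"

// Reclaim an instruction that has already been unlinked from its parent
// block; destruction flows through ~Instruction and then the matching
// User::operator delete above, which also frees the co-allocated Uses.
static void destroyDetachedInstruction(llvm::Instruction *I) {
  I->deleteValue();
}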